From ae40e9435ffa5326df66233edcb7653a7fbf788f Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Wed, 21 Jun 2023 12:12:08 -0700 Subject: [PATCH 001/408] Fix variant propagation with exception to spec --- lib/spack/spack/test/concretize.py | 10 ++++++++++ .../repos/builtin.mock/packages/openblas/package.py | 2 ++ 2 files changed, 12 insertions(+) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index d72860a31d2706..54cda0ad513a6f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -348,6 +348,10 @@ def test_compiler_flags_differ_identical_compilers(self): spec.concretize() assert spec.satisfies("cflags=-O2") + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) def test_concretize_compiler_flag_propagate(self): spec = Spec("hypre cflags=='-g' ^openblas") spec.concretize() @@ -463,13 +467,19 @@ def test_concretize_propagate_disabled_variant(self): spec.concretize() assert spec.satisfies("^openblas~shared") + assert spec.satisfies("^zlib~shared") + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) def test_concretize_propagated_variant_is_not_passed_to_dependent(self): """Test a package variant value was passed from its parent.""" spec = Spec("hypre~~shared ^openblas+shared") spec.concretize() assert spec.satisfies("^openblas+shared") + assert spec.satisfies("^zlib~shared") def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py index 33a6e20d514d63..51ba404b559da2 100644 --- a/var/spack/repos/builtin.mock/packages/openblas/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas/package.py @@ -17,6 +17,8 @@ class Openblas(Package): version("0.2.14", md5="b1190f3d3471685f17cfd1ec1d252ac9") version("0.2.13", md5="b1190f3d3471685f17cfd1ec1d252ac9") + depends_on("zlib") + variant("shared", default=True, description="Build shared libraries") # See #20019 for this conflict From 71d77098805576c6e23cb8cb49dd3aa4a21d2710 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 26 Jun 2023 16:56:20 -0700 Subject: [PATCH 002/408] Add test for all unify options in env --- lib/spack/spack/test/concretize.py | 35 ++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 54cda0ad513a6f..fe5df189202121 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -17,6 +17,7 @@ import spack.concretize import spack.config import spack.detection +import spack.environment as ev import spack.error import spack.hash_types as ht import spack.platforms @@ -24,9 +25,12 @@ import spack.solver.asp import spack.variant as vt from spack.concretize import find_spec +from spack.main import SpackCommand from spack.spec import CompilerSpec, Spec from spack.version import Version, ver +env = SpackCommand("env") + def check_spec(abstract, concrete): if abstract.versions.concrete: @@ -461,6 +465,37 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" ) + @pytest.mark.parametrize("unify", 
[True, False, "when_possible"]) + def test_concretize_environment_propagated_disabled_variant(self, unify, tmpdir, mutable_mock_env_path): + path = tmpdir.join("spack.yaml") + + with tmpdir.as_cwd(): + with open(str(path), "w") as f: + f.write( + """\ +spack: + specs: + - hypre ~~shared ^openblas +""" + ) + + env("create", "test", str(path)) + + test = ev.read("test") + test.unify = unify + test.concretize() + + for spec in test.specs_by_hash.values(): + for dep in spec.dependencies(): + if dep.name == "openblas": + assert dep.satisfies("~shared") + assert dep.satisfies("^zlib ~shared") + + + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) def test_concretize_propagate_disabled_variant(self): """Test a package variant value was passed from its parent.""" spec = Spec("hypre~~shared ^openblas") From d0dbdc532625f342c9e414d8a7ba8174bf14fb5f Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 26 Jun 2023 18:25:39 -0700 Subject: [PATCH 003/408] Fix style --- lib/spack/spack/test/concretize.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index fe5df189202121..682bd50e625712 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -477,7 +477,7 @@ def test_concretize_environment_propagated_disabled_variant(self, unify, tmpdir, specs: - hypre ~~shared ^openblas """ - ) + ) env("create", "test", str(path)) @@ -491,7 +491,6 @@ def test_concretize_environment_propagated_disabled_variant(self, unify, tmpdir, assert dep.satisfies("~shared") assert dep.satisfies("^zlib ~shared") - @pytest.mark.skipif( os.environ.get("SPACK_TEST_SOLVER") == "original", reason="Optional compiler propagation isn't deprecated for original concretizer", From 0882d9abcf4acb39b1b547c2e9ede5447737d506 Mon Sep 17 00:00:00 2001 From: RikkiButler20 <39577672+RikkiButler20@users.noreply.github.com> Date: Tue, 27 Jun 2023 01:33:00 +0000 Subject: [PATCH 004/408] [@spackbot] updating style on behalf of RikkiButler20 --- lib/spack/spack/test/concretize.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 682bd50e625712..b99eb357bcea61 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -466,7 +466,9 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): "Optional compiler propagation isn't deprecated for original concretizer" ) @pytest.mark.parametrize("unify", [True, False, "when_possible"]) - def test_concretize_environment_propagated_disabled_variant(self, unify, tmpdir, mutable_mock_env_path): + def test_concretize_environment_propagated_disabled_variant( + self, unify, tmpdir, mutable_mock_env_path + ): path = tmpdir.join("spack.yaml") with tmpdir.as_cwd(): From 70351179552f5ea8f7b82d270eee61bd5a438c6e Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Wed, 28 Jun 2023 10:54:49 -0700 Subject: [PATCH 005/408] Make sure variants are propagated w/o using path --- lib/spack/spack/test/concretize.py | 1 + var/spack/repos/builtin.mock/packages/netlib-lapack/package.py | 1 + var/spack/repos/builtin.mock/packages/perl/package.py | 2 ++ 3 files changed, 4 insertions(+) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b99eb357bcea61..bff1637e952f15 100644 --- a/lib/spack/spack/test/concretize.py +++ 
b/lib/spack/spack/test/concretize.py @@ -502,6 +502,7 @@ def test_concretize_propagate_disabled_variant(self): spec = Spec("hypre~~shared ^openblas") spec.concretize() + assert spec.satisfies("^perl~shared") assert spec.satisfies("^openblas~shared") assert spec.satisfies("^zlib~shared") diff --git a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py index dd6a24ee5ec80d..378c19ea11b5fa 100644 --- a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py @@ -14,3 +14,4 @@ class NetlibLapack(Package): provides("lapack") depends_on("blas") + depends_on("perl") diff --git a/var/spack/repos/builtin.mock/packages/perl/package.py b/var/spack/repos/builtin.mock/packages/perl/package.py index 1025efb2e9316b..b35f741b699650 100644 --- a/var/spack/repos/builtin.mock/packages/perl/package.py +++ b/var/spack/repos/builtin.mock/packages/perl/package.py @@ -14,3 +14,5 @@ class Perl(Package): extendable = True version("0.0.0", md5="abcdef1234567890abcdef1234567890") + + variant("shared", default=True, description="Build a shared libperl.so library") From 8383d68b04852a44bcf540477825258b4d51a386 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Wed, 28 Jun 2023 11:05:09 -0700 Subject: [PATCH 006/408] Remove unnecessary additions to tests --- lib/spack/spack/test/concretize.py | 1 - var/spack/repos/builtin.mock/packages/netlib-lapack/package.py | 1 - var/spack/repos/builtin.mock/packages/perl/package.py | 2 -- 3 files changed, 4 deletions(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index bff1637e952f15..b99eb357bcea61 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -502,7 +502,6 @@ def test_concretize_propagate_disabled_variant(self): spec = Spec("hypre~~shared ^openblas") spec.concretize() - assert spec.satisfies("^perl~shared") assert spec.satisfies("^openblas~shared") assert spec.satisfies("^zlib~shared") diff --git a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py index 378c19ea11b5fa..dd6a24ee5ec80d 100644 --- a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py @@ -14,4 +14,3 @@ class NetlibLapack(Package): provides("lapack") depends_on("blas") - depends_on("perl") diff --git a/var/spack/repos/builtin.mock/packages/perl/package.py b/var/spack/repos/builtin.mock/packages/perl/package.py index b35f741b699650..1025efb2e9316b 100644 --- a/var/spack/repos/builtin.mock/packages/perl/package.py +++ b/var/spack/repos/builtin.mock/packages/perl/package.py @@ -14,5 +14,3 @@ class Perl(Package): extendable = True version("0.0.0", md5="abcdef1234567890abcdef1234567890") - - variant("shared", default=True, description="Build a shared libperl.so library") From e0542b43801b53260753904dc81eae852bd5574e Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 17 Jul 2023 08:38:44 -0700 Subject: [PATCH 007/408] Propagate when concretizing separately --- lib/spack/spack/spec.py | 8 +++++++- lib/spack/spack/variant.py | 8 ++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index bf06ca7aff51e2..1dfb388fbcbf61 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2048,6 +2048,9 @@ def to_node_dict(self, hash=ht.dag_hash): params = 
syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items())) + for k, v in params.items(): + params[k] = (params[k], self.variants[k].propagate) + # Only need the string compiler flag for yaml file params.update( sorted( @@ -4890,7 +4893,10 @@ def from_node_dict(cls, node): for val in values: spec.compiler_flags.add_flag(name, val, False) else: - spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) + if isinstance(values, tuple): + spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values[0], values[1]) + else: + spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) spec.external_path = None spec.external_modules = None diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index e0b9a5540b9b8c..5501b96b36e6ce 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -254,19 +254,19 @@ def __init__(self, name, value, propagate=False): self.value = value @staticmethod - def from_node_dict(name, value): + def from_node_dict(name, value, propagate=False): """Reconstruct a variant from a node dict.""" if isinstance(value, list): # read multi-value variants in and be faithful to the YAML - mvar = MultiValuedVariant(name, ()) + mvar = MultiValuedVariant(name, (), propagate) mvar._value = tuple(value) mvar._original_value = mvar._value return mvar elif str(value).upper() == "TRUE" or str(value).upper() == "FALSE": - return BoolValuedVariant(name, value) + return BoolValuedVariant(name, value, propagate) - return SingleValuedVariant(name, value) + return SingleValuedVariant(name, value, propagate) def yaml_entry(self): """Returns a key, value tuple suitable to be an entry in a yaml dict. From 9bfb3c8de9a9908674c4d3bf20f7b25c4a43f853 Mon Sep 17 00:00:00 2001 From: RikkiButler20 <39577672+RikkiButler20@users.noreply.github.com> Date: Mon, 17 Jul 2023 16:45:13 +0000 Subject: [PATCH 008/408] [@spackbot] updating style on behalf of RikkiButler20 --- lib/spack/spack/spec.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 1dfb388fbcbf61..e4712be28ba816 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4894,7 +4894,9 @@ def from_node_dict(cls, node): spec.compiler_flags.add_flag(name, val, False) else: if isinstance(values, tuple): - spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values[0], values[1]) + spec.variants[name] = vt.MultiValuedVariant.from_node_dict( + name, values[0], values[1] + ) else: spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) From 28ea39e4a36fab791d4ce873f651909a24ca6ebd Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Tue, 18 Jul 2023 09:55:55 -0700 Subject: [PATCH 009/408] Create propagation namedtuple --- lib/spack/spack/spec.py | 14 ++++++++------ lib/spack/spack/test/concretize.py | 2 +- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e4712be28ba816..f269a5b509a1b2 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -174,6 +174,9 @@ SPECFILE_FORMAT_VERSION = 4 +PropagateValue = collections.namedtuple("PropagateValue", ["value", "propagate"]) + + # InstallStatus is used to map install statuses to symbols for display # Options are artificially disjoint for dispay purposes class InstallStatus(enum.Enum): @@ -2049,7 +2052,8 @@ def to_node_dict(self, hash=ht.dag_hash): params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items())) for k, v 
in params.items(): - params[k] = (params[k], self.variants[k].propagate) + if self.variants[k].propagate: + params[k] = PropagateValue(params[k], self.variants[k].propagate) # Only need the string compiler flag for yaml file params.update( @@ -4893,12 +4897,10 @@ def from_node_dict(cls, node): for val in values: spec.compiler_flags.add_flag(name, val, False) else: - if isinstance(values, tuple): - spec.variants[name] = vt.MultiValuedVariant.from_node_dict( - name, values[0], values[1] - ) + if isinstance(values, PropagateValue): + spec.variants[name] = vt.AbstractVariant.from_node_dict(name, values.value, values.propagate) else: - spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) + spec.variants[name] = vt.AbstractVariant.from_node_dict(name, values) spec.external_path = None spec.external_modules = None diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b99eb357bcea61..5e80d3c0492cff 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1464,7 +1464,7 @@ def test_non_default_provider_of_multiple_virtuals(self): @pytest.mark.regression("27237") @pytest.mark.parametrize( "spec_str,expect_installed", - [("mpich", True), ("mpich+debug", False), ("mpich~debug", True)], + [("mpich", True), ("mpich+debug", False), ("mpich~debug", True), ("mpich++debug", False)], ) @pytest.mark.only_clingo("Use case not supported by the original concretizer") def test_concrete_specs_are_not_modified_on_reuse( From b407ae63f32b57c3091bb9b9f9f143e8c55b1302 Mon Sep 17 00:00:00 2001 From: RikkiButler20 <39577672+RikkiButler20@users.noreply.github.com> Date: Tue, 18 Jul 2023 17:27:16 +0000 Subject: [PATCH 010/408] [@spackbot] updating style on behalf of RikkiButler20 --- lib/spack/spack/spec.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index f269a5b509a1b2..75d4a4d02f98ca 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4898,7 +4898,9 @@ def from_node_dict(cls, node): spec.compiler_flags.add_flag(name, val, False) else: if isinstance(values, PropagateValue): - spec.variants[name] = vt.AbstractVariant.from_node_dict(name, values.value, values.propagate) + spec.variants[name] = vt.AbstractVariant.from_node_dict( + name, values.value, values.propagate + ) else: spec.variants[name] = vt.AbstractVariant.from_node_dict(name, values) From e34ccbbf578a258a456be4a5b5c756cca89c67a9 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Thu, 17 Aug 2023 14:58:16 -0700 Subject: [PATCH 011/408] Only propagate the specified variant --- lib/spack/spack/solver/asp.py | 6 +++--- lib/spack/spack/solver/concretize.lp | 14 +++++++++----- lib/spack/spack/test/concretize.py | 7 ++----- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 7b13b4baa386b8..82c7ba7c34778f 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1749,7 +1749,7 @@ class Head: node_flag = fn.attr("node_flag_set") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_propagate = fn.attr("variant_propagate") + variant_possible_prop = fn.attr("variant_possible_prop") class Body: node = fn.attr("node") @@ -1763,7 +1763,7 @@ class Body: node_flag = fn.attr("node_flag") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_propagate = fn.attr("variant_propagate") 
+ variant_possible_prop = fn.attr("variant_possible_prop") f = Body if body else Head @@ -1812,7 +1812,7 @@ class Body: clauses.append(f.variant_value(spec.name, vname, value)) if variant.propagate: - clauses.append(f.variant_propagate(spec.name, vname, value, spec.name)) + clauses.append(f.variant_possible_prop(spec.name, vname, value, spec.name)) # Tell the concretizer that this is a possible value for the # variant, to account for things like int/str values where we diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 9d1b48b37945c1..461604ea98fb92 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -744,11 +744,15 @@ node_has_variant(node(ID, Package), Variant) :- pkg_fact(Package, variant(Variant)), attr("node", node(ID, Package)). -attr("variant_propagate", PackageNode, Variant, Value, Source) :- - attr("node", PackageNode), - depends_on(ParentNode, PackageNode), - attr("variant_propagate", ParentNode, Variant, Value, Source), - not attr("variant_set", PackageNode, Variant). +attr("variant_propagate", Package, Variant, Value, Source) :- + attr("variant_possible_prop", Package, Variant, Value, Source), + not attr("variant_set", Package, Variant). + +attr("variant_possible_prop", Package, Variant, Value, Source) :- + attr("node", Package), + depends_on(Parent, Package), + attr("variant_possible_prop", Parent, Variant, _, Source), + attr("variant_value", Source, Variant, Value). attr("variant_value", node(ID, Package), Variant, Value) :- attr("node", node(ID, Package)), diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 5e80d3c0492cff..859c267f99030f 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -17,7 +17,6 @@ import spack.concretize import spack.config import spack.detection -import spack.environment as ev import spack.error import spack.hash_types as ht import spack.platforms @@ -29,8 +28,6 @@ from spack.spec import CompilerSpec, Spec from spack.version import Version, ver -env = SpackCommand("env") - def check_spec(abstract, concrete): if abstract.versions.concrete: @@ -481,9 +478,9 @@ def test_concretize_environment_propagated_disabled_variant( """ ) - env("create", "test", str(path)) + SpackCommand("env")("create", "test", str(path)) - test = ev.read("test") + test = spack.environment.read("test") test.unify = unify test.concretize() From 32e15dad574883eb5371e5474c1ef532501c52ea Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Thu, 24 Aug 2023 11:04:02 -0700 Subject: [PATCH 012/408] Propagation to work with new concretizer --- lib/spack/spack/solver/concretize.lp | 19 ++++++------ test/.spack-env/transaction_lock | 0 .../builtin.mock/packages/splice-b/package.py | 29 +++++++++++++++++++ 3 files changed, 39 insertions(+), 9 deletions(-) create mode 100755 test/.spack-env/transaction_lock create mode 100644 var/spack/repos/builtin.mock/packages/splice-b/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 461604ea98fb92..c315782da282d5 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -744,15 +744,16 @@ node_has_variant(node(ID, Package), Variant) :- pkg_fact(Package, variant(Variant)), attr("node", node(ID, Package)). -attr("variant_propagate", Package, Variant, Value, Source) :- - attr("variant_possible_prop", Package, Variant, Value, Source), - not attr("variant_set", Package, Variant). 
- -attr("variant_possible_prop", Package, Variant, Value, Source) :- - attr("node", Package), - depends_on(Parent, Package), - attr("variant_possible_prop", Parent, Variant, _, Source), - attr("variant_value", Source, Variant, Value). +attr("variant_possible_prop", PackageNode, Variant, Value, Source) :- + attr("node", PackageNode), + depends_on(ParentNode, PackageNode), + attr("variant_value", node(ID, Source), Variant, Value), + attr("variant_possible_prop", ParentNode, Variant, _, Source). + +attr("variant_propagate", PackageNode, Variant, Value, Source) :- + attr("variant_possible_prop", PackageNode, Variant, Value, Source), + not attr("variant_set", PackageNode, Variant). + attr("variant_value", node(ID, Package), Variant, Value) :- attr("node", node(ID, Package)), diff --git a/test/.spack-env/transaction_lock b/test/.spack-env/transaction_lock new file mode 100755 index 00000000000000..e69de29bb2d1d6 diff --git a/var/spack/repos/builtin.mock/packages/splice-b/package.py b/var/spack/repos/builtin.mock/packages/splice-b/package.py new file mode 100644 index 00000000000000..549250f7b21005 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/splice-b/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class SpliceB(Package): + """Simple package with one optional dependency""" + + homepage = "http://www.example.com" + url = "http://www.example.com/splice-b-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789efghij") + + variant("foo", default=False, description="nope") + variant("bar", default=False, description="nope") + variant("baz", default=False, description="nope") + + depends_on("splice-a") + + provides("something") + provides("somethingelse") + + def install(self, spec, prefix): + with open(prefix.join("splice-b"), "w") as f: + f.write("splice-b: {0}".format(prefix)) + f.write("splice-z: {0}".format(spec["splice-z"].prefix)) From af0bef76628e8ae25871843e65ae12576ce1ddb4 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 28 Aug 2023 16:04:42 -0700 Subject: [PATCH 013/408] Test that only specified variant propagates --- lib/spack/spack/test/cmd/pkg.py | 2 +- lib/spack/spack/test/concretize.py | 12 ++++++++++++ .../repos/builtin.mock/packages/splice-b/package.py | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py index 56a1f1f78676e8..6394ed4d31bab7 100644 --- a/lib/spack/spack/test/cmd/pkg.py +++ b/lib/spack/spack/test/cmd/pkg.py @@ -303,7 +303,7 @@ def test_pkg_grep(mock_packages, capfd): output, _ = capfd.readouterr() assert output.strip() == "\n".join( spack.repo.PATH.get_pkg_class(name).module.__file__ - for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"] + for name in ["splice-a", "splice-b", "splice-h", "splice-t", "splice-vh", "splice-z"] ) # ensure that this string isn't fouhnd diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 859c267f99030f..1eee017808e5de 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -514,6 +514,18 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): assert spec.satisfies("^openblas+shared") assert spec.satisfies("^zlib~shared") + + def test_concretize_propagate_specified_variant(self): + """Test that only the 
specified variant is propagated to the dependencies""" + spec = Spec("splice-b++bar") + spec.concretize() + + assert spec.satisfies("^splice-a+bar") + assert spec.satisfies("^splice-z+bar") + assert not spec.satisfies("^splice-a+foo") + assert not spec.satisfies("^splice-z+foo") + + def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed with spack.concretize.enable_compiler_existence_check(): diff --git a/var/spack/repos/builtin.mock/packages/splice-b/package.py b/var/spack/repos/builtin.mock/packages/splice-b/package.py index 549250f7b21005..b539b9bcb6422d 100644 --- a/var/spack/repos/builtin.mock/packages/splice-b/package.py +++ b/var/spack/repos/builtin.mock/packages/splice-b/package.py @@ -14,7 +14,7 @@ class SpliceB(Package): version("1.0", md5="0123456789abcdef0123456789efghij") - variant("foo", default=False, description="nope") + variant("foo", default=True, description="nope") variant("bar", default=False, description="nope") variant("baz", default=False, description="nope") From 83a1690f1ce356a6cdbacb3b9e84db58e8b8dfbf Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Tue, 29 Aug 2023 13:08:25 -0700 Subject: [PATCH 014/408] Remove extra spaces --- lib/spack/spack/solver/concretize.lp | 1 - lib/spack/spack/test/concretize.py | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index c315782da282d5..0ed907c8c00031 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -754,7 +754,6 @@ attr("variant_propagate", PackageNode, Variant, Value, Source) :- attr("variant_possible_prop", PackageNode, Variant, Value, Source), not attr("variant_set", PackageNode, Variant). 
- attr("variant_value", node(ID, Package), Variant, Value) :- attr("node", node(ID, Package)), node_has_variant(node(ID, Package), Variant), diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 1eee017808e5de..db64c6c3953b40 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -514,7 +514,10 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): assert spec.satisfies("^openblas+shared") assert spec.satisfies("^zlib~shared") - + @pytest.mark.skipif( + os.environ.get("SPACK_TEST_SOLVER") == "original", + reason="Optional compiler propagation isn't deprecated for original concretizer", + ) def test_concretize_propagate_specified_variant(self): """Test that only the specified variant is propagated to the dependencies""" spec = Spec("splice-b++bar") @@ -525,7 +528,6 @@ def test_concretize_propagate_specified_variant(self): assert not spec.satisfies("^splice-a+foo") assert not spec.satisfies("^splice-z+foo") - def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed with spack.concretize.enable_compiler_existence_check(): From d93c4e52b4a7cde4dbd4066ea851a1181104f9fb Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Wed, 6 Sep 2023 10:11:50 -0700 Subject: [PATCH 015/408] Remove unnneeded file --- test/.spack-env/transaction_lock | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100755 test/.spack-env/transaction_lock diff --git a/test/.spack-env/transaction_lock b/test/.spack-env/transaction_lock deleted file mode 100755 index e69de29bb2d1d6..00000000000000 From 4fe044a2fd6738a7869e76bbeee62ef9ca3959c1 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Thu, 5 Oct 2023 11:21:32 -0700 Subject: [PATCH 016/408] Test should only run if using clingo --- lib/spack/spack/test/concretize.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index fc52806a46d506..d9512d4b29bdd5 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -350,9 +350,8 @@ def test_compiler_flags_differ_identical_compilers(self): spec.concretize() assert spec.satisfies("cflags=-O2") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Optional compiler propagation isn't deprecated for original concretizer", + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" ) def test_concretize_compiler_flag_propagate(self): spec = Spec("hypre cflags=='-g' ^openblas") @@ -467,6 +466,7 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): def test_concretize_environment_propagated_disabled_variant( self, unify, tmpdir, mutable_mock_env_path ): + """Ensure that variants are propagated in a concrete environment""" path = tmpdir.join("spack.yaml") with tmpdir.as_cwd(): @@ -491,9 +491,8 @@ def test_concretize_environment_propagated_disabled_variant( assert dep.satisfies("~shared") assert dep.satisfies("^zlib ~shared") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Optional compiler propagation isn't deprecated for original concretizer", + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" ) def test_concretize_propagate_disabled_variant(self): """Test a package variant value was passed from its parent.""" @@ -503,9 +502,8 @@ def 
test_concretize_propagate_disabled_variant(self): assert spec.satisfies("^openblas~shared") assert spec.satisfies("^zlib~shared") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Optional compiler propagation isn't deprecated for original concretizer", + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" ) def test_concretize_propagated_variant_is_not_passed_to_dependent(self): """Test a package variant value was passed from its parent.""" @@ -515,9 +513,8 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): assert spec.satisfies("^openblas+shared") assert spec.satisfies("^zlib~shared") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Optional compiler propagation isn't deprecated for original concretizer", + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" ) def test_concretize_propagate_specified_variant(self): """Test that only the specified variant is propagated to the dependencies""" From f3d2d06bd6a808c6f7e9b60c3bb9cdc0fe503276 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Thu, 5 Oct 2023 13:58:30 -0700 Subject: [PATCH 017/408] Create new mock packages Instead of modifying existing ones --- lib/spack/spack/test/concretize.py | 20 ++++++++-------- .../builtin.mock/packages/adios2/package.py | 24 +++++++++++++++++++ .../builtin.mock/packages/ascent/package.py | 20 ++++++++++++++++ .../builtin.mock/packages/bzip2/package.py | 18 ++++++++++++++ .../builtin.mock/packages/openblas/package.py | 2 -- 5 files changed, 72 insertions(+), 12 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/adios2/package.py create mode 100644 var/spack/repos/builtin.mock/packages/ascent/package.py create mode 100644 var/spack/repos/builtin.mock/packages/bzip2/package.py diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index d9512d4b29bdd5..5cf2e46166fd51 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -466,7 +466,7 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): def test_concretize_environment_propagated_disabled_variant( self, unify, tmpdir, mutable_mock_env_path ): - """Ensure that variants are propagated in a concrete environment""" + """Ensure that variants are propagated in a concrete environment""" path = tmpdir.join("spack.yaml") with tmpdir.as_cwd(): @@ -475,7 +475,7 @@ def test_concretize_environment_propagated_disabled_variant( """\ spack: specs: - - hypre ~~shared ^openblas + - ascent ~~shared +adios2 """ ) @@ -487,31 +487,31 @@ def test_concretize_environment_propagated_disabled_variant( for spec in test.specs_by_hash.values(): for dep in spec.dependencies(): - if dep.name == "openblas": + if dep.name == "adios2": assert dep.satisfies("~shared") - assert dep.satisfies("^zlib ~shared") + assert dep.satisfies("^bzip2 ~shared") @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" ) def test_concretize_propagate_disabled_variant(self): """Test a package variant value was passed from its parent.""" - spec = Spec("hypre~~shared ^openblas") + spec = Spec("ascent~~shared +adios2") spec.concretize() - assert spec.satisfies("^openblas~shared") - assert spec.satisfies("^zlib~shared") + for dep in spec.traverse(): + assert dep.satisfies("~shared") @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original 
concretizer" ) def test_concretize_propagated_variant_is_not_passed_to_dependent(self): """Test a package variant value was passed from its parent.""" - spec = Spec("hypre~~shared ^openblas+shared") + spec = Spec("ascent~~shared +adios2 ^adios2+shared") spec.concretize() - assert spec.satisfies("^openblas+shared") - assert spec.satisfies("^zlib~shared") + assert spec.satisfies("^adios2+shared") + assert spec.satisfies("^bzip2~shared") @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" diff --git a/var/spack/repos/builtin.mock/packages/adios2/package.py b/var/spack/repos/builtin.mock/packages/adios2/package.py new file mode 100644 index 00000000000000..392a4a19810400 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/adios2/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Adios2(Package): + """The Adaptable Input Output System version 2, + developed in the Exascale Computing Program""" + + homepage = "https://someplace.com" + url = "https://anotherplace.com" + + version( + "2.9.1", + sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9") + + variant("shared", default=True, description="Build shared libraries") + variant("bzip2", default=True, description="Enable BZip2 compression") + + depends_on("bzip2") diff --git a/var/spack/repos/builtin.mock/packages/ascent/package.py b/var/spack/repos/builtin.mock/packages/ascent/package.py new file mode 100644 index 00000000000000..92b074a5f981d7 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/ascent/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Ascent(Package): + """Fake ascent package.""" + + homepage = "https://github.com/Alpine-DAV/ascent" + url = "someplace" + + version("0.9.2", sha256="44cd954aa5db478ab40042cd54fd6fcedf25000c3bb510ca23fcff8090531b91") + + variant("adios2", default=False, description="Build Adios2 filter support") + variant("shared", default=True, description="Build Ascent as shared libs") + + depends_on("adios2", when="+adios2") diff --git a/var/spack/repos/builtin.mock/packages/bzip2/package.py b/var/spack/repos/builtin.mock/packages/bzip2/package.py new file mode 100644 index 00000000000000..cb49cedd3f35f2 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/bzip2/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Bzip2(Package): + """This is a thing""" + + homepage = "https://someplace.com" + url = "https://anotherplace.com" + + version("1.0.8", sha256="ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269") + + variant("shared", default=True, description="Enables the build of shared libraries.") diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py index 51ba404b559da2..33a6e20d514d63 100644 --- a/var/spack/repos/builtin.mock/packages/openblas/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas/package.py @@ -17,8 +17,6 @@ class Openblas(Package): version("0.2.14", md5="b1190f3d3471685f17cfd1ec1d252ac9") version("0.2.13", md5="b1190f3d3471685f17cfd1ec1d252ac9") - depends_on("zlib") - variant("shared", default=True, description="Build shared libraries") # See #20019 for this conflict From 328fbe2faf051c390ca8f8faff6aecaba746d8b6 Mon Sep 17 00:00:00 2001 From: RikkiButler20 <39577672+RikkiButler20@users.noreply.github.com> Date: Thu, 5 Oct 2023 21:07:27 +0000 Subject: [PATCH 018/408] [@spackbot] updating style on behalf of RikkiButler20 --- var/spack/repos/builtin.mock/packages/adios2/package.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/var/spack/repos/builtin.mock/packages/adios2/package.py b/var/spack/repos/builtin.mock/packages/adios2/package.py index 392a4a19810400..2bd71d6b4dd883 100644 --- a/var/spack/repos/builtin.mock/packages/adios2/package.py +++ b/var/spack/repos/builtin.mock/packages/adios2/package.py @@ -14,9 +14,7 @@ class Adios2(Package): homepage = "https://someplace.com" url = "https://anotherplace.com" - version( - "2.9.1", - sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9") + version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9") variant("shared", default=True, description="Build shared libraries") variant("bzip2", default=True, description="Enable BZip2 compression") From 1d95144948b1b66331f918e2a94994910fb2f6f2 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Thu, 5 Oct 2023 14:12:43 -0700 Subject: [PATCH 019/408] Remove old code --- lib/spack/spack/spec.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 4930cd48661019..b0f79b68b3bc0c 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -180,8 +180,6 @@ PropagateValue = collections.namedtuple("PropagateValue", ["value", "propagate"]) -# InstallStatus is used to map install statuses to symbols for display -# Options are artificially disjoint for dispay purposes class InstallStatus(enum.Enum): """Maps install statuses to symbols for display. 
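For reference, a minimal sketch of the propagation behavior the patches above implement and test. It is illustrative only and not part of any patch; it assumes the builtin.mock "ascent", "adios2" and "bzip2" packages added in PATCH 017 and an active mock repository, as set up by the pytest fixtures used in lib/spack/spack/test/concretize.py.

    # Illustrative sketch (mock packages assumed): "~~shared" asks the
    # concretizer to propagate the disabled "shared" variant to dependencies
    # that define it, while a plain "~shared" applies only to the named spec.
    from spack.spec import Spec

    spec = Spec("ascent~~shared +adios2")
    spec.concretize()

    # After concretization the propagated value reaches adios2 and bzip2,
    # mirroring test_concretize_propagate_disabled_variant above.
    for dep in spec.traverse():
        assert dep.satisfies("~shared")

An explicitly set value on a dependency still wins: Spec("ascent~~shared +adios2 ^adios2+shared") concretizes with adios2+shared while bzip2 remains ~shared, as checked in test_concretize_propagated_variant_is_not_passed_to_dependent.
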
From 9bbb75c2b14ff85fb99be5a503759696d292c761 Mon Sep 17 00:00:00 2001 From: Richarda Butler <39577672+RikkiButler20@users.noreply.github.com> Date: Thu, 5 Oct 2023 14:18:52 -0700 Subject: [PATCH 020/408] Update lib/spack/spack/test/concretize.py Make assertions for test more readable Co-authored-by: Massimiliano Culpo --- lib/spack/spack/test/concretize.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 5cf2e46166fd51..b4d94a2610575e 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -521,10 +521,10 @@ def test_concretize_propagate_specified_variant(self): spec = Spec("splice-b++bar") spec.concretize() - assert spec.satisfies("^splice-a+bar") - assert spec.satisfies("^splice-z+bar") - assert not spec.satisfies("^splice-a+foo") - assert not spec.satisfies("^splice-z+foo") + assert spec.satisfies("+bar") and spec.satisfies("^splice-a+bar") + assert spec.satisfies("+bar") and spec.satisfies("^splice-z+bar") + assert spec.satisfies("+foo") and not spec.satisfies("^splice-a+foo") + assert spec.satisfies("+foo") and not spec.satisfies("^splice-z+foo") def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed From 9242f08a7daa82584c87f8abac100b4f93e4bfe4 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 9 Oct 2023 12:42:29 -0700 Subject: [PATCH 021/408] Revert AbstractVariant change --- lib/spack/spack/solver/concretize.lp | 2 +- lib/spack/spack/spec.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 5131f15374800a..26d167ce69afc9 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -755,7 +755,7 @@ node_has_variant(node(ID, Package), Variant) :- attr("variant_possible_prop", PackageNode, Variant, Value, Source) :- attr("node", PackageNode), depends_on(ParentNode, PackageNode), - attr("variant_value", node(ID, Source), Variant, Value), + attr("variant_value", node(_, Source), Variant, Value), attr("variant_possible_prop", ParentNode, Variant, _, Source). attr("variant_propagate", PackageNode, Variant, Value, Source) :- diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index b0f79b68b3bc0c..4df6c1eb857dbc 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4886,11 +4886,11 @@ def from_node_dict(cls, node): spec.compiler_flags.add_flag(name, val, False) else: if isinstance(values, PropagateValue): - spec.variants[name] = vt.AbstractVariant.from_node_dict( + spec.variants[name] = vt.MultiValuedVariant.from_node_dict( name, values.value, values.propagate ) else: - spec.variants[name] = vt.AbstractVariant.from_node_dict(name, values) + spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values) spec.external_path = None spec.external_modules = None From d0d4d03ebfb43af66fcb5d761dddd4b8610a0002 Mon Sep 17 00:00:00 2001 From: Sinan Date: Tue, 3 Oct 2023 12:21:51 -0700 Subject: [PATCH 022/408] fix_qgis_build_with_pysip5 (#39941) * fix_qgis_build_with_pysip5 * build fails with newer protobuf * somehow findgdal can figure this out. * Update var/spack/repos/builtin/packages/qgis/package.py Co-authored-by: Adam J. 
Stewart * fix gdal lib again * qgis needs QtPositioning provided by qt+location option * fix FindPyQt5 cmake file * fix bug * fix qsci sip issue * fix bug * blackify * improve * add latest LTR * add build dep * revert until bug is fixed * specify proj version for qgis 3.28 * improve gdal libs search via indicating gdal-config * make flake happy * improve deps * add 3.28.11, improve style * fix style * [@spackbot] updating style on behalf of Sinan81 --------- Co-authored-by: Sinan81 Co-authored-by: Adam J. Stewart Co-authored-by: Sinan81 --- .../repos/builtin/packages/qgis/package.py | 54 ++++++++++++++++--- 1 file changed, 46 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index a269ba95a1e8a6..45bb05639910f2 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -17,15 +17,18 @@ class Qgis(CMakePackage): maintainers("adamjstewart", "Sinan81") - version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e") # Prefer latest long term release version( - "3.22.16", - sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a", + "3.28.11", + sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4", preferred=True, ) + version("3.28.10", sha256="cff867e97909bbc2facce6343770dcb1b61fc6e4855f57783e30bf63d51c5218") + version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e") + version("3.22.16", sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a") version("3.22.0", sha256="cf0c169863f332aab67d8c4943e14b73a564f0254bf54015f5826c6427e6785b") version("3.18.2", sha256="1913e4d5596bbc8b7d143f3defb18bf376f750a71f334f69d76af5deca7ecc5d") + version("3.16.16", sha256="ccd2f404534fcb00b5e17863375462090c9575e68b32ce50b2e7e925d1e01a49") version("3.16.12", sha256="65e9634b5c885c98f3555cf77bc2e3fae5e19279aa17e3f6626ff5d7455fd2b9") version("3.16.5", sha256="525f469ad6e40dd7a8f09ebab5eb6a2dffc45939b99b7d937750cc04ed78d61c") version("3.14.16", sha256="c9915c2e577f1812a2b35b678b123c58407e07824d73e5ec0dda13db7ca75c04") @@ -110,9 +113,12 @@ class Qgis(CMakePackage): depends_on("proj@4.4.0:") depends_on("proj@4.9.3:", when="@3.8.2:") depends_on("proj@7.2:", when="@3.28:") + depends_on("proj@:8", when="@3.28") # build fails with proj@9 depends_on("py-psycopg2", type=("build", "run")) # TODO: is build dependency necessary? depends_on("py-pyqt4", when="@2") depends_on("py-pyqt5@5.3:", when="@3") + depends_on("py-sip", type="build") + depends_on("py-pyqt-builder", type="build", when="^py-sip@5:") depends_on("py-requests", type=("build", "run")) # TODO: is build dependency necessary? 
depends_on("python@3.0.0:", type=("build", "run"), when="@3") depends_on("python@3.6:", type=("build", "run"), when="@3.18:") @@ -120,14 +126,15 @@ class Qgis(CMakePackage): depends_on("qca@2.2.1:") depends_on("qjson") depends_on("qscintilla +python") - depends_on("qt+dbus") - depends_on("qt+dbus@5.12.0:", when="@3.20:") - depends_on("qt+dbus@5.14.0:", when="@3.28:") + depends_on("qt+dbus+location") + depends_on("qt+dbus+location@5.12.0:", when="@3.20:") + depends_on("qt+dbus+location@5.14.0:", when="@3.28:") depends_on("qtkeychain@0.5:", when="@3:") depends_on("qwt@5:") depends_on("qwtpolar") depends_on("sqlite@3.0.0: +column_metadata") depends_on("protobuf", when="@3.16.4:") + depends_on("protobuf@:3.21", when="@:3.28") depends_on("zstd", when="@3.22:") # Runtime python dependencies, not mentioned in install instructions @@ -163,8 +170,38 @@ class Qgis(CMakePackage): depends_on("qt@:4", when="@2") patch("pyqt5.patch", when="@:3.14 ^qt@5") - patch("pyqt5_3165x.patch", when="@3.16.5:3.21 ^qt@5") - patch("pyqt5_322x.patch", when="@3.22: ^qt@5") + patch("pyqt5_3165x.patch", when="@3.16.5:3.21 ^qt@5 ^py-sip@4") + patch("pyqt5_322x.patch", when="@3.22: ^qt@5 ^py-sip@4") + + @run_before("cmake", when="^py-pyqt5") + def fix_pyqt5_cmake(self): + cmfile = FileFilter(join_path("cmake", "FindPyQt5.cmake")) + pyqtpath = join_path( + self.spec["py-pyqt5"].prefix, self.spec["python"].package.platlib, "PyQt5" + ) + cmfile.filter( + 'SET(PYQT5_MOD_DIR "${Python_SITEARCH}/PyQt5")', + 'SET(PYQT5_MOD_DIR "' + pyqtpath + '")', + string=True, + ) + cmfile.filter( + 'SET(PYQT5_SIP_DIR "${Python_SITEARCH}/PyQt5/bindings")', + 'SET(PYQT5_SIP_DIR "' + pyqtpath + '/bindings")', + string=True, + ) + + @run_before("build") + def fix_qsci_sip(self): + if "^py-pyqt5" in self.spec: + pyqtx = "PyQt5" + elif "^py-pyqt6" in self.spec: + pyqtx = "PyQt6" + + sip_inc_dir = join_path( + self.spec["qscintilla"].prefix, self.spec["python"].package.platlib, pyqtx, "bindings" + ) + with open(join_path("python", "gui", "pyproject.toml.in"), "a") as tomlfile: + tomlfile.write(f'\n[tool.sip.project]\nsip-include-dirs = ["{sip_inc_dir}"]\n') def cmake_args(self): spec = self.spec @@ -185,6 +222,7 @@ def cmake_args(self): "-DLIBZIP_INCLUDE_DIR=" + self.spec["libzip"].prefix.include, "-DLIBZIP_CONF_INCLUDE_DIR=" + self.spec["libzip"].prefix.lib.libzip.include, "-DGDAL_CONFIG_PREFER_PATH=" + self.spec["gdal"].prefix.bin, + "-DGDAL_CONFIG=" + join_path(self.spec["gdal"].prefix.bin, "gdal-config"), "-DGEOS_CONFIG_PREFER_PATH=" + self.spec["geos"].prefix.bin, "-DGSL_CONFIG_PREFER_PATH=" + self.spec["gsl"].prefix.bin, "-DPOSTGRES_CONFIG_PREFER_PATH=" + self.spec["postgresql"].prefix.bin, From 8d66b06bfa848001eeb32ca4d98230315518c688 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Tue, 3 Oct 2023 15:16:41 -0600 Subject: [PATCH 023/408] ci: Change how job names appear in gitlab (#39963) --- lib/spack/spack/ci.py | 48 +++++++++------------------------- lib/spack/spack/test/cmd/ci.py | 3 +-- 2 files changed, 13 insertions(+), 38 deletions(-) diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 6d555798ce335e..bf5aaa79a3fba6 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -50,6 +50,9 @@ TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror" +JOB_NAME_FORMAT = ( + "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}" +) spack_gpg = spack.main.SpackCommand("gpg") 
spack_compiler = spack.main.SpackCommand("compiler") @@ -69,48 +72,23 @@ def __exit__(self, exc_type, exc_value, exc_traceback): return False -def get_job_name(spec, osarch, build_group): - """Given the necessary parts, format the gitlab job name +def get_job_name(spec: spack.spec.Spec, build_group: str = ""): + """Given a spec and possibly a build group, return the job name. If the + resulting name is longer than 255 characters, it will be truncated. Arguments: spec (spack.spec.Spec): Spec job will build - osarch: Architecture TODO: (this is a spack.spec.ArchSpec, - but sphinx doesn't recognize the type and fails). build_group (str): Name of build group this job belongs to (a CDash notion) Returns: The job name """ - item_idx = 0 - format_str = "" - format_args = [] - - format_str += "{{{0}}}".format(item_idx) - format_args.append(spec.name) - item_idx += 1 - - format_str += "/{{{0}}}".format(item_idx) - format_args.append(spec.dag_hash(7)) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(spec.version) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(spec.compiler) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(osarch) - item_idx += 1 + job_name = spec.format(JOB_NAME_FORMAT) if build_group: - format_str += " {{{0}}}".format(item_idx) - format_args.append(build_group) - item_idx += 1 + job_name = "{0} {1}".format(job_name, build_group) - return format_str.format(*format_args) + return job_name[:255] def _remove_reserved_tags(tags): @@ -337,7 +315,7 @@ def _spec_matches(spec, match_string): def _format_job_needs( - dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache + dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache ): needs_list = [] for dep_job in dep_jobs: @@ -347,7 +325,7 @@ def _format_job_needs( if not prune_dag or rebuild: needs_list.append( { - "job": get_job_name(dep_job, dep_job.architecture, build_group), + "job": get_job_name(dep_job, build_group), "artifacts": enable_artifacts_buildcache, } ) @@ -1023,8 +1001,7 @@ def main_script_replacements(cmd): if "after_script" in job_object: job_object["after_script"] = _unpack_script(job_object["after_script"]) - osname = str(release_spec.architecture) - job_name = get_job_name(release_spec, osname, build_group) + job_name = get_job_name(release_spec, build_group) job_vars = job_object.setdefault("variables", {}) job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash @@ -1051,7 +1028,6 @@ def main_script_replacements(cmd): job_object["needs"].extend( _format_job_needs( dep_jobs, - osname, build_group, prune_dag, rebuild_decisions, diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 25dc15e33197bf..a0de63517af1e8 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -1990,8 +1990,7 @@ def test_ci_reproduce( ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root) - target_name = spack.platforms.test.Test.default - job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None) + job_name = ci.get_job_name(job_spec) repro_file = os.path.join(working_dir.strpath, "repro.json") repro_details = { From a6f19dd6694e10b049c6a0314ab58f9c71d00548 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Tue, 3 Oct 2023 16:45:24 -0500 Subject: [PATCH 024/408] ADIOS2: v2.8 is not compatible with HDF5 v1.14: (#40258) --- 
var/spack/repos/builtin/packages/adios2/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index ddd33862173798..47affe6f8a2cac 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -130,6 +130,7 @@ class Adios2(CMakePackage, CudaPackage): depends_on("libzmq", when="+dataman") depends_on("dataspaces@1.8.0:", when="+dataspaces") + depends_on("hdf5@:1.12", when="@:2.8 +hdf5") depends_on("hdf5~mpi", when="+hdf5~mpi") depends_on("hdf5+mpi", when="+hdf5+mpi") From a89c639efe986b0792726024d8a44fbf5601ee56 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 4 Oct 2023 00:51:14 -0700 Subject: [PATCH 025/408] e4s arm: disable bricks due to target=aarch64 not being respected (#40308) --- .../gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml index 4a15e62b9baf49..ebb254995deda7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml @@ -81,7 +81,6 @@ spack: - axom - bolt - boost - - bricks ~cuda - butterflypack - cabana - caliper @@ -90,6 +89,7 @@ spack: - conduit - datatransferkit - dyninst + - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: ? - exaworks - flecsi - flit @@ -189,21 +189,21 @@ spack: - vtk-m - zfp # -- - # - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. - # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2' # - archer # part of llvm +omp_tsan - - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: ? + # - bricks ~cuda # not respecting target=aarch64? + # - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. # - geopm # geopm: https://github.com/spack/spack/issues/38795 + # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2' # - variorum # variorum: https://github.com/spack/spack/issues/38786 # CUDA NOARCH - - bricks +cuda - flux-core +cuda - hpctoolkit +cuda - papi +cuda - tau +mpi +cuda # -- - # - legion +cuda # legion: needs NVIDIA driver + # - bricks +cuda # not respecting target=aarch64? 
+ # - legion +cuda # legion: needs NVIDIA driver # CUDA 75 - amrex +cuda cuda_arch=75 From eed7ab5feef09a519dd8ba7bcaf56b4fde5b941f Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Wed, 4 Oct 2023 01:55:06 -0600 Subject: [PATCH 026/408] ci: pull E4S images from github instead of dockerhub (#40307) --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 10 +++++----- .../cloud_pipelines/stacks/e4s-arm/spack.yaml | 14 +++++++------- .../cloud_pipelines/stacks/e4s-oneapi/spack.yaml | 4 ++-- .../cloud_pipelines/stacks/e4s-power/spack.yaml | 4 ++-- .../stacks/e4s-rocm-external/spack.yaml | 6 +++--- .../gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 4 ++-- 6 files changed, 21 insertions(+), 21 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index fe801e68f03d73..df2c7f85ca2cca 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -287,7 +287,7 @@ protected-publish: e4s-generate: extends: [ ".e4s", ".generate-x86_64"] - image: ecpe4s/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01 e4s-build: extends: [ ".e4s", ".build" ] @@ -310,7 +310,7 @@ e4s-build: e4s-arm-generate: extends: [ ".e4s-arm", ".generate-aarch64" ] - image: ecpe4s/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01 e4s-arm-build: extends: [ ".e4s-arm", ".build" ] @@ -333,7 +333,7 @@ e4s-arm-build: e4s-rocm-external-generate: extends: [ ".e4s-rocm-external", ".generate-x86_64"] - image: ecpe4s/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01 e4s-rocm-external-build: extends: [ ".e4s-rocm-external", ".build" ] @@ -379,7 +379,7 @@ gpu-tests-build: e4s-oneapi-generate: extends: [ ".e4s-oneapi", ".generate-x86_64"] - image: ecpe4s/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 e4s-oneapi-build: extends: [ ".e4s-oneapi", ".build" ] @@ -396,7 +396,7 @@ e4s-oneapi-build: # E4S on Power ######################################## .e4s-power-generate-tags-and-image: - image: { "name": "ecpe4s/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01", "entrypoint": [""] } + image: { "name": "ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01", "entrypoint": [""] } tags: ["spack", "public", "large", "ppc64le"] .e4s-power: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml index ebb254995deda7..02aafd6addd1dc 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml @@ -187,7 +187,7 @@ spack: - veloc # - visit # silo: https://github.com/spack/spack/issues/39538 - vtk-m - - zfp + - zfp # -- # - archer # part of llvm +omp_tsan # - bricks ~cuda # not respecting target=aarch64? @@ -205,7 +205,7 @@ spack: # - bricks +cuda # not respecting target=aarch64? 
# - legion +cuda # legion: needs NVIDIA driver - # CUDA 75 + # CUDA 75 - amrex +cuda cuda_arch=75 - arborx +cuda cuda_arch=75 ^kokkos +wrapper - cabana +cuda cuda_arch=75 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=75 @@ -245,7 +245,7 @@ spack: # - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=75 # embree: https://github.com/spack/spack/issues/39534 # - lammps +cuda cuda_arch=75 # lammps: needs NVIDIA driver # - lbann +cuda cuda_arch=75 # lbann: https://github.com/spack/spack/issues/38788 - # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=75 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=75 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 # - py-torch +cuda cuda_arch=75 # skipped, installed by other means # - slepc +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. # - upcxx +cuda cuda_arch=75 # upcxx: needs NVIDIA driver @@ -290,7 +290,7 @@ spack: # - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # embree: https://github.com/spack/spack/issues/39534 # - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver # - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788 - # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=80 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=80 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 # - py-torch +cuda cuda_arch=80 # skipped, installed by other means # - slepc +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. 
# - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver @@ -333,7 +333,7 @@ spack: # - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12: # - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver # - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann" - # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=90 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=90 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 # - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535 # - py-torch +cuda cuda_arch=90 # skipped, installed by other means # - slepc +cuda cuda_arch=90 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. @@ -345,7 +345,7 @@ spack: ci: pipeline-gen: - build-job: - image: "ecpe4s/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01" + image: "ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01" cdash: - build-group: E4S ARM \ No newline at end of file + build-group: E4S ARM diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 160b079eb5d4bb..ec86c35b33d055 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -135,7 +135,7 @@ spack: - lammps - lbann - legion - - libnrm + - libnrm - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp - libquo - libunwind @@ -241,7 +241,7 @@ spack: ci: pipeline-gen: - build-job: - image: ecpe4s/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 cdash: build-group: E4S OneAPI diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 1fcde50ca63fb1..72e06b060d2f1c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -189,7 +189,7 @@ spack: # - visit # libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo - vtk-m - zfp - # -- + # -- # - archer # part of llvm +omp_tsan # - dealii # fltk: https://github.com/spack/spack/issues/38791 # - geopm # geopm: https://github.com/spack/spack/issues/38798 @@ -256,7 +256,7 @@ spack: ci: pipeline-gen: - build-job: - image: ecpe4s/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01 + image: ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01 cdash: build-group: E4S Power diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index dba404e9b3f910..cd9addbef0548c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -230,7 +230,7 @@ spack: buildable: false externals: - spec: hipsolver@5.4.3 - prefix: /opt/rocm-5.4.3 + prefix: /opt/rocm-5.4.3 rocsolver: buildable: false externals: @@ -340,7 +340,7 @@ spack: ci: pipeline-gen: - build-job: - image: "ecpe4s/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01" + image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01" cdash: - build-group: E4S ROCm External \ No newline at end of file + build-group: E4S ROCm External diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 8b5947da34f366..65ab32e80d0579 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -387,7 +387,7 @@ spack: ci: pipeline-gen: - build-job: - image: "ecpe4s/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01" + image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01" cdash: - build-group: E4S \ No newline at end of file + build-group: E4S From c0479557a2aafda9674514934ffcb5bd4283f21c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 4 Oct 2023 09:59:59 +0200 Subject: [PATCH 027/408] petsc: add conflict on rocm 5.6: for now (#40300) hipsparse@5.6.0 changed hipsparseSpSV_solve() API, but reverted in 5.6.1 --- var/spack/repos/builtin/packages/petsc/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 4a8ac8657bb818..9d0d3a9016aa59 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -159,6 +159,10 @@ class Petsc(Package, CudaPackage, ROCmPackage): # https://github.com/spack/spack/issues/37416 conflicts("^rocprim@5.3.0:5.3.2", when="+rocm") + # petsc 3.20 has workaround for breaking change in hipsparseSpSV_solve api, + # but it seems to misdetect hipsparse@5.6.1 as 5.6.0, so the workaround + # only makes things worse + conflicts("^hipsparse@5.6", when="+rocm @3.20.0") # 3.8.0 has a build issue with MKL - so list this conflict explicitly conflicts("^intel-mkl", when="@3.8.0") From 2325e05739697cef1ba9d6b44f35f5069fc0c214 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Wed, 4 Oct 2023 04:28:49 -0500 Subject: [PATCH 028/408] py-einops: add v0.7.0 (#40296) --- var/spack/repos/builtin/packages/py-einops/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-einops/package.py b/var/spack/repos/builtin/packages/py-einops/package.py index 9394cdfc4621a7..d5d8eedebe6cf4 100644 --- a/var/spack/repos/builtin/packages/py-einops/package.py +++ b/var/spack/repos/builtin/packages/py-einops/package.py @@ -14,10 +14,12 @@ class PyEinops(PythonPackage): homepage = "https://github.com/arogozhnikov/einops" pypi = "einops/einops-0.3.2.tar.gz" + version("0.7.0", sha256="b2b04ad6081a3b227080c9bf5e3ace7160357ff03043cd66cc5b2319eb7031d1") version("0.6.1", sha256="f95f8d00f4ded90dbc4b19b6f98b177332614b0357dde66997f3ae5d474dc8c8") version("0.6.0", sha256="6f6c78739316a2e3ccbce8052310497e69da092935e4173f2e76ec4e3a336a35") version("0.5.0", sha256="8b7a83cffc1ea88e306df099b7cbb9c3ba5003bd84d05ae44be5655864abb8d3") version("0.3.2", sha256="5200e413539f0377f4177ef00dc019968f4177c49b1db3e836c7883df2a5fe2e") + depends_on("python@3.8:", when="@0.7:", type=("build", "run")) depends_on("py-hatchling@1.10:", when="@0.5:", type="build") depends_on("py-setuptools", when="@:0.4", type="build") From c7e437a0ae46c456d0f68b82c1af14504eca9eed Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 4 Oct 2023 11:31:46 +0200 Subject: [PATCH 029/408] py-isort: needs setuptools build dep before v5 (#40234) * py-isort: needs setuptools build dep before v5 Detected in #40224. In the past, system setuptools could be picked up when using an external python, so py-isort@4 would install fine. With the linked PR, pip can only consider packages that Spack controls from PYTHONPATH, so the issue of missing py-setuptools showed up. 
* py-importlib-metadata: fix lowerbounds on python * review * py-isort unconditionally add optional setuptools dep to prevent picking up user package at runtime * style * drop optional py-setuptools run dep --- .../repos/builtin/packages/py-importlib-metadata/package.py | 3 +++ var/spack/repos/builtin/packages/py-isort/package.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py index 910b287ac82e6b..0abad1fe97ef49 100644 --- a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py +++ b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py @@ -32,6 +32,9 @@ class PyImportlibMetadata(PythonPackage): version("0.19", sha256="23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8") version("0.18", sha256="cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db") + depends_on("python@3.8:", when="@6.8.0:", type=("build", "run")) + # lowerbound needed as spack itself supports python 3.6 (can be dropped in spack 0.21) + depends_on("python@3.7:", when="@4.9.0:", type=("build", "run")) depends_on("py-setuptools@56:", when="@4.6.4:", type="build") depends_on("py-setuptools", type="build") depends_on("py-setuptools-scm@3.4.1:+toml", when="@3:", type="build") diff --git a/var/spack/repos/builtin/packages/py-isort/package.py b/var/spack/repos/builtin/packages/py-isort/package.py index ce49a3e1bc4ec7..aca4dd29047753 100644 --- a/var/spack/repos/builtin/packages/py-isort/package.py +++ b/var/spack/repos/builtin/packages/py-isort/package.py @@ -23,7 +23,8 @@ class PyIsort(PythonPackage): depends_on("python@3.8:", when="@5.12:", type=("build", "run")) depends_on("python@3.6.1:3", when="@5:5.10", type=("build", "run")) - depends_on("py-poetry-core@1:", type="build") + depends_on("py-setuptools", when="@:4", type=("build", "run")) + depends_on("py-poetry-core@1:", when="@5:", type="build") depends_on("py-colorama@0.4.3:", when="+colors @5.12:", type=("build", "run")) depends_on("py-colorama@0.4.3:0.4", when="+colors @:5.11", type=("build", "run")) From ee8de90eb03157a3e2625ab46a4a56966d947fb2 Mon Sep 17 00:00:00 2001 From: Dom Heinzeller Date: Wed, 4 Oct 2023 09:34:23 -0600 Subject: [PATCH 030/408] Add new package awscli-v2 and its missing dependency awscrt (#40288) * Add new package awscli-v2 and its missing dependency awscrt * Remove boilerplate comments from awscli-v2 and awscrt packages * Fix typos in var/spack/repos/builtin/packages/awscli-v2/package.py * Update var/spack/repos/builtin/packages/awscli-v2/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/awscli-v2/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/awscli-v2/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/awscli-v2/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/awscli-v2/package.py Co-authored-by: Adam J. Stewart * Address reviewer comments * Remove py-pip version dependency from var/spack/repos/builtin/packages/awscli-v2/package.py --------- Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/awscli-v2/package.py | 31 +++++++++++++++++++ .../builtin/packages/py-awscrt/package.py | 19 ++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 var/spack/repos/builtin/packages/awscli-v2/package.py create mode 100644 var/spack/repos/builtin/packages/py-awscrt/package.py diff --git a/var/spack/repos/builtin/packages/awscli-v2/package.py b/var/spack/repos/builtin/packages/awscli-v2/package.py new file mode 100644 index 00000000000000..49436945f9956e --- /dev/null +++ b/var/spack/repos/builtin/packages/awscli-v2/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class AwscliV2(PythonPackage): + """This package provides a unified command line interface to Amazon Web Services.""" + + homepage = "https://docs.aws.amazon.com/cli" + url = "https://github.com/aws/aws-cli/archive/refs/tags/2.13.22.tar.gz" + + maintainers("climbfuji") + + version("2.13.22", sha256="dd731a2ba5973f3219f24c8b332a223a29d959493c8a8e93746d65877d02afc1") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-flit-core@3.7.1:3.8.0", type=("build")) + depends_on("py-colorama@0.2.5:0.4.6", type=("build", "run")) + depends_on("py-docutils@0.10:0.19", type=("build", "run")) + depends_on("py-cryptography@3.3.2:40.0.1", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15:0.17.21", type=("build", "run")) + depends_on("py-ruamel-yaml-clib@0.2:0.2.7", type=("build", "run")) + depends_on("py-prompt-toolkit@3.0.24:3.0.38", type=("build", "run")) + depends_on("py-distro@1.5:1.8", type=("build", "run")) + depends_on("py-awscrt@0.16.4:0.16.16", type=("build", "run")) + depends_on("py-python-dateutil@2.1:2", type=("build", "run")) + depends_on("py-jmespath@0.7.1:1.0", type=("build", "run")) + depends_on("py-urllib3@1.25.4:1.26", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-awscrt/package.py b/var/spack/repos/builtin/packages/py-awscrt/package.py new file mode 100644 index 00000000000000..652e7164a6f445 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-awscrt/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyAwscrt(PythonPackage): + """Python 3 bindings for the AWS Common Runtime.""" + + homepage = "https://docs.aws.amazon.com/sdkref/latest/guide/common-runtime.html" + pypi = "awscrt/awscrt-0.16.16.tar.gz" + + maintainers("climbfuji") + + version("0.16.16", sha256="13075df2c1d7942fe22327b6483274517ee0f6ae765c4e6b6ae9ef5b4c43a827") + + depends_on("py-setuptools", type=("build")) From d1a1981278f5ee350ea64b50a06d99d680711398 Mon Sep 17 00:00:00 2001 From: Josh Bowden Date: Wed, 4 Oct 2023 17:34:43 +0200 Subject: [PATCH 031/408] Damaris release v1.9.2 (#40285) * Update to latest dot versions and improved installation of Damaris python module damaris4py * fix for visit dependency typo * whitespace check * whitespace check * fix for style issue * reviewer suggestions for integrating Python added * suggestion for boost depends statement added --- var/spack/repos/builtin/packages/damaris/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/damaris/package.py b/var/spack/repos/builtin/packages/damaris/package.py index 44f56877b948b6..a93bbece1318c3 100644 --- a/var/spack/repos/builtin/packages/damaris/package.py +++ b/var/spack/repos/builtin/packages/damaris/package.py @@ -16,6 +16,8 @@ class Damaris(CMakePackage): maintainers("jcbowden") version("master", branch="master") + version("1.9.2", tag="v1.9.2") + version("1.9.1", tag="v1.9.1") version("1.9.0", tag="v1.9.0") version("1.8.2", tag="v1.8.2") version("1.8.1", tag="v1.8.1") @@ -44,13 +46,14 @@ class Damaris(CMakePackage): default=False, description="Enables building of Python enabled Damaris library using Boost::python", ) + extends("python", when="+python") depends_on("xsd") depends_on("xerces-c") depends_on("mpi") depends_on("cmake@3.18.0:", type=("build")) - depends_on("boost+thread+log+filesystem+date_time" "@1.67:") - depends_on("boost+thread+log+filesystem+date_time+python+numpy" "@1.67:", when="+python") + depends_on("boost@1.67:+thread+log+filesystem+date_time+system") + depends_on("boost+python", when="+python") depends_on("py-mpi4py", when="+python", type=("build", "run")) depends_on("hdf5@1.8.20:", when="+hdf5") depends_on("paraview+python+mpi+development_files", when="+catalyst") @@ -87,6 +90,8 @@ def cmake_args(self): if self.spec.variants["python"].value: args.extend(["-DENABLE_PYTHON:BOOL=ON"]) + args.extend(["-DENABLE_PYTHONMOD:BOOL=ON"]) + args.append(self.define("PYTHON_MODULE_INSTALL_PATH", python_platlib)) if self.spec.variants["visit"].value: args.extend(["-DENABLE_VISIT:BOOL=ON"]) From 9777d6ddc3b3b4ecb03d9ff3816db772be63248a Mon Sep 17 00:00:00 2001 From: Ken Raffenetti Date: Wed, 4 Oct 2023 11:37:58 -0500 Subject: [PATCH 032/408] yaksa: Allow unsupported host compiler with CUDA (#40298) Fixes #40272. 
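For illustration only (hypothetical spec, not taken from this patch; the cuda_arch value is just an example): with this change a user would opt in with something like

    spack install yaksa +cuda cuda_arch=80 ^cuda +allow-unsupported-compilers

which makes the builder append `NVCC_FLAGS=-allow-unsupported-compiler` to the configure arguments, as the diff below shows.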
--- var/spack/repos/builtin/packages/yaksa/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/yaksa/package.py b/var/spack/repos/builtin/packages/yaksa/package.py index b696c00e1263a6..46366659be0ffc 100644 --- a/var/spack/repos/builtin/packages/yaksa/package.py +++ b/var/spack/repos/builtin/packages/yaksa/package.py @@ -47,6 +47,8 @@ def configure_args(self): cuda_archs = spec.variants["cuda_arch"].value if "none" not in cuda_archs: config_args.append("--with-cuda-sm={0}".format(",".join(cuda_archs))) + if "^cuda+allow-unsupported-compilers" in self.spec: + config_args.append("NVCC_FLAGS=-allow-unsupported-compiler") if "+rocm" in spec: config_args.append("--with-hip={0}".format(spec["hip"].prefix)) From 9b2df24e10b8b64ba88c0aa16c9f4928ff27f3bb Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 4 Oct 2023 21:38:50 +0200 Subject: [PATCH 033/408] Improve build isolation in PythonPipBuilder (#40224) We run pip with `--no-build-isolation` because we don't wanna let pip install build deps. As a consequence, when pip runs hooks, it runs hooks of *any* package it can find in `sys.path`. For Spack-built Python this includes user site packages -- there shouldn't be any system site packages. So in this case it suffices to set the environment variable PYTHONNOUSERSITE=1. For external Python, more needs to be done, cause there is no env variable that disables both system and user site packages; setting the `python -S` flag doesn't work because pip runs subprocesses that don't inherit this flag (and there is no API to know if -S was passed) So, for external Python, an empty venv is created before invoking pip in Spack's build env ensures that pip can no longer see anything but standard libraries and `PYTHONPATH`. The downside of this is that pip will generate shebangs that point to the python executable from the venv. So, for external python an extra step is necessary where we fix up shebangs post install. 
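As a minimal, standalone sketch of the two isolation strategies described above (the helper name, its arguments, and the POSIX `bin/python` layout are illustrative assumptions, not the actual Spack API; the real implementation follows in the diff):

    import os
    import subprocess

    def isolated_pip_argv(python_exe, is_external, stage_dir):
        # Return an argv prefix for running pip so that build hooks cannot
        # see user or system site-packages.
        if not is_external:
            # Spack-built Python: system site-packages is empty by design,
            # so hiding user site-packages via an env variable is enough.
            os.environ["PYTHONNOUSERSITE"] = "1"
            return [python_exe, "-m", "pip"]
        # External Python: build an empty venv (no pip inside) and invoke pip
        # through its interpreter; pip itself is still found via PYTHONPATH.
        venv = os.path.join(stage_dir, "build_env")
        subprocess.check_call([python_exe, "-m", "venv", "--without-pip", venv])
        return [os.path.join(venv, "bin", "python"), "-m", "pip"]

The only cost of the venv route is that pip writes shebangs pointing at the venv interpreter, which is why the post-install `fixup_shebangs()` pass below rewrites them to the underlying Python.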
--- lib/spack/spack/build_systems/python.py | 100 +++++++++++++++++++++++- 1 file changed, 99 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index 0d03813ddff8cf..fa27f8de495b67 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -6,6 +6,7 @@ import os import re import shutil +import stat from typing import Optional import archspec @@ -25,6 +26,7 @@ from spack.directives import build_system, depends_on, extends, maintainers from spack.error import NoHeadersError, NoLibrariesError, SpecError from spack.install_test import test_part +from spack.util.executable import Executable from spack.version import Version from ._checks import BaseBuilder, execute_install_time_tests @@ -351,6 +353,51 @@ def libs(self): raise NoLibrariesError(msg.format(self.spec.name, root)) +def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes): + # Recurse into the install prefix and fixup shebangs + exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + dirs = [path] + hardlinks = set() + + while dirs: + with os.scandir(dirs.pop()) as entries: + for entry in entries: + if entry.is_dir(follow_symlinks=False): + dirs.append(entry.path) + continue + + # Only consider files, not symlinks + if not entry.is_file(follow_symlinks=False): + continue + + lstat = entry.stat(follow_symlinks=False) + + # Skip over files that are not executable + if not (lstat.st_mode & exe): + continue + + # Don't modify hardlinks more than once + if lstat.st_nlink > 1: + key = (lstat.st_ino, lstat.st_dev) + if key in hardlinks: + continue + hardlinks.add(key) + + # Finally replace shebangs if any. + with open(entry.path, "rb+") as f: + contents = f.read(2) + if contents != b"#!": + continue + contents += f.read() + + if old_interpreter not in contents: + continue + + f.seek(0) + f.write(contents.replace(old_interpreter, new_interpreter)) + f.truncate() + + @spack.builder.builder("python_pip") class PythonPipBuilder(BaseBuilder): phases = ("install",) @@ -447,8 +494,36 @@ def global_options(self, spec, prefix): """ return [] + @property + def _build_venv_path(self): + """Return the path to the virtual environment used for building when + python is external.""" + return os.path.join(self.spec.package.stage.path, "build_env") + + @property + def _build_venv_python(self) -> Executable: + """Return the Python executable in the build virtual environment when + python is external.""" + return Executable(os.path.join(self._build_venv_path, "bin", "python")) + def install(self, pkg, spec, prefix): """Install everything from build directory.""" + python: Executable = spec["python"].command + # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot + # execute hooks from user and system site-packages. + if spec["python"].external: + # There are no environment variables to disable the system site-packages, so we use a + # virtual environment instead. The downside of this approach is that pip produces + # incorrect shebangs that refer to the virtual environment, which we have to fix up. + python("-m", "venv", "--without-pip", self._build_venv_path) + pip = self._build_venv_python + else: + # For a Spack managed Python, system site-packages is empty/unused by design, so it + # suffices to disable user site-packages, for which there is an environment variable. 
+ pip = python + pip.add_default_env("PYTHONNOUSERSITE", "1") + pip.add_default_arg("-m") + pip.add_default_arg("pip") args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix] @@ -472,8 +547,31 @@ def install(self, pkg, spec, prefix): else: args.append(".") - pip = inspect.getmodule(pkg).pip with fs.working_dir(self.build_directory): pip(*args) + @spack.builder.run_after("install") + def fixup_shebangs_pointing_to_build(self): + """When installing a package using an external python, we use a temporary virtual + environment which improves build isolation. The downside is that pip produces shebangs + that point to the temporary virtual environment. This method fixes them up to point to the + underlying Python.""" + # No need to fixup shebangs if no build venv was used. (this post install function also + # runs when install was overridden in another package, so check existence of the venv path) + if not os.path.exists(self._build_venv_path): + return + + # Use sys.executable, since that's what pip uses. + interpreter = ( + lambda python: python("-c", "import sys; print(sys.executable)", output=str) + .strip() + .encode("utf-8") + ) + + fixup_shebangs( + path=self.spec.prefix, + old_interpreter=interpreter(self._build_venv_python), + new_interpreter=interpreter(self.spec["python"].command), + ) + spack.builder.run_after("install")(execute_install_time_tests) From 0b3d17e96e2591f554a1c49fb8c59b85594f7090 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Wed, 4 Oct 2023 14:32:30 -0700 Subject: [PATCH 034/408] trilinos: add variant to build tests (#40284) * trilinos: add variant: testing * trilinos: rename +testing to +test --- var/spack/repos/builtin/packages/trilinos/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 28f68b1b488571..b6ee39282e44c8 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -148,6 +148,7 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): variant("stratimikos", default=False, description="Compile with Stratimikos") variant("teko", default=False, description="Compile with Teko") variant("tempus", default=False, description="Compile with Tempus") + variant("test", default=False, description="Enable testing") variant("thyra", default=False, description="Compile with Thyra") variant("tpetra", default=True, description="Compile with Tpetra") variant("trilinoscouplings", default=False, description="Compile with TrilinosCouplings") @@ -616,6 +617,12 @@ def define_enable(suffix, value=None): ] ) + if "+test" in spec: + options.append(define_trilinos_enable("TESTS", True)) + options.append(define("BUILD_TESTING", True)) + else: + options.append(define_trilinos_enable("TESTS", False)) + if spec.version >= Version("13"): options.append(define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")) else: From dfe5d37eba097f3cd68c14a4e45f2514ffbd2fbf Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Wed, 4 Oct 2023 21:57:17 -0700 Subject: [PATCH 035/408] exago: add and logging variant. 
(#40188) --- var/spack/repos/builtin/packages/exago/package.py | 12 ++++++++---- var/spack/repos/builtin/packages/hiop/package.py | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index e5f789fa8376ba..d28b4fa1f82018 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * @@ -13,7 +15,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/pnnl/ExaGO" git = "https://github.com/pnnl/ExaGO.git" - maintainers("ryandanehy", "CameronRutherford", "pelesh") + maintainers("ryandanehy", "cameronrutherford", "pelesh") version("1.5.1", commit="7abe482c8da0e247f9de4896f5982c4cacbecd78", submodules=True) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) @@ -45,6 +47,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." ) + variant("logging", default=False, description="Enable/Disable spdlog based logging") # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") @@ -175,17 +178,18 @@ def cmake_args(self): args.extend( [ self.define("EXAGO_ENABLE_GPU", "+cuda" in spec or "+rocm" in spec), + self.define("PETSC_DIR", spec["petsc"].prefix), + self.define("EXAGO_RUN_TESTS", self.run_tests), + self.define("LAPACK_LIBRARIES", spec["lapack"].libs + spec["blas"].libs), self.define_from_variant("EXAGO_ENABLE_CUDA", "cuda"), self.define_from_variant("EXAGO_ENABLE_HIP", "rocm"), - self.define("PETSC_DIR", spec["petsc"].prefix), - self.define("EXAGO_RUN_TESTS", True), + self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), self.define_from_variant("EXAGO_ENABLE_MPI", "mpi"), self.define_from_variant("EXAGO_ENABLE_RAJA", "raja"), self.define_from_variant("EXAGO_ENABLE_HIOP", "hiop"), self.define_from_variant("EXAGO_ENABLE_IPOPT", "ipopt"), self.define_from_variant("EXAGO_ENABLE_PYTHON", "python"), self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), - self.define("LAPACK_LIBRARIES", spec["lapack"].libs + spec["blas"].libs), ] ) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index e23fc44380502f..0ee82f9c32a0e8 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -19,7 +19,7 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/LLNL/hiop" git = "https://github.com/LLNL/hiop.git" - maintainers("ryandanehy", "CameronRutherford", "pelesh") + maintainers("ryandanehy", "cameronrutherford", "pelesh") # Most recent tagged snapshot is the preferred version when profiling. 
version("1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True) From 8f7da83977c7ef3723cc9e279a099e6f70505ff9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 5 Oct 2023 08:45:33 +0200 Subject: [PATCH 036/408] cray rhel: disable due to runner issues (#40324) --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index df2c7f85ca2cca..6dbc0d1581aadd 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -846,24 +846,24 @@ deprecated-ci-build: ####################################### # E4S - Cray RHEL ####################################### -.e4s-cray-rhel: - extends: [ ".cray_rhel_zen4" ] - variables: - SPACK_CI_STACK_NAME: e4s-cray-rhel +# .e4s-cray-rhel: +# extends: [ ".cray_rhel_zen4" ] +# variables: +# SPACK_CI_STACK_NAME: e4s-cray-rhel -e4s-cray-rhel-generate: - extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ] +# e4s-cray-rhel-generate: +# extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ] -e4s-cray-rhel-build: - extends: [ ".build", ".e4s-cray-rhel" ] - trigger: - include: - - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: e4s-cray-rhel-generate - strategy: depend - needs: - - artifacts: True - job: e4s-cray-rhel-generate +# e4s-cray-rhel-build: +# extends: [ ".build", ".e4s-cray-rhel" ] +# trigger: +# include: +# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml +# job: e4s-cray-rhel-generate +# strategy: depend +# needs: +# - artifacts: True +# job: e4s-cray-rhel-generate ####################################### # E4S - Cray SLES From a4f3ff170573a5cdfd6054529af97605840c06cf Mon Sep 17 00:00:00 2001 From: Auriane R <48684432+aurianer@users.noreply.github.com> Date: Thu, 5 Oct 2023 09:34:22 +0200 Subject: [PATCH 037/408] Add pika 0.19.0 (#40313) --- var/spack/repos/builtin/packages/pika/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 35a0436bcef05a..f581a6cd0c8602 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") version("0.18.0", sha256="f34890e0594eeca6ac57f2b988d0807b502782817e53a7f7043c3f921b08c99f") version("0.17.0", sha256="717429fc1bc986d62cbec190a69939e91608122d09d54bda1b028871c9ca9ad4") version("0.16.0", sha256="59f2baec91cc9bf71ca96d21d0da1ec0092bf59da106efa51789089e0d7adcbb") From 3c88abbc345416ee9e34746a6742ae0da2c660a1 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Thu, 5 Oct 2023 17:53:56 +0200 Subject: [PATCH 038/408] whizard: Make sure to detect LCIO if requested (#40316) --- var/spack/repos/builtin/packages/whizard/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/whizard/package.py b/var/spack/repos/builtin/packages/whizard/package.py index a9ccf9de2ec987..3297c2eddbe5f1 100644 --- a/var/spack/repos/builtin/packages/whizard/package.py +++ b/var/spack/repos/builtin/packages/whizard/package.py @@ -103,6 +103,8 @@ def setup_build_environment(self, env): # and seems incompatible with # 
filter_compiler_wrappers, thus the # actual compilers need to be used to build + if self.spec.satisfies("+lcio"): + env.set("LCIO", self.spec["lcio"].prefix) env.set("CC", self.compiler.cc) env.set("CXX", self.compiler.cxx) env.set("FC", self.compiler.fc) From f902ee94b7660a292ee1c1d3611f8f3a03070550 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 5 Oct 2023 18:52:23 +0200 Subject: [PATCH 039/408] unparse: drop python 3.4 remnants (#40333) --- lib/spack/spack/util/unparse/unparser.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index 27dba8f2873b59..c2e6376e509537 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -398,21 +398,6 @@ def visit_ClassDef(self, node): else: comma = True self.dispatch(e) - if sys.version_info[:2] < (3, 5): - if node.starargs: - if comma: - self.write(", ") - else: - comma = True - self.write("*") - self.dispatch(node.starargs) - if node.kwargs: - if comma: - self.write(", ") - else: - comma = True - self.write("**") - self.dispatch(node.kwargs) with self.block(): self.dispatch(node.body) From 0beec04d67e5dd1e3d8b2fa4f8ab8771161a5699 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Thu, 5 Oct 2023 12:13:50 -0500 Subject: [PATCH 040/408] pbmpi: adding new version and maintainer (#40319) --- var/spack/repos/builtin/packages/pbmpi/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/pbmpi/package.py b/var/spack/repos/builtin/packages/pbmpi/package.py index a2739c8a41251c..1fbb07177a0398 100644 --- a/var/spack/repos/builtin/packages/pbmpi/package.py +++ b/var/spack/repos/builtin/packages/pbmpi/package.py @@ -10,8 +10,13 @@ class Pbmpi(MakefilePackage): """A Bayesian software for phylogenetic reconstruction using mixture models""" homepage = "https://megasun.bch.umontreal.ca/People/lartillot/www/index.htm" + url = "https://github.com/bayesiancook/pbmpi/archive/refs/tags/v1.8c.tar.gz" git = "https://github.com/bayesiancook/pbmpi.git" + maintainers("snehring") + + version("1.9", sha256="567d8db995f23b2b0109c1e6088a7e5621e38fec91d6b2f27abd886b90ea31ce") + version("1.8c", sha256="2a80ec4a98d92ace61c67ff9ba78249d45d03094b364959d490b1ad05797a279") version("partition", branch="partition") depends_on("mpi") From 84cfb18b561e95cc554f3dd0a224c67716370293 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 5 Oct 2023 10:32:52 -0700 Subject: [PATCH 041/408] Revert "cray rhel: disable due to runner issues (#40324)" (#40335) This reverts commit bf7f54449ba8ed157c9ee258007e0a7a509600cf. 
--- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 6dbc0d1581aadd..df2c7f85ca2cca 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -846,24 +846,24 @@ deprecated-ci-build: ####################################### # E4S - Cray RHEL ####################################### -# .e4s-cray-rhel: -# extends: [ ".cray_rhel_zen4" ] -# variables: -# SPACK_CI_STACK_NAME: e4s-cray-rhel +.e4s-cray-rhel: + extends: [ ".cray_rhel_zen4" ] + variables: + SPACK_CI_STACK_NAME: e4s-cray-rhel -# e4s-cray-rhel-generate: -# extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ] +e4s-cray-rhel-generate: + extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ] -# e4s-cray-rhel-build: -# extends: [ ".build", ".e4s-cray-rhel" ] -# trigger: -# include: -# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml -# job: e4s-cray-rhel-generate -# strategy: depend -# needs: -# - artifacts: True -# job: e4s-cray-rhel-generate +e4s-cray-rhel-build: + extends: [ ".build", ".e4s-cray-rhel" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-cray-rhel-generate + strategy: depend + needs: + - artifacts: True + job: e4s-cray-rhel-generate ####################################### # E4S - Cray SLES From d6c5c07b8366275f417f47235f2c7916b7b37db7 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Thu, 5 Oct 2023 20:04:44 -0700 Subject: [PATCH 042/408] ecFlow update (#40305) * Support static openssl for ecflow * Update ecflow/static openssl * Update ssl settings in ecflow * add pic variant for ecflow * style fix * Update package.py * Update package.py --- .../repos/builtin/packages/ecflow/package.py | 46 +++++++++++++++++-- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py index 2c30dbbd0cbf17..05519fc04d4963 100644 --- a/var/spack/repos/builtin/packages/ecflow/package.py +++ b/var/spack/repos/builtin/packages/ecflow/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * from spack.pkg.builtin.boost import Boost @@ -19,7 +21,7 @@ class Ecflow(CMakePackage): homepage = "https://confluence.ecmwf.int/display/ECFLOW/" url = "https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-4.11.1-Source.tar.gz" - maintainers("climbfuji") + maintainers("climbfuji", "AlexanderRichert-NOAA") # https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.3-Source.tar.gz?api=v2 version("5.8.3", sha256="1d890008414017da578dbd5a95cb1b4d599f01d5a3bb3e0297fe94a87fbd81a6") @@ -32,6 +34,7 @@ class Ecflow(CMakePackage): "static_boost", default=False, description="Use also static boost libraries when compiling" ) variant("ui", default=False, description="Enable ecflow_ui") + variant("pic", default=False, description="Enable position-independent code (PIC)") extends("python") @@ -59,6 +62,7 @@ class Ecflow(CMakePackage): ) depends_on("openssl@1:", when="@5:") + depends_on("pkgconfig", type="build", when="+ssl ^openssl ~shared") depends_on("qt@5:", when="+ui") # Requirement to use the Python3_EXECUTABLE variable depends_on("cmake@3.16:", type="build") @@ -72,15 +76,47 @@ def patch(self): "Pyext/CMakeLists.txt", ) + @when("+ssl 
^openssl~shared") + def setup_build_environment(self, env): + env.set("LIBS", self.spec["zlib"].libs.search_flags) + def cmake_args(self): - boost_lib = self.spec["boost"].prefix.lib - return [ + spec = self.spec + boost_lib = spec["boost"].prefix.lib + args = [ self.define("Boost_PYTHON_LIBRARY_RELEASE", boost_lib), self.define_from_variant("ENABLE_UI", "ui"), self.define_from_variant("ENABLE_GUI", "ui"), self.define_from_variant("ENABLE_SSL", "ssl"), # https://jira.ecmwf.int/browse/SUP-2641#comment-208943 self.define_from_variant("ENABLE_STATIC_BOOST_LIBS", "static_boost"), - self.define("Python3_EXECUTABLE", self.spec["python"].package.command), - self.define("BOOST_ROOT", self.spec["boost"].prefix), + self.define("Python3_EXECUTABLE", spec["python"].package.command), + self.define("BOOST_ROOT", spec["boost"].prefix), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), ] + + if spec.satisfies("+ssl ^openssl ~shared"): + ssllibs = ";".join(spec["openssl"].libs + spec["zlib"].libs) + args.append(self.define("OPENSSL_CRYPTO_LIBRARY", ssllibs)) + + return args + + # A recursive link in the ecflow source code causes the binary cache + # creation to fail. This file is only in the install tree if the + # --source option is set when installing the package, but force_remove + # acts like "rm -f" and won't abort if the file doesn't exist. + @run_after("install") + def remove_recursive_symlink_in_source_code(self): + force_remove(join_path(self.prefix, "share/ecflow/src/cereal/cereal")) + + @when("+ssl ^openssl~shared") + def patch(self): + pkgconf = which("pkg-config") + liblist_l = pkgconf("--libs-only-l", "--static", "openssl", output=str).split() + liblist = " ".join([ll.replace("-l", "") for ll in liblist_l]) + for sdir in ["Client", "Server"]: + filter_file( + "(target_link_libraries.*pthread)", + f"\\1 {liblist}", + os.path.join(sdir, "CMakeLists.txt"), + ) From 9eadf4785afe35c804fa8cb268895f5f38a853f1 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 6 Oct 2023 09:44:09 +0200 Subject: [PATCH 043/408] python: add 3.12.0 (but keep 3.11 preferred) (#40282) --- lib/spack/spack/build_systems/meson.py | 9 +- .../builtin/packages/flux-core/package.py | 2 + .../repos/builtin/packages/glib/package.py | 3 +- .../repos/builtin/packages/mesa/package.py | 3 +- .../repos/builtin/packages/meson/package.py | 4 + .../packages/meson/python-3.12-support.patch | 283 ++++++++++++++++++ .../repos/builtin/packages/py-cffi/package.py | 4 + .../packages/py-cryptography/package.py | 5 + .../repos/builtin/packages/python/package.py | 11 +- 9 files changed, 318 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin/packages/meson/python-3.12-support.patch diff --git a/lib/spack/spack/build_systems/meson.py b/lib/spack/spack/build_systems/meson.py index 1a5e69e081b274..38939dc7adc323 100644 --- a/lib/spack/spack/build_systems/meson.py +++ b/lib/spack/spack/build_systems/meson.py @@ -10,7 +10,7 @@ import spack.builder import spack.package_base -from spack.directives import build_system, depends_on, variant +from spack.directives import build_system, conflicts, depends_on, variant from spack.multimethod import when from ._checks import BaseBuilder, execute_build_time_tests @@ -47,6 +47,13 @@ class MesonPackage(spack.package_base.PackageBase): variant("strip", default=False, description="Strip targets on install") depends_on("meson", type="build") depends_on("ninja", type="build") + # Python detection in meson requires distutils to be importable, but distutils no longer + # 
exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement, + # because the distutils-precedence.pth startup file that setuptools ships with is not run + # when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson + # release, the distutils requirement will be dropped, so this conflict can be relaxed. + # We have patches to make it work with meson 1.1 and above. + conflicts("^python@3.12:", when="^meson@:1.0") def flags_to_build_system_args(self, flags): """Produces a list of all command line arguments to pass the specified diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 2e5126fc54b2a0..70d8cc9873c24c 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -141,6 +141,8 @@ class FluxCore(AutotoolsPackage): # `link` dependency on python due to Flux's `pymod` module depends_on("python@3.6:", when="@0.17:", type=("build", "link", "run")) depends_on("python@2.7:", type=("build", "link", "run")) + # Use of distutils in configure script dropped in v0.55 + depends_on("python@:3.11", when="@:0.54", type=("build", "link", "run")) depends_on("py-cffi@1.1:", type=("build", "run")) depends_on("py-six@1.9:", when="@:0.24", type=("build", "run")) depends_on("py-pyyaml@3.10:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index e1744d1d23af95..4f8d1415f1304b 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -139,7 +139,8 @@ class Glib(MesonPackage, AutotoolsPackage): depends_on("zlib-api") depends_on("gettext") depends_on("perl", type=("build", "run")) - depends_on("python", type=("build", "run"), when="@2.53.4:") + # Uses distutils in gio/gdbus-2.0/codegen/utils.py + depends_on("python@:3.11", type=("build", "run"), when="@2.53.4:") depends_on("pcre2", when="@2.73.2:") depends_on("pcre2@10.34:", when="@2.74:") depends_on("pcre+utf", when="@2.48:2.73.1") diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index 60fb95beb63740..e1e666a6bcd212 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -52,7 +52,8 @@ class Mesa(MesonPackage): depends_on("cmake", type="build") depends_on("flex", type="build") depends_on("gettext", type="build") - depends_on("python@3:", type="build") + # Upperbound on 3.11 because distutils is used for checking py-mako + depends_on("python@3:3.11", type="build") depends_on("py-mako@0.8.0:", type="build") depends_on("unwind") depends_on("expat") diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index 9cc6a63723951a..cabafbe0011b7b 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -18,6 +18,7 @@ class Meson(PythonPackage): maintainers("eli-schwartz", "michaelkuhn") + version("1.2.2", sha256="1caa0ef6082e311bdca9836e7907f548b8c3f041a42ed41f0ff916b83ac7dddd") version("1.2.1", sha256="e1f3b32b636cc86496261bd89e63f00f206754697c7069788b62beed5e042713") version("1.2.0", sha256="603489f0aaa6305f806c6cc4a4455a965f22290fc74f65871f589b002110c790") version("1.1.1", sha256="1c3b9e1a3a36b51adb5de498d582fd5cbf6763fadbcf151de9f2a762e02bd2e6") @@ -85,6 +86,9 @@ class 
Meson(PythonPackage): # https://github.com/mesonbuild/meson/pull/9850 patch("oneapi.patch", when="@0.62:0.63 %oneapi") + # Python 3.12 detection support + patch("python-3.12-support.patch", when="@1.1:1.2.2") + executables = ["^meson$"] @classmethod diff --git a/var/spack/repos/builtin/packages/meson/python-3.12-support.patch b/var/spack/repos/builtin/packages/meson/python-3.12-support.patch new file mode 100644 index 00000000000000..727976ff26234b --- /dev/null +++ b/var/spack/repos/builtin/packages/meson/python-3.12-support.patch @@ -0,0 +1,283 @@ +From 5f96e35b873d6230970fd63ba2e706bbd3f4e26f Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Fri, 8 Sep 2023 16:54:48 -0400 +Subject: [PATCH 1/6] python dependency: ensure that setuptools doesn't inject + itself into distutils + +We do not use setuptools for anything, and only lightly use distutils. +Unpredictable issues can occur due to setuptools monkey-patching, which +interferes with our intended use. Tell setuptools to simply never get +involved. + +Note: while it's otherwise possible to check if the probe is run using +sys.executable and avoid forking, setuptools unconditionally injects +itself at startup in a way that requires subprocess isolation to +disable. + +(cherry picked from commit 9f610ad5b72ea91de2d7aeb6f3266d0a7477062e) +--- + mesonbuild/dependencies/python.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py +index 160772888..f04494674 100644 +--- a/mesonbuild/dependencies/python.py ++++ b/mesonbuild/dependencies/python.py +@@ -113,7 +113,9 @@ class BasicPythonExternalProgram(ExternalProgram): + + with importlib.resources.path('mesonbuild.scripts', 'python_info.py') as f: + cmd = self.get_command() + [str(f)] +- p, stdout, stderr = mesonlib.Popen_safe(cmd) ++ env = os.environ.copy() ++ env['SETUPTOOLS_USE_DISTUTILS'] = 'stdlib' ++ p, stdout, stderr = mesonlib.Popen_safe(cmd, env=env) + + try: + info = json.loads(stdout) +-- +2.39.2 + + +From cb4e62a8c55118988babac8b8254e0af1dc9698b Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 21 Nov 2022 20:47:14 -0500 +Subject: [PATCH 2/6] python module: stop using distutils schemes on + sufficiently new Debian + +Since 3.10.3, Debian finally started patching sysconfig with custom +paths, instead of just distutils. This means we can now go use that +instead. It reduces our reliance on the deprecated distutils module. + +Partial fix for #7702 + +(cherry picked from commit 40f897fa92f7d3cc43788d3000733310ce77cf0c) +--- + mesonbuild/scripts/python_info.py | 32 +++++++++++++++++++++++-------- + 1 file changed, 24 insertions(+), 8 deletions(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index 9c3a0791a..65597b121 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -13,7 +13,6 @@ if sys.path[0].endswith('scripts'): + del sys.path[0] + + import json, os, sysconfig +-import distutils.command.install + + def get_distutils_paths(scheme=None, prefix=None): + import distutils.dist +@@ -37,15 +36,32 @@ def get_distutils_paths(scheme=None, prefix=None): + # default scheme to a custom one pointing to /usr/local and replacing + # site-packages with dist-packages. + # See https://github.com/mesonbuild/meson/issues/8739. +-# XXX: We should be using sysconfig, but Debian only patches distutils. ++# ++# We should be using sysconfig, but before 3.10.3, Debian only patches distutils. ++# So we may end up falling back. 
+ +-if 'deb_system' in distutils.command.install.INSTALL_SCHEMES: +- paths = get_distutils_paths(scheme='deb_system') +- install_paths = get_distutils_paths(scheme='deb_system', prefix='') +-else: +- paths = sysconfig.get_paths() ++def get_install_paths(): ++ if sys.version_info >= (3, 10): ++ scheme = sysconfig.get_default_scheme() ++ else: ++ scheme = sysconfig._get_default_scheme() ++ ++ if sys.version_info >= (3, 10, 3): ++ if 'deb_system' in sysconfig.get_scheme_names(): ++ scheme = 'deb_system' ++ else: ++ import distutils.command.install ++ if 'deb_system' in distutils.command.install.INSTALL_SCHEMES: ++ paths = get_distutils_paths(scheme='deb_system') ++ install_paths = get_distutils_paths(scheme='deb_system', prefix='') ++ return paths, install_paths ++ ++ paths = sysconfig.get_paths(scheme=scheme) + empty_vars = {'base': '', 'platbase': '', 'installed_base': ''} +- install_paths = sysconfig.get_paths(vars=empty_vars) ++ install_paths = sysconfig.get_paths(scheme=scheme, vars=empty_vars) ++ return paths, install_paths ++ ++paths, install_paths = get_install_paths() + + def links_against_libpython(): + from distutils.core import Distribution, Extension +-- +2.39.2 + + +From c179c18765514d5c37737dec996b4c91cb31477f Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 2 Oct 2023 16:40:15 -0400 +Subject: [PATCH 3/6] python module: refactor pypy detection into a consistent + variable + +(cherry picked from commit 3d3a10ef022284c8377bd9f8e1b1adec73c50d95) +--- + mesonbuild/scripts/python_info.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index 65597b121..d17b3a376 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -72,6 +72,8 @@ def links_against_libpython(): + variables = sysconfig.get_config_vars() + variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)}) + ++is_pypy = '__pypy__' in sys.builtin_module_names ++ + if sys.version_info < (3, 0): + suffix = variables.get('SO') + elif sys.version_info < (3, 8, 7): +@@ -88,7 +90,7 @@ print(json.dumps({ + 'install_paths': install_paths, + 'version': sysconfig.get_python_version(), + 'platform': sysconfig.get_platform(), +- 'is_pypy': '__pypy__' in sys.builtin_module_names, ++ 'is_pypy': is_pypy, + 'is_venv': sys.prefix != variables['base_prefix'], + 'link_libpython': links_against_libpython(), + 'suffix': suffix, +-- +2.39.2 + + +From 3c493dae4bd8410bfb09e8f654605f65e15d8e66 Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Tue, 22 Nov 2022 22:56:10 -0500 +Subject: [PATCH 4/6] python module: stop using distutils "link to libpython" + probe on recent python + +On python >=3.8, this information is expected to be encoded in the +sysconfig vars. + +In distutils, it is always necessary to link to libpython on Windows; +for posix platforms, it depends on the value of LIBPYTHON (which is the +library to link to, possibly the empty string) as generated by +configure.ac and embedded into python.pc and python-config.sh, and then +coded a second time in the distutils python sources. + +There are a couple of caveats which have ramifications for Cygwin and +Android: + +- python.pc and python-config.sh disagree with distutils when python is + not built shared. In that case, the former act the same as a shared + build, while the latter *never* links to libpython + +- python.pc disagrees with python-config.sh and distutils when python is + built shared. 
The former never links to libpython, while the latter do + +The disagreement is resolved in favor of distutils' behavior in all +cases, and python.pc is correct for our purposes on python 3.12; see: +https://github.com/python/cpython/pull/100356 +https://github.com/python/cpython/pull/100967 + +Although it was not backported to older releases, Cygwin at least has +always patched in a fix for python.pc, which behavior is now declared +canonical. We can reliably assume it is always correct. + +This is the other half of the fix for #7702 + +(cherry picked from commit 2d6c10908b3771216e7ce086af1ee4dc77e698c2) +--- + mesonbuild/scripts/python_info.py | 17 +++++++++++++---- + 1 file changed, 13 insertions(+), 4 deletions(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index d17b3a376..a3f3d3535 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -64,10 +64,19 @@ def get_install_paths(): + paths, install_paths = get_install_paths() + + def links_against_libpython(): +- from distutils.core import Distribution, Extension +- cmd = Distribution().get_command_obj('build_ext') +- cmd.ensure_finalized() +- return bool(cmd.get_libraries(Extension('dummy', []))) ++ # on versions supporting python-embed.pc, this is the non-embed lib ++ # ++ # PyPy is not yet up to 3.12 and work is still pending to export the ++ # relevant information (it doesn't automatically provide arbitrary ++ # Makefile vars) ++ if sys.version_info >= (3, 8) and not is_pypy: ++ variables = sysconfig.get_config_vars() ++ return bool(variables.get('LIBPYTHON', 'yes')) ++ else: ++ from distutils.core import Distribution, Extension ++ cmd = Distribution().get_command_obj('build_ext') ++ cmd.ensure_finalized() ++ return bool(cmd.get_libraries(Extension('dummy', []))) + + variables = sysconfig.get_config_vars() + variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)}) +-- +2.39.2 + + +From ae44d9a379faca6274db819be44ffca3e0159f56 Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 2 Oct 2023 23:51:57 -0400 +Subject: [PATCH 5/6] tests: fix test case to not import distutils on python + 3.12 + +Testing the correctness of the `modules: ` kwarg can be done with other +guaranteed stdlib modules that are even more guaranteed since they +didn't get deprecated for removal. 
+ +(cherry picked from commit ecf261330c498783760cbde00b613b7469f8d3c0) +--- + test cases/python/5 modules kwarg/meson.build | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/test cases/python/5 modules kwarg/meson.build b/test cases/python/5 modules kwarg/meson.build +index 9751adaab..41a9a4fae 100644 +--- a/test cases/python/5 modules kwarg/meson.build ++++ b/test cases/python/5 modules kwarg/meson.build +@@ -1,7 +1,7 @@ + project('python kwarg') + + py = import('python') +-prog_python = py.find_installation('python3', modules : ['distutils']) ++prog_python = py.find_installation('python3', modules : ['os', 'sys', 're']) + assert(prog_python.found() == true, 'python not found when should be') + prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false) + assert(prog_python.found() == false, 'python not found but reported as found') +-- +2.39.2 + + +From d9abf4a97dc182b3c57204a792000d620f9f941e Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Tue, 3 Oct 2023 00:22:25 -0400 +Subject: [PATCH 6/6] mark the PyPI metadata as supporting python 3.12 + +meson itself runs okay on 3.12, and the last issue for *probing* against +3.12 is solved. Tests pass here locally. + +(cherry picked from commit 880f21281ee359e01de659fe7d45549d19e6b84d) +--- + setup.cfg | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/setup.cfg b/setup.cfg +index dfaba76dd..2f2962eed 100644 +--- a/setup.cfg ++++ b/setup.cfg +@@ -30,6 +30,7 @@ classifiers = + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 ++ Programming Language :: Python :: 3.12 + Topic :: Software Development :: Build Tools + long_description = Meson is a cross-platform build system designed to be both as fast and as user friendly as possible. It supports many languages and compilers, including GCC, Clang, PGI, Intel, and Visual Studio. Its build definitions are written in a simple non-Turing complete DSL. + +-- +2.39.2 + diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py index 606c678369f49a..ccbe262a159b7d 100644 --- a/var/spack/repos/builtin/packages/py-cffi/package.py +++ b/var/spack/repos/builtin/packages/py-cffi/package.py @@ -24,6 +24,10 @@ class PyCffi(PythonPackage): version("1.10.0", sha256="b3b02911eb1f6ada203b0763ba924234629b51586f72a21faacc638269f4ced5") version("1.1.2", sha256="390970b602708c91ddc73953bb6929e56291c18a4d80f360afa00fad8b6f3339") + # ./spack-src/cffi/ffiplatform.py has _hack_at_distutils which imports + # setuptools before distutils, but only on Windows. 
This could be made + # unconditional to support Python 3.12 + depends_on("python@:3.11", type=("build", "run")) depends_on("pkgconfig", type="build") depends_on("py-setuptools", type="build") depends_on("py-pycparser", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cryptography/package.py b/var/spack/repos/builtin/packages/py-cryptography/package.py index caadcf16f394a0..c17e07377755a0 100644 --- a/var/spack/repos/builtin/packages/py-cryptography/package.py +++ b/var/spack/repos/builtin/packages/py-cryptography/package.py @@ -13,6 +13,7 @@ class PyCryptography(PythonPackage): homepage = "https://github.com/pyca/cryptography" pypi = "cryptography/cryptography-1.8.1.tar.gz" + version("41.0.3", sha256="6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34") version("40.0.2", sha256="c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99") version("38.0.1", sha256="1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7") version("37.0.4", sha256="63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82") @@ -28,12 +29,16 @@ class PyCryptography(PythonPackage): variant("idna", default=False, when="@2.5:3.0", description="Deprecated U-label support") + # distutils required in version <= 40 + depends_on("python@:3.11", when="@:40", type=("build", "run")) + depends_on("py-setuptools@61.0:", when="@41:", type="build") depends_on("py-setuptools@40.6:60.8,60.9.1:", when="@37:", type="build") depends_on("py-setuptools@40.6:", when="@2.7:36", type="build") depends_on("py-setuptools@18.5:", when="@2.2:2.6", type="build") depends_on("py-setuptools@11.3:", when="@:2.1", type="build") depends_on("py-setuptools-rust@0.11.4:", when="@3.4.2:", type="build") depends_on("py-setuptools-rust@0.11.4:", when="@3.4:3.4.1", type=("build", "run")) + depends_on("rust@1.56:", when="@41:", type="build") depends_on("rust@1.48:", when="@38:", type="build") depends_on("rust@1.41:", when="@3.4.5:", type="build") depends_on("rust@1.45:", when="@3.4.3:3.4.4", type="build") diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 2a904c738644d9..c07ad5d6ad461e 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -40,7 +40,12 @@ class Python(Package): install_targets = ["install"] build_targets: List[str] = [] - version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63") + version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") + version( + "3.11.4", + sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63", + preferred=True, + ) version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048") version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849") version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4") @@ -272,7 +277,7 @@ class Python(Package): patch("python-3.7.2-distutils-C++.patch", when="@3.7.2") patch("python-3.7.3-distutils-C++.patch", when="@3.7.3") patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:3.10") - patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:") + patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:3.11") patch("python-3.11-distutils-C++.patch", when="@3.11.0:3.11") patch("cpython-windows-externals.patch", when="@:3.9.6 platform=windows") patch("tkinter-3.7.patch", when="@3.7 
platform=darwin") @@ -287,7 +292,7 @@ class Python(Package): # Ensure that distutils chooses correct compiler option for RPATH on fj: patch("fj-rpath-3.1.patch", when="@:3.9.7,3.10.0 %fj") - patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1: %fj") + patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1:3.11 %fj") # Fixes build with the Intel compilers # https://github.com/python/cpython/pull/16717 From d4d608f9981c9eb2c7aa8c0c41de6d2d86228304 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 6 Oct 2023 10:24:21 +0200 Subject: [PATCH 044/408] Make "minimal" the default duplicate strategy (#39621) * Allow branching out of the "generic build" unification set For cases like the one in https://github.com/spack/spack/pull/39661 we need to relax rules on unification sets. The issue is that, right now, nodes in the "generic build" unification set are unified together with their build dependencies. This was done out of caution to avoid the risk of circular dependencies, which would ultimately cause a very slow solve. For build-tools like Cython, however, the build dependencies is masked by a long chain of "build, run" dependencies that belong in the "generic build" unification space. To allow splitting on cases like this, we relax the rule disallowing branching out of the "generic build" unification set. * Fix issue with pure build virtual dependencies Pure build virtual dependencies were not accounted properly in the list of possible virtuals. This caused some facts connecting virtuals to the corresponding providers to not be emitted, and in the end lead to unsat problems. * Fixed a few issues in packages py-gevent: restore dependency on py-cython@3 jsoncpp: fix typo in build dependency ecp-data-vis-sdk: update spack.yaml and cmake recipe py-statsmodels: add v0.13.5 * Make dependency on "blt" of type "build" --- etc/spack/defaults/concretizer.yaml | 2 +- lib/spack/docs/build_settings.rst | 167 +- lib/spack/docs/images/shapely_duplicates.svg | 2784 +++++++++++++++++ lib/spack/spack/solver/asp.py | 1 + lib/spack/spack/solver/concretize.lp | 19 +- lib/spack/spack/solver/counter.py | 15 +- lib/spack/spack/spec.py | 11 +- lib/spack/spack/test/concretize.py | 28 +- .../spack/test/data/config/concretizer.yaml | 2 +- .../stacks/data-vis-sdk/spack.yaml | 16 +- .../repos/builtin/packages/camp/package.py | 2 +- .../repos/builtin/packages/cmake/package.py | 2 +- .../repos/builtin/packages/jsoncpp/package.py | 2 +- .../repos/builtin/packages/mgard/package.py | 2 +- .../builtin/packages/py-gevent/package.py | 3 +- .../packages/py-statsmodels/package.py | 12 +- .../repos/builtin/packages/raja/package.py | 2 +- .../repos/builtin/packages/umpire/package.py | 2 +- .../packages/pkg-config/package.py | 16 + .../packages/virtual-build/package.py | 16 + 20 files changed, 2984 insertions(+), 120 deletions(-) create mode 100644 lib/spack/docs/images/shapely_duplicates.svg create mode 100644 var/spack/repos/duplicates.test/packages/pkg-config/package.py create mode 100644 var/spack/repos/duplicates.test/packages/virtual-build/package.py diff --git a/etc/spack/defaults/concretizer.yaml b/etc/spack/defaults/concretizer.yaml index 598bb8c349d0b7..edefa552cee227 100644 --- a/etc/spack/defaults/concretizer.yaml +++ b/etc/spack/defaults/concretizer.yaml @@ -41,4 +41,4 @@ concretizer: # "none": allows a single node for any package in the DAG. # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.) # "full" (experimental): allows separation of the entire build-tool stack (e.g. 
the entire "cmake" subDAG) - strategy: none \ No newline at end of file + strategy: minimal diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 0bbd27e8c32258..402b33f6a2585b 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -3,6 +3,103 @@ SPDX-License-Identifier: (Apache-2.0 OR MIT) + +.. _concretizer-options: + +========================================== +Concretization Settings (concretizer.yaml) +========================================== + +The ``concretizer.yaml`` configuration file allows to customize aspects of the +algorithm used to select the dependencies you install. The default configuration +is the following: + +.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml + :language: yaml + +-------------------------------- +Reuse already installed packages +-------------------------------- + +The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or +whether it will do a "fresh" installation and prefer the latest settings from +``package.py`` files and ``packages.yaml`` (``false``). +You can use: + +.. code-block:: console + + % spack install --reuse + +to enable reuse for a single installation, and you can use: + +.. code-block:: console + + spack install --fresh + +to do a fresh install if ``reuse`` is enabled by default. +``reuse: true`` is the default. + +------------------------------------------ +Selection of the target microarchitectures +------------------------------------------ + +The options under the ``targets`` attribute control which targets are considered during a solve. +Currently the options in this section are only configurable from the ``concretizer.yaml`` file +and there are no corresponding command line arguments to enable them for a single solve. + +The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``. +If set to: + +.. code-block:: yaml + + concretizer: + targets: + granularity: microarchitectures + +Spack will consider all the microarchitectures known to ``archspec`` to label nodes for +compatibility. If instead the option is set to: + +.. code-block:: yaml + + concretizer: + targets: + granularity: generic + +Spack will consider only generic microarchitectures. For instance, when running on an +Haswell node, Spack will consider ``haswell`` as the best target in the former case and +``x86_64_v3`` as the best target in the latter case. + +The ``host_compatible`` option is a Boolean option that determines whether or not the +microarchitectures considered during the solve are constrained to be compatible with the +host Spack is currently running on. For instance, if this option is set to ``true``, a +user cannot concretize for ``target=icelake`` while running on an Haswell node. + +--------------- +Duplicate nodes +--------------- + +The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of +the same package. This is mainly relevant for build dependencies, which may have their version +pinned by some nodes, and thus be required at different versions by different nodes in the same +DAG. + +The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``, +then a single configuration per package is allowed in the DAG. This means, for instance, that only +a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly +faster concretization, at the expense of making a few specs unsolvable. 
+ +If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates. +This allows, for instance, to concretize specs whose nodes require different, and incompatible, ranges +of some build tool. For instance, in the figure below the latest `py-shapely` requires a newer `py-setuptools`, +while `py-numpy` still needs an older version: + +.. figure:: images/shapely_duplicates.svg + :scale: 70 % + :align: center + +Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the +default behavior is ``duplicates:strategy:minimal``. + .. _build-settings: ================================ @@ -232,76 +329,6 @@ Specific limitations include: then Spack will not add a new external entry (``spack config blame packages`` can help locate all external entries). -.. _concretizer-options: - ----------------------- -Concretizer options ----------------------- - -``packages.yaml`` gives the concretizer preferences for specific packages, -but you can also use ``concretizer.yaml`` to customize aspects of the -algorithm it uses to select the dependencies you install: - -.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml - :language: yaml - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Reuse already installed packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or -whether it will do a "fresh" installation and prefer the latest settings from -``package.py`` files and ``packages.yaml`` (``false``). -You can use: - -.. code-block:: console - - % spack install --reuse - -to enable reuse for a single installation, and you can use: - -.. code-block:: console - - spack install --fresh - -to do a fresh install if ``reuse`` is enabled by default. -``reuse: true`` is the default. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Selection of the target microarchitectures -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The options under the ``targets`` attribute control which targets are considered during a solve. -Currently the options in this section are only configurable from the ``concretizer.yaml`` file -and there are no corresponding command line arguments to enable them for a single solve. - -The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``. -If set to: - -.. code-block:: yaml - - concretizer: - targets: - granularity: microarchitectures - -Spack will consider all the microarchitectures known to ``archspec`` to label nodes for -compatibility. If instead the option is set to: - -.. code-block:: yaml - - concretizer: - targets: - granularity: generic - -Spack will consider only generic microarchitectures. For instance, when running on an -Haswell node, Spack will consider ``haswell`` as the best target in the former case and -``x86_64_v3`` as the best target in the latter case. - -The ``host_compatible`` option is a Boolean option that determines whether or not the -microarchitectures considered during the solve are constrained to be compatible with the -host Spack is currently running on. For instance, if this option is set to ``true``, a -user cannot concretize for ``target=icelake`` while running on an Haswell node. - .. 
_package-requirements: -------------------- diff --git a/lib/spack/docs/images/shapely_duplicates.svg b/lib/spack/docs/images/shapely_duplicates.svg new file mode 100644 index 00000000000000..912f03b2e526cd --- /dev/null +++ b/lib/spack/docs/images/shapely_duplicates.svg @@ -0,0 +1,2784 @@
+ [SVG figure body omitted: Graphviz dependency graph for py-shapely@2.0.1 illustrating duplicated build-tool nodes, i.e. two py-setuptools versions (59.4.0 and 68.0.0) alongside py-numpy@1.25.2, py-cython@0.29.36, python@3.11.4, geos, cmake and their dependencies]
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index f3a8cfae6b0143..18328acd62235b 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -2595,6 +2595,7 @@ class SpecBuilder: r"^node_compiler$", r"^package_hash$", r"^root$", + r"^variant_default_value_from_cli$", r"^virtual_node$", r"^virtual_root$", ] diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 26d167ce69afc9..fcd546cbeef0d9 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -20,7 +20,7 @@ % Integrity constraints on DAG nodes :- attr("root", PackageNode), not attr("node", PackageNode).
-:- attr("version", PackageNode), not attr("node", PackageNode). +:- attr("version", PackageNode, _), not attr("node", PackageNode), not attr("virtual_node", PackageNode). :- attr("node_version_satisfies", PackageNode), not attr("node", PackageNode). :- attr("hash", PackageNode, _), not attr("node", PackageNode). :- attr("node_platform", PackageNode, _), not attr("node", PackageNode). @@ -58,7 +58,6 @@ unification_set(SetID, ChildNode) :- attr("depends_on", ParentNode, ChildNode, T unification_set(("build", node(X, Child)), node(X, Child)) :- attr("depends_on", ParentNode, node(X, Child), Type), Type == "build", - SetID != "generic_build", multiple_unification_sets(Child), unification_set(SetID, ParentNode). @@ -68,18 +67,18 @@ unification_set("generic_build", node(X, Child)) not multiple_unification_sets(Child), unification_set(_, ParentNode). -% Any dependency of type "build" in a unification set that is in the leaf unification set, -% stays in that unification set -unification_set(SetID, ChildNode) - :- attr("depends_on", ParentNode, ChildNode, Type), - Type == "build", - SetID == "generic_build", - unification_set(SetID, ParentNode). - unification_set(SetID, VirtualNode) :- provider(PackageNode, VirtualNode), unification_set(SetID, PackageNode). +% Do not allow split dependencies, for now. This ensures that we don't construct graphs where e.g. +% a python extension depends on setuptools@63.4 as a run dependency, but uses e.g. setuptools@68 +% as a build dependency. +% +% We'll need to relax the rule before we get to actual cross-compilation +:- depends_on(ParentNode, node(X, Dependency)), depends_on(ParentNode, node(Y, Dependency)), X < Y. + + #defined multiple_unification_sets/1. %---- diff --git a/lib/spack/spack/solver/counter.py b/lib/spack/spack/solver/counter.py index b238f60d8c0882..28883817dfe564 100644 --- a/lib/spack/spack/solver/counter.py +++ b/lib/spack/spack/solver/counter.py @@ -5,6 +5,8 @@ import collections from typing import List, Set +from llnl.util import lang + import spack.deptypes as dt import spack.package_base import spack.repo @@ -95,8 +97,17 @@ def _compute_cache_values(self): ) self._link_run_virtuals.update(self._possible_virtuals) for x in self._link_run: - current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD) - self._direct_build.update(current) + build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD) + virtuals, reals = lang.stable_partition( + build_dependencies, spack.repo.PATH.is_virtual_safe + ) + + self._possible_virtuals.update(virtuals) + for virtual_dep in virtuals: + providers = spack.repo.PATH.providers_for(virtual_dep) + self._direct_build.update(str(x) for x in providers) + + self._direct_build.update(reals) self._total_build = set( spack.package_base.possible_dependencies( diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 4df6c1eb857dbc..0f6589f0e6be16 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1607,13 +1607,20 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[ try: dspec = next(dspec for dspec in orig if depflag == dspec.depflag) except StopIteration: - raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec) + current_deps = ", ".join( + dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig + ) + raise DuplicateDependencyError( + f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n" + f"\tRequired: {dt.flag_to_chars(depflag)}\n" + f"\tDependency: {current_deps}" + 
) try: dspec.spec.constrain(spec) except spack.error.UnsatisfiableSpecError: raise DuplicateDependencyError( - "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec) + f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'" ) def add_dependency_edge( diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b4d94a2610575e..a794c8f1fd6b4c 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -2175,12 +2175,9 @@ def duplicates_test_repository(): @pytest.mark.usefixtures("mutable_config", "duplicates_test_repository") +@pytest.mark.only_clingo("Not supported by the original concretizer") class TestConcretizeSeparately: @pytest.mark.parametrize("strategy", ["minimal", "full"]) - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_two_gmake(self, strategy): """Tests that we can concretize a spec with nodes using the same build dependency pinned at different versions. @@ -2205,10 +2202,6 @@ def test_two_gmake(self, strategy): assert len(pinned_gmake) == 1 and pinned_gmake[0].satisfies("@=3.0") @pytest.mark.parametrize("strategy", ["minimal", "full"]) - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_two_setuptools(self, strategy): """Tests that we can concretize separate build dependencies, when we are dealing with extensions. @@ -2245,10 +2238,6 @@ def test_two_setuptools(self, strategy): gmake = s["python"].dependencies(name="gmake", deptype="build") assert len(gmake) == 1 and gmake[0].satisfies("@=3.0") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_solution_without_cycles(self): """Tests that when we concretize a spec with cycles, a fallback kicks in to recompute a solution without cycles. @@ -2261,6 +2250,21 @@ def test_solution_without_cycles(self): assert s["cycle-a"].satisfies("~cycle") assert s["cycle-b"].satisfies("+cycle") + @pytest.mark.parametrize("strategy", ["minimal", "full"]) + def test_pure_build_virtual_dependency(self, strategy): + """Tests that we can concretize a pure build virtual dependency, and ensures that + pure build virtual dependencies are accounted in the list of possible virtual + dependencies. 
+ + virtual-build@1.0 + | [type=build, virtual=pkgconfig] + pkg-config@1.0 + """ + spack.config.CONFIG.set("concretizer:duplicates:strategy", strategy) + + s = Spec("virtual-build").concretized() + assert s["pkgconfig"].name == "pkg-config" + @pytest.mark.parametrize( "v_str,v_opts,checksummed", diff --git a/lib/spack/spack/test/data/config/concretizer.yaml b/lib/spack/spack/test/data/config/concretizer.yaml index ecf121a9170563..0dd810163dd77a 100644 --- a/lib/spack/spack/test/data/config/concretizer.yaml +++ b/lib/spack/spack/test/data/config/concretizer.yaml @@ -4,4 +4,4 @@ concretizer: granularity: microarchitectures host_compatible: false duplicates: - strategy: none + strategy: minimal diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index b4534f0814f8cc..9963f4b777c4e9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -4,22 +4,16 @@ spack: cmake: variants: ~ownlibs ecp-data-vis-sdk: - require: - - one_of: - - +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs - +veloc +vtkm +zfp - - one_of: - - +paraview ~visit - - ~paraview +visit + require: "+ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs +veloc +vtkm +zfp" hdf5: require: - one_of: ['@1.14', '@1.12'] mesa: - require: +glx +osmesa +opengl ~opengles +llvm + require: "+glx +osmesa +opengl ~opengles +llvm" libosmesa: - require: mesa +osmesa + require: "mesa +osmesa" libglx: - require: mesa +glx + require: "mesa +glx" ospray: require: '@2.8.0 +denoiser +mpi' llvm: @@ -57,9 +51,11 @@ spack: # Test ParaView and VisIt builds with different GL backends - matrix: - [$sdk_base_spec] + - ["+paraview ~visit"] - [$^paraview_specs] - matrix: - [$sdk_base_spec] + - ["~paraview +visit"] - [$^visit_specs] mirrors: {mirror: s3://spack-binaries/develop/data-vis-sdk} diff --git a/var/spack/repos/builtin/packages/camp/package.py b/var/spack/repos/builtin/packages/camp/package.py index 8d7b4f62b1d2da..a1bdc830d36eab 100644 --- a/var/spack/repos/builtin/packages/camp/package.py +++ b/var/spack/repos/builtin/packages/camp/package.py @@ -53,7 +53,7 @@ class Camp(CMakePackage, CudaPackage, ROCmPackage): depends_on("cub", when="+cuda") - depends_on("blt") + depends_on("blt", type="build") conflicts("^blt@:0.3.6", when="+rocm") diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index c4c61aee82a2f9..796eb4c397045f 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -239,7 +239,7 @@ class Cmake(Package): depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") depends_on("libuv@1.10.0:", when="@3.12.0:") depends_on("rhash", when="@3.8.0:") - depends_on("jsoncpp", when="@3.2:") + depends_on("jsoncpp build_system=meson", when="@3.2:") depends_on("ncurses", when="+ncurses") diff --git a/var/spack/repos/builtin/packages/jsoncpp/package.py b/var/spack/repos/builtin/packages/jsoncpp/package.py index 55c2f22a028d3e..bd20e2b22e1a23 100644 --- a/var/spack/repos/builtin/packages/jsoncpp/package.py +++ b/var/spack/repos/builtin/packages/jsoncpp/package.py @@ -47,7 +47,7 @@ class Jsoncpp(CMakePackage, MesonPackage): with when("build_system=cmake"): depends_on("cmake@3.1:", type="build") - depends_on("cmake@1.9:", when="@1.9:", type="build") + depends_on("cmake@3.9:", when="@1.9:", 
type="build") with when("build_system=meson"): depends_on("meson@0.49.0:", type="build") diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py index ca9f2f46e44b9d..b58f4c0bba0aa6 100644 --- a/var/spack/repos/builtin/packages/mgard/package.py +++ b/var/spack/repos/builtin/packages/mgard/package.py @@ -50,7 +50,7 @@ class Mgard(CMakePackage, CudaPackage): depends_on("libarchive", when="@2021-11-12:") depends_on("tclap", when="@2021-11-12") depends_on("yaml-cpp", when="@2021-11-12:") - depends_on("cmake@3.19:") + depends_on("cmake@3.19:", type="build") depends_on("nvcomp@2.2.0:", when="@2022-11-18:+cuda") depends_on("nvcomp@2.0.2", when="@:2021-11-12+cuda") conflicts("cuda_arch=none", when="+cuda") diff --git a/var/spack/repos/builtin/packages/py-gevent/package.py b/var/spack/repos/builtin/packages/py-gevent/package.py index b684829f5406f6..c41b815ef24320 100644 --- a/var/spack/repos/builtin/packages/py-gevent/package.py +++ b/var/spack/repos/builtin/packages/py-gevent/package.py @@ -24,8 +24,7 @@ class PyGevent(PythonPackage): depends_on("py-setuptools@40.8:", when="@20.5.1:", type=("build", "run")) depends_on("py-setuptools@40.8:", when="@1.5:", type="build") depends_on("py-setuptools@24.2:", when="@:1.4", type="build") - # TODO: relax this until we support separate concretization of build deps by default - # depends_on("py-cython@3:", when="@20.5.1:", type="build") + depends_on("py-cython@3:", when="@20.5.1:", type="build") depends_on("py-cython@0.29.14:", when="@1.5:", type="build") depends_on("py-cffi@1.12.3:", type=("build", "run")) depends_on("py-greenlet@3:", when="@23.7: ^python@3.12:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-statsmodels/package.py b/var/spack/repos/builtin/packages/py-statsmodels/package.py index c99901d9ae6bd5..36968986a33abc 100644 --- a/var/spack/repos/builtin/packages/py-statsmodels/package.py +++ b/var/spack/repos/builtin/packages/py-statsmodels/package.py @@ -14,6 +14,7 @@ class PyStatsmodels(PythonPackage): homepage = "https://www.statsmodels.org" pypi = "statsmodels/statsmodels-0.8.0.tar.gz" + version("0.13.5", sha256="593526acae1c0fda0ea6c48439f67c3943094c542fe769f8b90fe9e6c6cc4871") version("0.13.2", sha256="77dc292c9939c036a476f1770f9d08976b05437daa229928da73231147cde7d4") version("0.13.1", sha256="006ec8d896d238873af8178d5475203844f2c391194ed8d42ddac37f5ff77a69") version("0.13.0", sha256="f2efc02011b7240a9e851acd76ab81150a07d35c97021cb0517887539a328f8a") @@ -25,12 +26,15 @@ class PyStatsmodels(PythonPackage): depends_on("python@2.7:2.8,3.4:", when="@0.10.1:", type=("build", "link", "run")) depends_on("python@3.6:", when="@0.12.1:", type=("build", "link", "run")) - # according to https://www.statsmodels.org/dev/install.html earlier versions - # might work. - depends_on("py-setuptools@0.6c5:", type="build") - depends_on("py-cython@0.29:", type="build") + # according to https://www.statsmodels.org/dev/install.html earlier versions might work. 
+ depends_on("py-setuptools", type="build") + depends_on("py-setuptools@59.2.0:", type="build", when="@0.13.5:") + + # https://github.com/statsmodels/statsmodels/blob/01b19d7d111b29c183f620ff0a949ef6391ff8ee/pyproject.toml + depends_on("py-cython@0", type="build") depends_on("py-cython@0.29.14:", type="build", when="@0.12.0:") depends_on("py-cython@0.29.22:", type="build", when="@0.13.0:") + depends_on("py-cython@0.29.32:", type="build", when="@0.13.5:") # patsy@0.5.1 works around a Python change # https://github.com/statsmodels/statsmodels/issues/5343 and diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 88505b3a81be37..cc1ede76be35f4 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -137,7 +137,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@:3.20", when="@:2022.03+rocm", type="build") depends_on("cmake@3.23:", when="@2022.10:+rocm", type="build") - depends_on("cmake@3.14:", when="@2022.03.0:") + depends_on("cmake@3.14:", when="@2022.03.0:", type="build") depends_on("llvm-openmp", when="+openmp %apple-clang") diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index 51f0c034b2f66b..c64bfdf256db78 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -168,7 +168,7 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.8:", type="build") depends_on("cmake@3.9:", when="+cuda", type="build") - depends_on("cmake@3.14:", when="@2022.03.0:") + depends_on("cmake@3.14:", when="@2022.03.0:", type="build") depends_on("blt@0.5.2:", type="build", when="@2022.10.0:") depends_on("blt@0.5.0:", type="build", when="@2022.03.0:") diff --git a/var/spack/repos/duplicates.test/packages/pkg-config/package.py b/var/spack/repos/duplicates.test/packages/pkg-config/package.py new file mode 100644 index 00000000000000..eb7b84b88fc87c --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/pkg-config/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PkgConfig(Package): + """A package providing a virtual, which is frequently used as a pure build dependency.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.0.0", md5="0123456789abcdef0123456789abcdef") + + provides("pkgconfig") diff --git a/var/spack/repos/duplicates.test/packages/virtual-build/package.py b/var/spack/repos/duplicates.test/packages/virtual-build/package.py new file mode 100644 index 00000000000000..17fc60955d9b4f --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/virtual-build/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class VirtualBuild(Package): + """A package that has a pure build virtual dependency""" + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.0.0", md5="0123456789abcdef0123456789abcdef") + + depends_on("pkgconfig", type="build") From a0a270d4088a66ec5bcae560b9f5408c3794cbcd Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 6 Oct 2023 18:37:42 +0200 Subject: [PATCH 045/408] openssh: 9.5p1 (#40354) --- var/spack/repos/builtin/packages/openssh/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openssh/package.py b/var/spack/repos/builtin/packages/openssh/package.py index 49a14138934c97..64da3202e3a77b 100755 --- a/var/spack/repos/builtin/packages/openssh/package.py +++ b/var/spack/repos/builtin/packages/openssh/package.py @@ -23,6 +23,7 @@ class Openssh(AutotoolsPackage): tags = ["core-packages"] + version("9.5p1", sha256="f026e7b79ba7fb540f75182af96dc8a8f1db395f922bbc9f6ca603672686086b") version("9.4p1", sha256="3608fd9088db2163ceb3e600c85ab79d0de3d221e59192ea1923e23263866a85") version("9.3p1", sha256="e9baba7701a76a51f3d85a62c383a3c9dcd97fa900b859bc7db114c1868af8a8") version("9.2p1", sha256="3f66dbf1655fb45f50e1c56da62ab01218c228807b21338d634ebcdf9d71cf46") From 87094bddcc532547f23e95ed3c50d96716a81ea9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 6 Oct 2023 18:42:47 +0200 Subject: [PATCH 046/408] unparse: drop python 2 remnants (#40329) --- lib/spack/spack/util/unparse/unparser.py | 40 ------------------------ 1 file changed, 40 deletions(-) diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index c2e6376e509537..375fb543a31494 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -270,16 +270,6 @@ def visit_Assert(self, node): self.write(", ") self.dispatch(node.msg) - def visit_Exec(self, node): - self.fill("exec ") - self.dispatch(node.body) - if node.globals: - self.write(" in ") - self.dispatch(node.globals) - if node.locals: - self.write(", ") - self.dispatch(node.locals) - def visit_Global(self, node): self.fill("global ") interleave(lambda: self.write(", "), self.write, node.names) @@ -338,31 +328,6 @@ def visit_Try(self, node): with self.block(): self.dispatch(node.finalbody) - def visit_TryExcept(self, node): - self.fill("try") - with self.block(): - self.dispatch(node.body) - - for ex in node.handlers: - self.dispatch(ex) - if node.orelse: - self.fill("else") - with self.block(): - self.dispatch(node.orelse) - - def visit_TryFinally(self, node): - if len(node.body) == 1 and isinstance(node.body[0], ast.TryExcept): - # try-except-finally - self.dispatch(node.body) - else: - self.fill("try") - with self.block(): - self.dispatch(node.body) - - self.fill("finally") - with self.block(): - self.dispatch(node.finalbody) - def visit_ExceptHandler(self, node): self.fill("except") if node.type: @@ -633,11 +598,6 @@ def visit_Name(self, node): def visit_NameConstant(self, node): self.write(repr(node.value)) - def visit_Repr(self, node): - self.write("`") - self.dispatch(node.value) - self.write("`") - def _write_constant(self, value): if isinstance(value, (float, complex)): # Substitute overflowing decimal literal for AST infinities. 
From 1dbe680e80f19f8850f60f87f6d90d770bb7f7b0 Mon Sep 17 00:00:00 2001 From: Sam Gillingham Date: Sat, 7 Oct 2023 05:49:06 +1100 Subject: [PATCH 047/408] py-tuiview: add recent versions of tuiview (#40244) * add recent versions of tuiview and remove Qt4 version * reformat * fix stray tabs * add back a deprecated 1.1.7 * tabs * more tabs * reformat * comma --- .../builtin/packages/py-tuiview/package.py | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py index 7824f834e704d3..40c94a6fe1394e 100644 --- a/var/spack/repos/builtin/packages/py-tuiview/package.py +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -12,14 +12,28 @@ class PyTuiview(PythonPackage): """ homepage = "https://github.com/ubarsc/tuiview" - url = "https://github.com/ubarsc/tuiview/releases/download/tuiview-1.2.6/tuiview-1.2.6.tar.gz" + url = ( + "https://github.com/ubarsc/tuiview/releases/download/tuiview-1.2.13/TuiView-1.2.13.tar.gz" + ) + version("1.2.13", sha256="48c8d4175c324f70941dc49c5a119882c9d501bd20bc13c76bc2455dee5236a5") + version("1.2.12", sha256="3f0c1673f2f861db01726f3d7f6f1dde4a42ec57894a79b89457c398768dd25f") + version("1.2.11", sha256="81f870ad98ec1e3175f25028d261135b6198fa85038bfaa900789e04e3cf8517") + version("1.2.10", sha256="5ea777a4e89780488b03b346f00b586b46a0bd4c8a994e6def46a6494fa486ef") + version("1.2.9", sha256="b5d11e9501cf61cf62f1223416dfe408cf604ae48c06d697589dfc0a606ad6a9") + version("1.2.8", sha256="e75950908a2d1f7c7216dfeead82483e1d3b0267fff9561549d85ca00725456b") + version("1.2.7", sha256="35dfeb79b2bb57dfb5b8c90c3edf8c8a0a3f89cef85c33f9935e4a4add282aaf") version("1.2.6", sha256="61b136fa31c949d7a7a4dbf8562e6fc677d5b1845b152ec39e337f4eb2e91662") - version("1.1.7", sha256="fbf0bf29cc775357dad4f8a2f0c2ffa98bbf69d603a96353e75b321adef67573") + version( + "1.1.7", + sha256="fbf0bf29cc775357dad4f8a2f0c2ffa98bbf69d603a96353e75b321adef67573", + deprecated=True, + ) # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-pyqt4", type=("build", "run"), when="@:1.1") depends_on("py-pyqt5", type=("build", "run"), when="@1.2.0:") - depends_on("py-numpy", type=("build", "run")) - depends_on("gdal@1.11.0:+python") + depends_on("py-numpy", type=("build", "link", "run")) + depends_on("gdal+geos+python", type=("build", "run"), when="@1.2.0:") + depends_on("gdal@1.11.0:+python", when="@:1.1") From c72ed5cdebacc01949bbbff4689d151f49a6fa57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 6 Oct 2023 20:49:59 +0200 Subject: [PATCH 048/408] [add] py-metomi-rose: new recipe, required by py-cylc-rose (#39981) * [add] py-metomi-rose: new recipe, required by py-cylc-rose * py-metomi-rose: remove version constraint on python --------- Co-authored-by: LydDeb --- .../packages/py-metomi-rose/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-metomi-rose/package.py diff --git a/var/spack/repos/builtin/packages/py-metomi-rose/package.py b/var/spack/repos/builtin/packages/py-metomi-rose/package.py new file mode 100644 index 00000000000000..161edeb582ce8e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-metomi-rose/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMetomiRose(PythonPackage): + """Rose, a framework for meteorological suites.""" + + homepage = "https://metomi.github.io/rose/doc/html/index.html" + pypi = "metomi-rose/metomi-rose-2.1.0.tar.gz" + + maintainers("LydDeb") + + version("2.1.0", sha256="1b60135a434fe4325d364a57e8f5e81e90f39b373b9d68733458c1adc2513c05") + + depends_on("py-setuptools", type="build") + depends_on("py-aiofiles", type=("build", "run")) + depends_on("py-jinja2@2.10.1:", type=("build", "run")) + depends_on("py-keyring@23", type=("build", "run")) + depends_on("py-ldap3", type=("build", "run")) + depends_on("py-metomi-isodatetime@3", type=("build", "run")) + depends_on("py-psutil@5.6.0:", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-sqlalchemy@1", type=("build", "run")) From ed70eef6ee3a5f05504b8e1c0216a319ed4922fe Mon Sep 17 00:00:00 2001 From: Sam Gillingham Date: Sat, 7 Oct 2023 05:52:15 +1100 Subject: [PATCH 049/408] RIOS: add recent versions (#40243) * add recent versions of RIOS * fix depends_on syntax * fix typo * fix sha and add parallel variant * remove self * try doing in one --- var/spack/repos/builtin/packages/py-rios/package.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-rios/package.py b/var/spack/repos/builtin/packages/py-rios/package.py index de2f54c2ae2211..a2716f648e703e 100644 --- a/var/spack/repos/builtin/packages/py-rios/package.py +++ b/var/spack/repos/builtin/packages/py-rios/package.py @@ -15,11 +15,20 @@ class PyRios(PythonPackage): """ homepage = "https://www.rioshome.org/en/latest/" - url = "https://github.com/ubarsc/rios/archive/rios-1.4.10.tar.gz" + url = "https://github.com/ubarsc/rios/releases/download/rios-1.4.16/rios-1.4.16.tar.gz" - version("1.4.10", sha256="7f11b54eb1f2ec551d7fc01c039b60bf2c67f0c2fc5b2946f8d986d6a9bc7063") + version("1.4.16", sha256="2f553d85ff4ff26bfda2a8c6bd3d9dcce5ace847f7d9bd2f072c8943f3758ded") + version("1.4.15", sha256="71670508dbffcd8f5d24fbb25e6a2b7e1d23b5e899ddc78c90d403bd65981cf4") + version("1.4.14", sha256="ea22fde3fe70004aa1ad46bd36fad58f3346e9c161ca44ac913518a6e4fcad82") + version("1.4.13", sha256="9f99f41f20ce769101e61bc8347aa96718e6e5ac37ccb47cb3e555dc4ca83427") + version("1.4.12", sha256="6d897488ce1ca77e470483472998afcb2eb3bb3307f392a924b85f88a16d73eb") + version("1.4.11", sha256="b7ae5311f987b32f1afe1fabc16f25586de8d15c17a69405d1950aeada7b748e") + version("1.4.10", sha256="6324acccc6018f9e06c40370bc366dc459890e8c09d26e0ebd245f6fd46dad71") + + variant("parallel", default=True, description="Enables the parallel processing module") # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("gdal+python", type=("build", "run")) + depends_on("py-cloudpickle", type=("build", "run"), when="@1.4.16:+parallel") From 1b6832f7fdca0bf0c6057482950469f740f78480 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 6 Oct 2023 20:54:25 +0200 Subject: [PATCH 050/408] py-zipp: add 3.17.0 (#40278) * py-zipp: add 3.17.0 * Re-add python@3.7 dependency --- .../repos/builtin/packages/py-zipp/package.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-zipp/package.py b/var/spack/repos/builtin/packages/py-zipp/package.py index 50a9abe35ae154..113506c3ea03c5 
100644 --- a/var/spack/repos/builtin/packages/py-zipp/package.py +++ b/var/spack/repos/builtin/packages/py-zipp/package.py @@ -12,16 +12,19 @@ class PyZipp(PythonPackage): homepage = "https://github.com/jaraco/zipp" pypi = "zipp/zipp-0.6.0.tar.gz" + version("3.17.0", sha256="84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0") version("3.8.1", sha256="05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2") version("3.6.0", sha256="71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832") version("0.6.0", sha256="3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e") version("0.5.1", sha256="ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3") - depends_on("python@2.7:", type=("build", "run")) - depends_on("python@3.6:", type=("build", "run"), when="@2.0.0:") - depends_on("python@3.7:", type=("build", "run"), when="@3.8.1:") - depends_on("py-setuptools@34.4:", type="build", when="@0.3.3:") - depends_on("py-setuptools@56:", type="build", when="@3.5.1:") + depends_on("python@3.8:", when="@3.16:", type=("build", "run")) + # needed for spack bootstrap as spack itself supports python 3.6 + depends_on("python@3.7:", when="@3.8.1:", type=("build", "run")) + depends_on("py-setuptools@56:", when="@3.5.1:", type="build") + depends_on("py-setuptools@34.4:", when="@0.3.3:", type="build") + depends_on("py-setuptools-scm@3.4.1: +toml", when="@2.0.1:", type="build") depends_on("py-setuptools-scm@1.15.0:", type="build") - depends_on("py-setuptools-scm@3.4.1: +toml", type="build", when="@2.0.1:") + + # Historical dependencies depends_on("py-more-itertools", type=("build", "run"), when="@0.6.0:2.1.0") From 435eedc141593d2af9e6fe6a97ee5c87e8aba5ca Mon Sep 17 00:00:00 2001 From: George Young Date: Fri, 6 Oct 2023 20:00:03 +0100 Subject: [PATCH 051/408] topaz: new package @0.2.5 (#40352) * topaz: new package @0.2.5 * switching over to pypi --------- Co-authored-by: LMS Bioinformatics --- .../repos/builtin/packages/topaz/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/topaz/package.py diff --git a/var/spack/repos/builtin/packages/topaz/package.py b/var/spack/repos/builtin/packages/topaz/package.py new file mode 100644 index 00000000000000..855cba4d6c90b1 --- /dev/null +++ b/var/spack/repos/builtin/packages/topaz/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Topaz(PythonPackage): + """topaz: Pipeline for particle picking in cryo-electron microscopy images using + convolutional neural networks trained from positive and unlabeled examples. 
Also + featuring micrograph and tomogram denoising with DNNs.""" + + homepage = "https://topaz-em.readthedocs.io/" + pypi = "topaz-em/topaz-em-0.2.5.tar.gz" + + version("0.2.5", sha256="002a6eb775598b6c4df0225f3a488bfe6a6da9246e8ca42eb4e7d58f694c25cc") + + depends_on("py-setuptools", type="build") + depends_on("py-torch@1:", type=("build", "run")) + depends_on("py-torchvision", type=("build", "run")) + depends_on("py-numpy@1.11:", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-scikit-learn@0.19.0:", type=("build", "run")) + depends_on("py-scipy@0.17.0:", type=("build", "run")) + depends_on("py-pillow@6.2.0:", type=("build", "run")) + depends_on("py-future", type=("build", "run")) From 031e498e6b506dae8223cd33ebed5595315c7db1 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:27:53 +0200 Subject: [PATCH 052/408] py-anyio: add 4.0.0 (#40346) --- var/spack/repos/builtin/packages/py-anyio/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-anyio/package.py b/var/spack/repos/builtin/packages/py-anyio/package.py index 09b8581b62670a..f5b74226a2ecfa 100644 --- a/var/spack/repos/builtin/packages/py-anyio/package.py +++ b/var/spack/repos/builtin/packages/py-anyio/package.py @@ -13,17 +13,24 @@ class PyAnyio(PythonPackage): homepage = "https://github.com/agronholm/anyio" pypi = "anyio/anyio-3.2.1.tar.gz" + version("4.0.0", sha256="f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a") version("3.6.2", sha256="25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421") version("3.6.1", sha256="413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b") version("3.5.0", sha256="a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6") version("3.3.4", sha256="67da67b5b21f96b9d3d65daa6ea99f5d5282cb09f50eb4456f8fb51dffefc3ff") version("3.2.1", sha256="07968db9fa7c1ca5435a133dc62f988d84ef78e1d9b22814a59d1c62618afbc5") + depends_on("python@3.8:", when="@4:", type=("build", "run")) depends_on("python@3.6.2:", type=("build", "run")) + depends_on("py-setuptools@64:", when="@3.7:", type="build") depends_on("py-setuptools@42:", type="build") - depends_on("py-wheel@0.29:", type="build") - depends_on("py-setuptools-scm+toml@3.4:", type="build") + depends_on("py-setuptools-scm@6.4:", when="@3.7:", type="build") + depends_on("py-setuptools-scm+toml@3.4:", when="@:3.6", type="build") + depends_on("py-exceptiongroup@1.0.2:", when="@4: ^python@:3.10", type=("build", "run")) depends_on("py-idna@2.8:", type=("build", "run")) depends_on("py-sniffio@1.1:", type=("build", "run")) + + # Historical dependencies + depends_on("py-wheel@0.29:", when="@:3.6", type="build") depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run")) From 21078a7623e3952a16d320b0b3b843f2f04790b6 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:32:21 +0200 Subject: [PATCH 053/408] py-argcomplete: add 3.1.2 (#40348) --- var/spack/repos/builtin/packages/py-argcomplete/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py index 92e1319b2e049e..146562369311ae 100644 --- a/var/spack/repos/builtin/packages/py-argcomplete/package.py +++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py @@ -12,13 +12,17 @@ class 
PyArgcomplete(PythonPackage): homepage = "https://github.com/kislyuk/argcomplete" pypi = "argcomplete/argcomplete-1.12.0.tar.gz" + version("3.1.2", sha256="d5d1e5efd41435260b8f85673b74ea2e883affcbec9f4230c582689e8e78251b") version("3.0.8", sha256="b9ca96448e14fa459d7450a4ab5a22bbf9cee4ba7adddf03e65c398b5daeea28") version("2.0.0", sha256="6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20") version("1.12.3", sha256="2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445") version("1.12.0", sha256="2fbe5ed09fd2c1d727d4199feca96569a5b50d44c71b16da9c742201f7cc295c") version("1.1.1", sha256="cca45b5fe07000994f4f06a0b95bd71f7b51b04f81c3be0b4ea7b666e4f1f084") + depends_on("py-setuptools@67.7.2:", when="@3.1:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm+toml@6.2:", when="@3.1:", type="build") + depends_on("py-importlib-metadata@0.23:6", when="@3.0.6: ^python@:3.7", type=("build", "run")) depends_on( "py-importlib-metadata@0.23:4", when="@1.12.3:2 ^python@:3.7", type=("build", "run") From fc5692097d285ff999fe47e9784cfdf8b9a86412 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:33:56 +0200 Subject: [PATCH 054/408] py-asttokens: add 2.4.0 (#40349) --- var/spack/repos/builtin/packages/py-asttokens/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-asttokens/package.py b/var/spack/repos/builtin/packages/py-asttokens/package.py index 7bad57ade670b6..9f42ec37acec6e 100644 --- a/var/spack/repos/builtin/packages/py-asttokens/package.py +++ b/var/spack/repos/builtin/packages/py-asttokens/package.py @@ -12,6 +12,7 @@ class PyAsttokens(PythonPackage): homepage = "https://github.com/gristlabs/asttokens" pypi = "asttokens/asttokens-2.0.5.tar.gz" + version("2.4.0", sha256="2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e") version("2.2.1", sha256="4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3") version("2.0.8", sha256="c61e16246ecfb2cde2958406b4c8ebc043c9e6d73aaa83c941673b35e5d3a76b") version("2.0.5", sha256="9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5") @@ -19,4 +20,5 @@ class PyAsttokens(PythonPackage): depends_on("py-setuptools@44:", type="build") depends_on("py-setuptools-scm+toml@3.4.3:", type="build") + depends_on("py-six@1.12:", when="@2.3:", type=("build", "run")) depends_on("py-six", type=("build", "run")) From 981595a721ff49f9341e2c32f4d5580b2a80a62f Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:34:47 +0200 Subject: [PATCH 055/408] py-expecttest: new package (#40347) --- .../builtin/packages/py-expecttest/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-expecttest/package.py diff --git a/var/spack/repos/builtin/packages/py-expecttest/package.py b/var/spack/repos/builtin/packages/py-expecttest/package.py new file mode 100644 index 00000000000000..60857d5a9bdb8a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-expecttest/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyExpecttest(PythonPackage): + """This library implements expect tests (also known as "golden" tests).""" + + homepage = "https://github.com/ezyang/expecttest" + pypi = "expecttest/expecttest-0.1.6.tar.gz" + + version("0.1.6", sha256="fd49563b6703b9c060a0bc946dfafc62bad74898867432192927eb1e5f9d8952") + + depends_on("python@:3", type=("build", "run")) + depends_on("py-poetry-core@1:", type="build") From c272fc35f210202ef754f0fb928b9a5f8bd9503b Mon Sep 17 00:00:00 2001 From: Edward Hartnett <38856240+edwardhartnett@users.noreply.github.com> Date: Fri, 6 Oct 2023 14:02:17 -0600 Subject: [PATCH 056/408] g2: updated for 3.4.8 release (#40366) * updated for 3.4.8 release --- var/spack/repos/builtin/packages/g2/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/g2/package.py b/var/spack/repos/builtin/packages/g2/package.py index 7441f764964fe1..63f6cd3ea82310 100644 --- a/var/spack/repos/builtin/packages/g2/package.py +++ b/var/spack/repos/builtin/packages/g2/package.py @@ -20,6 +20,7 @@ class G2(CMakePackage): maintainers("AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") version("develop", branch="develop") + version("3.4.8", sha256="071a6f799c4c4fdfd5d0478152a0cbb9d668d12d71c78d5bda71845fc5580a7f") version("3.4.7", sha256="d6530611e3a515122f11ed4aeede7641f6f8932ef9ee0d4828786572767304dc") version("3.4.6", sha256="c4b03946365ce0bacf1e10e8412a5debd72d8671d1696aa4fb3f3adb119175fe") version("3.4.5", sha256="c18e991c56964953d778632e2d74da13c4e78da35e8d04cb742a2ca4f52737b6") @@ -36,7 +37,8 @@ class G2(CMakePackage): ) variant("w3emc", default=True, description="Enable GRIB1 through w3emc", when="@3.4.6:") - depends_on("jasper@:2.0.32") + depends_on("jasper@:2.0.32", when="@:3.4.7") + depends_on("jasper") depends_on("libpng") depends_on("bacio", when="@3.4.6:") with when("+w3emc"): @@ -62,3 +64,7 @@ def setup_run_environment(self, env): lib = find_libraries("libg2_" + suffix, root=self.prefix, shared=False, recursive=True) env.set("G2_LIB" + suffix, lib[0]) env.set("G2_INC" + suffix, join_path(self.prefix, "include_" + suffix)) + + def check(self): + with working_dir(self.builder.build_directory): + make("test") From f7c8ced3a6e287e28e8dbc3ba6ba52ddbb8e4e73 Mon Sep 17 00:00:00 2001 From: Howard Pritchard Date: Fri, 6 Oct 2023 14:06:37 -0600 Subject: [PATCH 057/408] openmpi: add 4.1.6 release (#40361) related to #40232 Signed-off-by: Howard Pritchard --- var/spack/repos/builtin/packages/openmpi/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index edc67c16a451a4..87e5bc4f2bfee5 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -42,10 +42,13 @@ class Openmpi(AutotoolsPackage, CudaPackage): # Current version( - "4.1.5", sha256="a640986bc257389dd379886fdae6264c8cfa56bc98b71ce3ae3dfbd8ce61dbe3" - ) # libmpi.so.40.30.5 + "4.1.6", sha256="f740994485516deb63b5311af122c265179f5328a0d857a567b85db00b11e415" + ) # libmpi.so.40.30.6 # Still supported + version( + "4.1.5", sha256="a640986bc257389dd379886fdae6264c8cfa56bc98b71ce3ae3dfbd8ce61dbe3" + ) # libmpi.so.40.30.5 version( "4.1.4", sha256="92912e175fd1234368c8730c03f4996fe5942e7479bb1d10059405e7f2b3930d" ) # libmpi.so.40.30.4 From 39896e7f912b83960f7d0db52546d5d1c0b92774 Mon Sep 
17 00:00:00 2001 From: George Young Date: Fri, 6 Oct 2023 22:14:45 +0100 Subject: [PATCH 058/408] metal: new package @2020-05-05 (#40355) * metal: new package * style --------- Co-authored-by: LMS Bioinformatics --- .../repos/builtin/packages/metal/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/metal/package.py diff --git a/var/spack/repos/builtin/packages/metal/package.py b/var/spack/repos/builtin/packages/metal/package.py new file mode 100644 index 00000000000000..24325c744b753e --- /dev/null +++ b/var/spack/repos/builtin/packages/metal/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Metal(CMakePackage): + """METAL is a tool for the meta-analysis of genome-wide association studies""" + + homepage = "https://genome.sph.umich.edu/wiki/METAL" + url = "https://github.com/statgen/METAL/archive/refs/tags/2020-05-05.tar.gz" + + version( + "2020-05-05", sha256="0ffa2419ca2ab43766e7e6e8c97822c8ce1f5b6233fb5f992d1b1be1955fede7" + ) + + depends_on("cmake@3.1:", type="build") + depends_on("zlib-ng") + + @run_after("install") + def mv_binary(self): + with working_dir(self.build_directory): + install_tree("bin", self.prefix.bin) From 369ee31d24ef18cd545b1c7437cc1feb63a933c0 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 6 Oct 2023 23:19:49 +0200 Subject: [PATCH 059/408] VersionRange: improve error message for empty range (#40345) --- lib/spack/spack/test/versions.py | 5 +++-- lib/spack/spack/version/__init__.py | 2 ++ lib/spack/spack/version/common.py | 4 ++++ lib/spack/spack/version/version_types.py | 8 ++++++-- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index c606fad8edcd4a..1dcf28cd711040 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -17,6 +17,7 @@ import spack.package_base import spack.spec from spack.version import ( + EmptyRangeError, GitVersion, StandardVersion, Version, @@ -695,9 +696,9 @@ def test_version_range_nonempty(): def test_empty_version_range_raises(): - with pytest.raises(ValueError): + with pytest.raises(EmptyRangeError, match="2:1.0 is an empty range"): assert VersionRange("2", "1.0") - with pytest.raises(ValueError): + with pytest.raises(EmptyRangeError, match="2:1.0 is an empty range"): assert ver("2:1.0") diff --git a/lib/spack/spack/version/__init__.py b/lib/spack/spack/version/__init__.py index 25745a94fd89d3..b25048f0523f9b 100644 --- a/lib/spack/spack/version/__init__.py +++ b/lib/spack/spack/version/__init__.py @@ -16,6 +16,7 @@ """ from .common import ( + EmptyRangeError, VersionChecksumError, VersionError, VersionLookupError, @@ -54,5 +55,6 @@ "VersionError", "VersionChecksumError", "VersionLookupError", + "EmptyRangeError", "any_version", ] diff --git a/lib/spack/spack/version/common.py b/lib/spack/spack/version/common.py index e26339d1320c1f..28dced815c3329 100644 --- a/lib/spack/spack/version/common.py +++ b/lib/spack/spack/version/common.py @@ -35,3 +35,7 @@ class VersionChecksumError(VersionError): class VersionLookupError(VersionError): """Raised for errors looking up git commits as versions.""" + + +class EmptyRangeError(VersionError): + """Raised when constructing an empty version range.""" diff --git 
a/lib/spack/spack/version/version_types.py b/lib/spack/spack/version/version_types.py index 223cad40d67ca2..87f4d26308cfff 100644 --- a/lib/spack/spack/version/version_types.py +++ b/lib/spack/spack/version/version_types.py @@ -12,6 +12,7 @@ from .common import ( COMMIT_VERSION, + EmptyRangeError, VersionLookupError, infinity_versions, is_git_version, @@ -595,14 +596,17 @@ class ClosedOpenRange: def __init__(self, lo: StandardVersion, hi: StandardVersion): if hi < lo: - raise ValueError(f"{lo}:{hi} is an empty range") + raise EmptyRangeError(f"{lo}..{hi} is an empty range") self.lo: StandardVersion = lo self.hi: StandardVersion = hi @classmethod def from_version_range(cls, lo: StandardVersion, hi: StandardVersion): """Construct ClosedOpenRange from lo:hi range.""" - return ClosedOpenRange(lo, next_version(hi)) + try: + return ClosedOpenRange(lo, next_version(hi)) + except EmptyRangeError as e: + raise EmptyRangeError(f"{lo}:{hi} is an empty range") from e def __str__(self): # This simplifies 3.1:<3.2 to 3.1:3.1 to 3.1 From fabf47bfef427ef5b01490681d9b454864cfea1b Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Fri, 6 Oct 2023 16:24:44 -0500 Subject: [PATCH 060/408] wise2: adding new package wise2 (#40341) --- .../repos/builtin/packages/wise2/package.py | 58 +++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 var/spack/repos/builtin/packages/wise2/package.py diff --git a/var/spack/repos/builtin/packages/wise2/package.py b/var/spack/repos/builtin/packages/wise2/package.py new file mode 100644 index 00000000000000..153305896befc8 --- /dev/null +++ b/var/spack/repos/builtin/packages/wise2/package.py @@ -0,0 +1,58 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Wise2(MakefilePackage): + """The Wise2 package is now a rather stately bioinformatics package that + has been around for a while.
Its key programs are genewise, a program + for aligning proteins or protein HMMs to DNA, and dynamite a rather + cranky "macro language" which automates the production of dynamic + programming.""" + + homepage = "https://www.ebi.ac.uk/~birney/wise2/" + url = "https://www.ebi.ac.uk/~birney/wise2/wise2.4.1.tar.gz" + + maintainers("snehring") + + version("2.4.1", sha256="240e2b12d6cd899040e2efbcb85b0d3c10245c255f3d07c1db45d0af5a4d5fa1") + + depends_on("gettext") + depends_on("glib") + depends_on("libiconv") + depends_on("pcre2") + + build_directory = "src" + + build_targets = ["all"] + + def edit(self, spec, prefix): + glib_include_include = join_path( + spec["glib"].prefix.include, "glib-" + str(spec["glib"].version[0]) + ".0" + ) + glib_lib_include = join_path( + spec["glib"].prefix.lib, "glib-" + str(spec["glib"].version[0]) + ".0", "include" + ) + glib_lib = spec["glib"].prefix.lib + glib_config_files = ["src/makefile", "src/network/makefile", "src/models/makefile"] + for f in glib_config_files: + filter_file( + "`glib-config --cflags`", + f"-I{glib_include_include} -I{glib_lib_include}", + f, + string=True, + ) + filter_file("`glib-config --libs`", f"-L{glib_lib} -lglib-2.0", f, string=True) + filter_file('"glib.h"', "", "src/dynlibsrc/subseqhash.h", string=True) + filter_file("getline", "getlineseq", "src/HMMer2/sqio.c", string=True) + filter_file("isnumber", "isdigit", "src/models/phasemodel.c", string=True) + filter_file(r".*welcome.csh.*", "", "src/makefile") + + def install(self, spec, prefix): + with working_dir("src"): + install_tree("bin", prefix.bin) + mkdirp(prefix.share.wise2) + install_tree("wisecfg", prefix.share.wise2) From 49dea5cec2d6fe6645a1d631a1cbcb3d682ec80b Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Fri, 6 Oct 2023 15:04:51 -0700 Subject: [PATCH 061/408] Update bufr recipe (#40033) * Update bufr recipe * Add v12.0.1 * style fixes * remove test-related functionality for bufr * Re-add testing --------- Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../builtin/packages/bufr/c-tests-libm.patch | 11 ++ .../bufr/cmakelists-apple-llvm-ranlib.patch | 15 +++ .../repos/builtin/packages/bufr/package.py | 102 +++++++++++++++--- .../packages/bufr/python-version.patch | 12 +++ 4 files changed, 126 insertions(+), 14 deletions(-) create mode 100644 var/spack/repos/builtin/packages/bufr/c-tests-libm.patch create mode 100644 var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch create mode 100644 var/spack/repos/builtin/packages/bufr/python-version.patch diff --git a/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch b/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch new file mode 100644 index 00000000000000..1183659116013b --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch @@ -0,0 +1,11 @@ +--- a/test/CMakeLists.txt 2022-07-28 11:25:13.000000000 -0400 ++++ b/test/CMakeLists.txt 2022-07-28 11:26:40.000000000 -0400 +@@ -205,7 +205,7 @@ + set(test_exe ${test}.x) + add_executable(${test_exe} ${test_src}) + add_dependencies(${test_exe} bufr_${kind}) +- target_link_libraries(${test_exe} PRIVATE bufr::bufr_${kind}) ++ target_link_libraries(${test_exe} PRIVATE bufr::bufr_${kind} m) + add_test(NAME ${test} COMMAND ${CMAKE_BINARY_DIR}/test/${test_exe}) + endforeach() + endforeach() diff --git a/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch 
b/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch new file mode 100644 index 00000000000000..8f2e363e58d735 --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch @@ -0,0 +1,15 @@ +--- a/CMakeLists.txt 2022-02-08 10:03:55.000000000 -0700 ++++ b/CMakeLists.txt 2022-02-08 10:03:51.000000000 -0700 +@@ -39,9 +39,9 @@ + find_package(Python3 REQUIRED COMPONENTS Interpreter) + endif() + +-if(APPLE) +- # The linker on macOS does not include `common symbols` by default +- # Passing the -c flag includes them and fixes an error with undefined symbols ++if(APPLE AND NOT "${CMAKE_RANLIB}" MATCHES "^.*(llvm-ranlib)$") ++ # The linker on macOS does not include `common symbols` by default, Intel requires ++ # passing the -c flag to include them and fix an error with undefined symbols + set(CMAKE_Fortran_ARCHIVE_FINISH " -c ") + set(CMAKE_C_ARCHIVE_FINISH " -c ") + endif() diff --git a/var/spack/repos/builtin/packages/bufr/package.py b/var/spack/repos/builtin/packages/bufr/package.py index e02ebbe134e3ca..f475c0afd0962a 100644 --- a/var/spack/repos/builtin/packages/bufr/package.py +++ b/var/spack/repos/builtin/packages/bufr/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * @@ -11,37 +13,109 @@ class Bufr(CMakePackage): utilities that can be used to read (decode) and write (encode) data in BUFR, which is a WMO standard format for the exchange of meteorological data. This is part of the NCEPLIBS project. - + The library also includes a Python interface. """ homepage = "https://noaa-emc.github.io/NCEPLIBS-bufr" url = "https://github.com/NOAA-EMC/NCEPLIBS-bufr/archive/refs/tags/bufr_v11.5.0.tar.gz" + git = "https://github.com/NOAA-EMC/NCEPLIBS-bufr" - maintainers("t-brown", "AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA", "jbathegit") + maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA", "jbathegit") + version("develop", branch="develop") + version("12.0.1", sha256="525f26238dba6511a453fc71cecc05f59e4800a603de2abbbbfb8cbb5adf5708") + version("12.0.0", sha256="d01c02ea8e100e51fd150ff1c4a1192ca54538474acb1b7f7a36e8aeab76ee75") version("11.7.1", sha256="6533ce6eaa6b02c0cb5424cfbc086ab120ccebac3894980a4daafd4dfadd71f8") version("11.7.0", sha256="6a76ae8e7682bbc790321bf80c2f9417775c5b01a5c4f10763df92e01b20b9ca") version("11.6.0", sha256="af4c04e0b394aa9b5f411ec5c8055888619c724768b3094727e8bb7d3ea34a54") version("11.5.0", sha256="d154839e29ef1fe82e58cf20232e9f8a4f0610f0e8b6a394b7ca052e58f97f43") + version("11.4.0", sha256="946482405e675b99e8e0c221d137768f246076f5e9ba92eed6cae47fb68b7a26") + + # Patch to not add "-c" to ranlib flags when using llvm-ranlib on Apple systems + patch("cmakelists-apple-llvm-ranlib.patch", when="@11.5.0:11.6.0") + # C test does not explicity link to -lm causing DSO error when building shared libs + patch("c-tests-libm.patch", when="@11.5.0:11.7.0") + # Patch to identify Python version correctly + patch("python-version.patch", when="@11.5:12.0.0 +python") + + variant("python", default=False, description="Enable Python interface?") + variant("shared", default=True, description="Build shared libraries", when="@11.5:") + + extends("python", when="+python") + + depends_on("python@3:", type=("build", "run"), when="+python") + depends_on("py-setuptools", type="build", when="+python") + depends_on("py-numpy", type=("build", "run"), when="+python") + depends_on("py-pip", type="build", when="+python") + depends_on("py-wheel", type="build", 
when="+python") + + def url_for_version(self, version): + pre = "bufr_" if version < Version("12.0.1") else "" + return ( + f"https://github.com/NOAA-EMC/NCEPLIBS-bufr/archive/refs/tags/{pre}v{version}.tar.gz" + ) + + # Need to make the lines shorter at least on some systems + def patch(self): + with when("@:11.7.1"): + filter_file("_lenslmax 120", "_lenslmax 60", "CMakeLists.txt") + + def cmake_args(self): + args = [ + self.define_from_variant("ENABLE_PYTHON", "python"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define("BUILD_TESTS", self.run_tests), + ] + + return args + + def flag_handler(self, name, flags): + """ + On macOS if a library built with the ar utility contains objects + with Fortran module data but no executable functions, + the symbols corresponding to the module data may not be resolved + when an object referencing them is linked against the library. + You can work around this by compiling with option -fno-common. + """ + fc = self.compiler.fc + if self.spec.satisfies("platform=darwin"): + if name == "fflags": + if "ifort" in fc or "gfortran" in fc: + flags.append("-fno-common") + + # Bufr inserts a path into source code which may be longer than 132 + if name == "fflags" and "gfortran" in fc: + flags.append("-ffree-line-length-none") + + # Inject flags into CMake build + return (None, None, flags) def _setup_bufr_environment(self, env, suffix): libname = "libbufr_{0}".format(suffix) - lib = find_libraries(libname, root=self.prefix, shared=False, recursive=True) - lib_envname = "BUFR_LIB{0}".format(suffix) - inc_envname = "BUFR_INC{0}".format(suffix) - include_dir = "include_{0}".format(suffix) + shared = True if "+shared" in self.spec else False + # Bufr has _DA (dynamic allocation) libs in versions <= 11.5.0 + append = "" if self.spec.satisfies("@11.5.0:") else "_DA" + lib = find_libraries(libname + append, root=self.prefix, shared=shared, recursive=True) + lib_envname = "BUFR_LIB{0}".format(suffix) + append + inc_envname = "BUFR_INC{0}".format(suffix) + append + include_dir = "{0}_{1}".format(self.prefix.include.bufr, suffix) env.set(lib_envname, lib[0]) env.set(inc_envname, include_dir) - # Bufr has _DA (dynamic allocation) libs in versions <= 11.5.0 - if self.spec.satisfies("@:11.5.0"): - da_lib = find_libraries( - libname + "_DA", root=self.prefix, shared=False, recursive=True - ) - env.set(lib_envname + "_DA", da_lib[0]) - env.set(inc_envname + "_DA", include_dir) + if self.spec.satisfies("+python"): + pyver = self.spec["python"].version.up_to(2) + pydir = join_path(os.path.dirname(lib[0]), f"python{pyver}", "site-packages") + env.prepend_path("PYTHONPATH", pydir) def setup_run_environment(self, env): - for suffix in ("4", "8", "d"): + suffixes = ["4"] + if not self.spec.satisfies("@12:"): + suffixes += ["8", "d"] + for suffix in suffixes: self._setup_bufr_environment(env, suffix) + + def check(self): + if self.spec.satisfies("~python"): + with working_dir(self.builder.build_directory): + make("test") diff --git a/var/spack/repos/builtin/packages/bufr/python-version.patch b/var/spack/repos/builtin/packages/bufr/python-version.patch new file mode 100644 index 00000000000000..8b4b979d66c568 --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/python-version.patch @@ -0,0 +1,12 @@ +--- a/python/CMakeLists.txt 2023-06-08 12:39:26.000000000 -0600 ++++ b/python/CMakeLists.txt 2023-07-19 13:45:11.000000000 -0600 +@@ -8,8 +8,7 @@ + file( COPY ncepbufr utils DESTINATION . 
) + + # Library installation directory +-execute_process(COMMAND ${Python3_EXECUTABLE} -c "from __future__ import print_function; import sys; print(sys.version[:3], end='')" +- OUTPUT_VARIABLE _PYVER) ++set(_PYVER "${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}") + set(_install_dir "${CMAKE_INSTALL_FULL_LIBDIR}/python${_PYVER}/site-packages") + + # Build the extension module for use in install tree From 3f78f98332d3a4dfb87719ae917beb66db1ee8c5 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Fri, 6 Oct 2023 15:31:23 -0700 Subject: [PATCH 062/408] MFEM: add new version v4.6 (#40170) * [mfem] Initial changes for v4.6 * [@spackbot] updating style on behalf of v-dobrev * [mfem] Set the proper download link for v4.6 --- .../repos/builtin/packages/mfem/package.py | 25 ++++++++++++++++- .../builtin/packages/mfem/test_builds.sh | 27 +++++++++---------- 2 files changed, 36 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 061ce98d43e5ef..eb869fc255b9e4 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -6,6 +6,7 @@ import os import shutil import sys +from platform import machine from spack.package import * @@ -48,6 +49,13 @@ class Mfem(Package, CudaPackage, ROCmPackage): # other version. version("develop", branch="master") + version( + "4.6.0", + sha256="5fa9465b5bec56bfb777a4d2826fba48d85fbace4aed8b64a2fd4059bf075b15", + url="https://bit.ly/mfem-4-6", + extension="tar.gz", + ) + version( "4.5.2", sha256="7003c908c8265810ff97cb37531521b3aed24959975833a01ea05adfdb36e0f7", @@ -286,6 +294,11 @@ class Mfem(Package, CudaPackage, ROCmPackage): "sundials@5.4.0:+cuda cuda_arch={0}".format(sm_), when="@4.2.0:+sundials+cuda cuda_arch={0}".format(sm_), ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "sundials@5.7.0:+rocm amdgpu_target={0}".format(gfx), + when="@4.6.0:+sundials+rocm amdgpu_target={0}".format(gfx), + ) depends_on("pumi", when="+pumi~shared") depends_on("pumi+shared", when="+pumi+shared") depends_on("pumi@2.2.3:2.2.5", when="@4.2.0:4.3.0+pumi") @@ -296,6 +309,16 @@ class Mfem(Package, CudaPackage, ROCmPackage): depends_on("gslib@1.0.7:", when="@4.3.0:+gslib") depends_on("suite-sparse", when="+suite-sparse") depends_on("superlu-dist", when="+superlu-dist") + for sm_ in CudaPackage.cuda_arch_values: + depends_on( + "superlu-dist+cuda cuda_arch={0}".format(sm_), + when="+superlu-dist+cuda cuda_arch={0}".format(sm_), + ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "superlu-dist+rocm amdgpu_target={0}".format(gfx), + when="+superlu-dist+rocm amdgpu_target={0}".format(gfx), + ) depends_on("strumpack@3.0.0:", when="+strumpack~shared") depends_on("strumpack@3.0.0:+shared", when="+strumpack+shared") for sm_ in CudaPackage.cuda_arch_values: @@ -921,7 +944,7 @@ def find_optional_library(name, prefix): options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix] if "%cce" in spec: # We assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"] hip_libs += find_libraries(craylibs, craylibs_path) if hip_libs: diff --git a/var/spack/repos/builtin/packages/mfem/test_builds.sh b/var/spack/repos/builtin/packages/mfem/test_builds.sh index 13e72e7d558a5f..787f936be132d2 100755 --- a/var/spack/repos/builtin/packages/mfem/test_builds.sh 
+++ b/var/spack/repos/builtin/packages/mfem/test_builds.sh @@ -14,9 +14,9 @@ rocm_arch="gfx908" spack_jobs='' # spack_jobs='-j 128' -mfem='mfem@4.5.2'${compiler} +mfem='mfem@4.6.0'${compiler} # mfem_dev='mfem@develop'${compiler} -mfem_dev='mfem@4.5.2'${compiler} +mfem_dev='mfem@4.6.0'${compiler} backends='+occa+raja+libceed' backends_specs='^occa~cuda ^raja~openmp' @@ -24,11 +24,9 @@ backends_specs='^occa~cuda ^raja~openmp' # ~fortran is needed for Cray Fortran linking with tcmalloc* conduit_spec='^conduit~fortran' # petsc spec -petsc_spec='^petsc+suite-sparse+mumps' -petsc_spec_cuda='^petsc+cuda+suite-sparse+mumps' -# superlu-dist specs -superlu_spec_cuda='^superlu-dist+cuda cuda_arch='"${cuda_arch}" -superlu_spec_rocm='^superlu-dist+rocm amdgpu_target='"${rocm_arch}" +petsc_spec='^petsc+mumps' +petsc_spec_cuda='^petsc+cuda+mumps' +petsc_spec_rocm='^petsc+rocm+mumps' # strumpack spec without cuda (use version > 6.3.1) strumpack_spec='^strumpack~slate~openmp~cuda' strumpack_cuda_spec='^strumpack+cuda~slate~openmp' @@ -138,7 +136,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$superlu_spec_cuda $petsc_spec_cuda $conduit_spec" + '"$petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. @@ -150,7 +148,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '" $superlu_spec_cuda $strumpack_cuda_spec $conduit_spec" + '" $strumpack_cuda_spec $conduit_spec" # # same builds as above with ${mfem_dev} @@ -175,7 +173,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$superlu_spec_cuda $petsc_spec_cuda $conduit_spec" + '"$petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. @@ -187,7 +185,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '"$superlu_spec_cuda $strumpack_cuda_spec $conduit_spec" + '"$strumpack_cuda_spec $conduit_spec" ) @@ -203,15 +201,14 @@ builds_rocm=( ^raja+rocm~openmp ^occa~cuda~openmp ^hypre+rocm' # hypre without rocm: - # TODO: add "+petsc+slepc $petsc_spec_rocm" when it is supported. # TODO: add back '+hiop' when it is no longer linked with tcmalloc* through # its magma dependency. # TODO: add back '+ginkgo' when the Ginkgo example works. ${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \ - +superlu-dist+strumpack+suite-sparse+gslib \ + +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \ - '"$superlu_spec_rocm $conduit_spec" + '"$petsc_spec_rocm $conduit_spec" # hypre with rocm: # TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works. 
@@ -223,7 +220,7 @@ builds_rocm=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda ^hypre+rocm \ - '"$strumpack_rocm_spec $superlu_spec_rocm $conduit_spec" + '"$strumpack_rocm_spec $conduit_spec" # # same builds as above with ${mfem_dev} From 4cd5e84a23c11ea59ab89da84486b716f1c60db3 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Fri, 6 Oct 2023 16:19:19 -0700 Subject: [PATCH 063/408] Change 'exit' to 'return' in setup-env.sh (#36137) * Change 'exit' to 'return' in `setup-env.sh` to avoid losing shell in some cases when sourcing twice. --- share/spack/setup-env.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 7d4554359fa6ab..b5f434863595f4 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -41,7 +41,7 @@ # prevent infinite recursion when spack shells out (e.g., on cray for modules) if [ -n "${_sp_initializing:-}" ]; then - exit 0 + return 0 fi export _sp_initializing=true From d99fd132b87b2bf52aad779dfd06c3b42e9c5788 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 7 Oct 2023 02:03:00 +0200 Subject: [PATCH 064/408] Python add v3.11.5 (#40330) --- var/spack/repos/builtin/packages/python/package.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index c07ad5d6ad461e..04251b6c2339f6 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -42,14 +42,16 @@ class Python(Package): version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") version( - "3.11.4", - sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63", + "3.11.5", + sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58", preferred=True, ) + version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63") version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048") version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849") version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4") version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb") + version("3.10.13", sha256="698ec55234c1363bd813b460ed53b0f108877c7a133d48bde9a50a1eb57b7e65") version("3.10.12", sha256="a43cd383f3999a6f4a7db2062b2fc9594fefa73e175b3aedafa295a51a7bb65c") version("3.10.11", sha256="f3db31b668efa983508bd67b5712898aa4247899a346f2eb745734699ccd3859") version("3.10.10", sha256="fba64559dde21ebdc953e4565e731573bb61159de8e4d4cedee70fb1196f610d") @@ -63,6 +65,7 @@ class Python(Package): version("3.10.2", sha256="3c0ede893011319f9b0a56b44953a3d52c7abf9657c23fb4bc9ced93b86e9c97") version("3.10.1", sha256="b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3") version("3.10.0", sha256="c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758") + version("3.9.18", sha256="504ce8cfd59addc04c22f590377c6be454ae7406cb1ebf6f5a350149225a9354") version("3.9.17", sha256="8ead58f669f7e19d777c3556b62fae29a81d7f06a7122ff9bc57f7dd82d7e014") version("3.9.16", sha256="1ad539e9dbd2b42df714b69726e0693bc6b9d2d2c8e91c2e43204026605140c5") version("3.9.15", 
sha256="48d1ccb29d5fbaf1fb8f912271d09f7450e426d4dfe95978ef6aaada70ece4d8") @@ -81,6 +84,7 @@ class Python(Package): version("3.9.2", sha256="7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519") version("3.9.1", sha256="29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117") version("3.9.0", sha256="df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8") + version("3.8.18", sha256="7c5df68bab1be81a52dea0cc2e2705ea00553b67107a301188383d7b57320b16") version("3.8.17", sha256="def428fa6cf61b66bcde72e3d9f7d07d33b2e4226f04f9d6fce8384c055113ae") version("3.8.16", sha256="71ca9d935637ed2feb59e90a368361dc91eca472a90acb1d344a2e8178ccaf10") version("3.8.15", sha256="924d46999df82aa2eaa1de5ca51d6800ffb56b4bf52486a28f40634e3362abc4") From 0754a3ae3d18e70535b924362719ea5959dd4590 Mon Sep 17 00:00:00 2001 From: Ken Raffenetti Date: Fri, 6 Oct 2023 23:28:15 -0500 Subject: [PATCH 065/408] yaksa: add version 0.3 (#40368) --- var/spack/repos/builtin/packages/yaksa/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/yaksa/package.py b/var/spack/repos/builtin/packages/yaksa/package.py index 46366659be0ffc..5d94f42740a41d 100644 --- a/var/spack/repos/builtin/packages/yaksa/package.py +++ b/var/spack/repos/builtin/packages/yaksa/package.py @@ -26,6 +26,7 @@ class Yaksa(AutotoolsPackage, CudaPackage, ROCmPackage): url = "https://github.com/pmodels/yaksa/archive/refs/tags/v0.2.tar.gz" maintainers("raffenet", "yfguo", "hzhou") + version("0.3", sha256="c9e5291211bee8852831bb464f430ad5ba1541e31db5718a6fa2f2d3329fc2d9") version("0.2", sha256="9401cb6153dc8c34ddb9781bbabd418fd26b0a27b5da3294ecc21af7be9c86f2") depends_on("autoconf", type="build") From df61916b9117e8eab91fa5df6d0240d71e6e54c0 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 7 Oct 2023 09:21:04 +0200 Subject: [PATCH 066/408] Remove warning for custom module configuration, when no module is enabled (#40358) The warning was added in v0.20 and was slated for removal in v0.21 --- .../spack/hooks/module_file_generation.py | 1 - lib/spack/spack/modules/__init__.py | 9 +---- lib/spack/spack/modules/common.py | 39 ------------------- 3 files changed, 2 insertions(+), 47 deletions(-) diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py index dc86c43205a8d5..0c6428ebd44198 100644 --- a/lib/spack/spack/hooks/module_file_generation.py +++ b/lib/spack/spack/hooks/module_file_generation.py @@ -11,7 +11,6 @@ def _for_each_enabled(spec, method_name, explicit=None): """Calls a method for each enabled module""" - spack.modules.ensure_modules_are_enabled_or_warn() set_names = set(spack.config.get("modules", {}).keys()) for name in set_names: enabled = spack.config.get("modules:%s:enable" % name) diff --git a/lib/spack/spack/modules/__init__.py b/lib/spack/spack/modules/__init__.py index ccd800cdecda11..13b8a95bed7d08 100644 --- a/lib/spack/spack/modules/__init__.py +++ b/lib/spack/spack/modules/__init__.py @@ -7,15 +7,10 @@ include Tcl non-hierarchical modules, Lua hierarchical modules, and others. 
""" -from .common import disable_modules, ensure_modules_are_enabled_or_warn +from .common import disable_modules from .lmod import LmodModulefileWriter from .tcl import TclModulefileWriter -__all__ = [ - "TclModulefileWriter", - "LmodModulefileWriter", - "disable_modules", - "ensure_modules_are_enabled_or_warn", -] +__all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"] module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter} diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 4dda4b183b1dc5..4a3485c8647959 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -33,10 +33,8 @@ import datetime import inspect import os.path -import pathlib import re import string -import warnings from typing import Optional import llnl.util.filesystem @@ -820,43 +818,6 @@ def verbose(self): return self.conf.verbose -def ensure_modules_are_enabled_or_warn(): - """Ensures that, if a custom configuration file is found with custom configuration for the - default tcl module set, then tcl module file generation is enabled. Otherwise, a warning - is emitted. - """ - - # TODO (v0.21 - Remove this function) - # Check if TCL module generation is enabled, return early if it is - enabled = spack.config.get("modules:default:enable", []) - if "tcl" in enabled: - return - - # Check if we have custom TCL module sections - for scope in spack.config.CONFIG.file_scopes: - # Skip default configuration - if scope.name.startswith("default"): - continue - - data = spack.config.get("modules:default:tcl", scope=scope.name) - if data: - config_file = pathlib.Path(scope.path) - if not scope.name.startswith("env"): - config_file = config_file / "modules.yaml" - break - else: - return - - # If we are here we have a custom "modules" section in "config_file" - msg = ( - f"detected custom TCL modules configuration in {config_file}, while TCL module file " - f"generation for the default module set is disabled. " - f"In Spack v0.20 module file generation has been disabled by default. To enable " - f"it run:\n\n\t$ spack config add 'modules:default:enable:[tcl]'\n" - ) - warnings.warn(msg) - - class BaseModuleFileWriter: def __init__(self, spec, module_set_name, explicit=None): self.spec = spec From 2d15375e29a492f451a2afcca85b4ef7c4c39bc1 Mon Sep 17 00:00:00 2001 From: "Mark (he/his) C. Miller" Date: Sat, 7 Oct 2023 17:44:31 -0700 Subject: [PATCH 067/408] Update CITATION.cff (#40363) You will note the `Cite this repository` link is not working. This commit fixes the underlying file... * `authors` was not indented * `authors` required by `preferred-citation` * `authors` list required at top level (I simply duplicated) * `"USA"` not correct country code * `month` requires an integer month number * Added URL to the actual pdf of the cited paper * Used `identifiers` for doi and LLNL doc number * added `abstract` copied from paper Various fixes were confirmed by `cffconvert` using `docker run -v `pwd`:/app citationcff/cffconvert --validate` --- CITATION.cff | 52 +++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 41 insertions(+), 11 deletions(-) diff --git a/CITATION.cff b/CITATION.cff index 4ae54a57df4165..4753f6cc37f4fa 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -27,12 +27,51 @@ # And here's the CITATION.cff format: # cff-version: 1.2.0 +type: software message: "If you are referencing Spack in a publication, please cite the paper below." 
+title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" +abstract: >- + Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools. + Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible. + However, managing many configurations is difficult because the configuration space is combinatorial in size. + We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity. + Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages and dependencies. + It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment. + We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos. preferred-citation: + title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" type: conference-paper - doi: "10.1145/2807591.2807623" - url: "https://github.com/spack/spack" + url: "https://tgamblin.github.io/pubs/spack-sc15.pdf" authors: + - family-names: "Gamblin" + given-names: "Todd" + - family-names: "LeGendre" + given-names: "Matthew" + - family-names: "Collette" + given-names: "Michael R." + - family-names: "Lee" + given-names: "Gregory L." + - family-names: "Moody" + given-names: "Adam" + - family-names: "de Supinski" + given-names: "Bronis R." + - family-names: "Futral" + given-names: "Scott" + conference: + name: "Supercomputing 2015 (SC’15)" + city: "Austin" + region: "Texas" + country: "US" + month: 11 + year: 2015 + identifiers: + - description: "The concept DOI of the work." + type: doi + value: 10.1145/2807591.2807623 + - description: "The DOE Document Release Number of the work" + type: other + value: "LLNL-CONF-669890" +authors: - family-names: "Gamblin" given-names: "Todd" - family-names: "LeGendre" @@ -47,12 +86,3 @@ preferred-citation: given-names: "Bronis R." - family-names: "Futral" given-names: "Scott" - title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" - conference: - name: "Supercomputing 2015 (SC’15)" - city: "Austin" - region: "Texas" - country: "USA" - month: November 15-20 - year: 2015 - notes: LLNL-CONF-669890 From 0900cf461154080901f8852dd30319e189374217 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sun, 8 Oct 2023 17:06:13 +0200 Subject: [PATCH 068/408] [add] py-cylc-flow: new recipe (#39986) * [add] py-cylc-flow: new recipe * py-cylc-flow: fix py-protobuf version Co-authored-by: Adam J. Stewart * py-cylc-flow: fix py-colorama version Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-cylc-flow: Update dependence on py-aiofiles Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-cylc-flow: Update dependence on py-pyzmq Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-cylcflow: remove useless dependence * py-cylc-flow: fix indent * py-cylc-flow: fix argument in depends_on; move lines * py-cylc-flow: fix the type of the dependence py-setuptools --------- Co-authored-by: Adam J. 
Stewart Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Co-authored-by: LydDeb --- .../builtin/packages/py-cylc-flow/package.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-cylc-flow/package.py diff --git a/var/spack/repos/builtin/packages/py-cylc-flow/package.py b/var/spack/repos/builtin/packages/py-cylc-flow/package.py new file mode 100644 index 00000000000000..bed0abd1b19788 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-flow/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcFlow(PythonPackage): + """A workflow engine for cycling systems.""" + + homepage = "https://cylc.org" + pypi = "cylc-flow/cylc-flow-8.1.4.tar.gz" + + maintainers("LydDeb") + + version("8.2.0", sha256="cbe35e0d72d1ca36f28a4cebe9b9040a3445a74253bc94051a3c906cf179ded0") + version("8.1.4", sha256="d1835ac18f6f24f3115c56b2bc821185484e834a86b12fd0033ff7e4dc3c1f63") + + depends_on("py-setuptools@49:66,68:", type=("build", "run")) + depends_on("py-aiofiles@0.7", type=("build", "run"), when="@:8.1") + depends_on("py-ansimarkup@1.0.0:", type=("build", "run")) + depends_on("py-async-timeout@3.0.0:", type=("build", "run")) + depends_on("py-colorama@0.4:1", type=("build", "run")) + depends_on("py-graphene@2.1:2", type=("build", "run")) + depends_on("py-jinja2@3.0", type=("build", "run")) + depends_on("py-metomi-isodatetime@3.0", type=("build", "run")) + depends_on("py-protobuf@4.21.2:4.21", type=("build", "run")) + depends_on("py-psutil@5.6.0:", type=("build", "run")) + depends_on("py-pyzmq@22:", type=("build", "run"), when="@8.2:") + depends_on("py-pyzmq@22", type=("build", "run"), when="@:8.1") + depends_on("py-importlib-metadata", type=("build", "run"), when="^python@:3.7") + depends_on("py-urwid@2", type=("build", "run")) + depends_on("py-rx", type=("build", "run")) + depends_on("py-promise", type=("build", "run")) + depends_on("py-tomli@2:", type=("build", "run"), when="^python@:3.10") From 35df437f04d6ab40553896655301d92da2ce8a98 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sun, 8 Oct 2023 17:16:08 +0200 Subject: [PATCH 069/408] py-bids-validator: add 1.13.1 (#40356) * py-bids-validator: add 1.13.1 * Fix style --- var/spack/repos/builtin/packages/py-bids-validator/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-bids-validator/package.py b/var/spack/repos/builtin/packages/py-bids-validator/package.py index a91047d9726f92..5ba74c229de73f 100644 --- a/var/spack/repos/builtin/packages/py-bids-validator/package.py +++ b/var/spack/repos/builtin/packages/py-bids-validator/package.py @@ -12,6 +12,7 @@ class PyBidsValidator(PythonPackage): homepage = "https://github.com/bids-standard/bids-validator" pypi = "bids-validator/bids-validator-1.7.2.tar.gz" + version("1.13.1", sha256="7205ce4e68fba172215332c786f1ac1665025b702b6dff2b1e158f00a2df9890") version("1.11.0", sha256="408c56748b7cf98cf7c31822f33a8d89c5e6e7db5254c345107e8d527576ff53") version("1.9.8", sha256="ff39799bb205f92d6f2c322f0b8eff0d1c0288f4291a0b18fce61afa4dfd7f3e") version("1.9.4", sha256="4bf07d375f231a2ad2f450beeb3ef6c54f93194fd993aa5157d57a8fba48ed50") @@ -19,4 +20,5 @@ class PyBidsValidator(PythonPackage): version("1.8.4", 
sha256="63e7a02c9ddb5505a345e178f4e436b82c35ec0a177d7047b67ea10ea3029a68") version("1.7.2", sha256="12398831a3a3a2ed7c67e693cf596610c23dd23e0889bfeae0830bbd1d41e5b9") + depends_on("python@3.8:", when="@1.12:", type=("build", "run")) depends_on("py-setuptools", type="build") From fcd7dd4d7498cb1b7855b3fc3dada58848ea438c Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sun, 8 Oct 2023 11:18:01 -0400 Subject: [PATCH 070/408] [py-lvis] New package (#39080) * [py-lvis] New package * [py-lvis] flake8 * [py-lvis] os agnostic * [py-lvis] added comment for imported dependency * [py-lvis] style fix --- .../repos/builtin/packages/py-lvis/package.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-lvis/package.py diff --git a/var/spack/repos/builtin/packages/py-lvis/package.py b/var/spack/repos/builtin/packages/py-lvis/package.py new file mode 100644 index 00000000000000..4824f487d5fe52 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-lvis/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * + + +class PyLvis(PythonPackage): + """Python API for LVIS dataset.""" + + pypi = "lvis/lvis-0.5.3.tar.gz" + + version("0.5.3", sha256="55aeeb84174abea2ed0d6985a8e93aa9bdbb60c61c6db130c8269a275ef61a6e") + + depends_on("py-setuptools", type="build") + depends_on("py-cycler@0.10:", type=("build", "run")) + depends_on("py-cython@0.29.12:", type=("build", "run")) + depends_on("py-kiwisolver@1.1:", type=("build", "run")) + depends_on("py-matplotlib@3.1.1:", type=("build", "run")) + depends_on("py-numpy@1.18.2:", type=("build", "run")) + depends_on("opencv@4.1.0.25:+python3", type=("build", "run")) + depends_on("py-pyparsing@2.4.0:", type=("build", "run")) + depends_on("py-python-dateutil@2.8:", type=("build", "run")) + depends_on("py-six@1.12:", type=("build", "run")) + + # imported at lvis/lvis.py:15 + depends_on("py-pycocotools", type=("build", "run")) + + def patch(self): + os.rename( + join_path(self.stage.source_path, "lvis.egg-info", "requires.txt"), + join_path(self.stage.source_path, "requirements.txt"), + ) From 4e9ccddbcd97c405edd734a8de7432247683dbb3 Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sun, 8 Oct 2023 11:20:46 -0400 Subject: [PATCH 071/408] py-torch-sparse: add v0.6.17 (#39495) * [py-torch-sparse] New version 0.6.17 * [py-torch-sparse] added dependency on parallel-hashmap * [py-torch-sparse] - spack only supports python@3.7: - py-pytest-runner only needed with old versions --- var/spack/repos/builtin/packages/py-torch-sparse/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-torch-sparse/package.py b/var/spack/repos/builtin/packages/py-torch-sparse/package.py index 4d4495301adb96..b74a7bed549f24 100644 --- a/var/spack/repos/builtin/packages/py-torch-sparse/package.py +++ b/var/spack/repos/builtin/packages/py-torch-sparse/package.py @@ -13,18 +13,19 @@ class PyTorchSparse(PythonPackage): homepage = "https://github.com/rusty1s/pytorch_sparse/" url = "https://github.com/rusty1s/pytorch_sparse/archive/0.6.7.tar.gz" + version("0.6.17", sha256="c964a70ed978bff65009250eb12fae96317c60c9a04d7d1b07f0beee8b4b9c22") version("0.6.8", sha256="98f7ff1f0f9cd5031bc81c70c11970c3864545ae33677025a6efd2466a97e6f9") version("0.6.7", 
sha256="0d038a1502548692972a085cd0496460b5d2050bb7328427add990f081d6c44d") variant("cuda", default=False, description="Enable CUDA support") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") + depends_on("py-pytest-runner", when="@:0.6.8", type="build") depends_on("py-scipy", type=("build", "run")) depends_on("py-torch", type=("build", "run")) depends_on("py-torch-scatter+cuda", when="+cuda") depends_on("py-torch-scatter~cuda", when="~cuda") + depends_on("parallel-hashmap", when="@0.6.17:") def setup_build_environment(self, env): if "+cuda" in self.spec: From 34f3694ce8b5f48567bbef972b32b00f026dd81c Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sun, 8 Oct 2023 11:21:55 -0400 Subject: [PATCH 072/408] [py-tensorboardx] Added version 2.6.2.2 (#39731) * [py-tensorboardx] Added version 2.6.2.2 * [py-tensorboardx] flake8 * [py-tensorboardx] requires py-setuptools-scm --- var/spack/repos/builtin/packages/py-tensorboardx/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-tensorboardx/package.py b/var/spack/repos/builtin/packages/py-tensorboardx/package.py index 5c901eedb468e0..3bcd5f17545fee 100644 --- a/var/spack/repos/builtin/packages/py-tensorboardx/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboardx/package.py @@ -17,6 +17,7 @@ class PyTensorboardx(PythonPackage): homepage = "https://github.com/lanpa/tensorboardX" pypi = "tensorboardx/tensorboardX-1.8.tar.gz" + version("2.6.2.2", sha256="c6476d7cd0d529b0b72f4acadb1269f9ed8b22f441e87a84f2a3b940bb87b666") version("2.5.1", sha256="ea85a3446f22ce8a917fe4fa4d8a7a96222ef84ac835267d038c34bb99f6d61b") version("2.1", sha256="9e8907cf2ab900542d6cb72bf91aa87b43005a7f0aa43126268697e3727872f9") version("2.0", sha256="835d85db0aef2c6768f07c35e69a74e3dcb122d6afceaf2b8504d7d16c7209a5") @@ -24,7 +25,10 @@ class PyTensorboardx(PythonPackage): version("1.8", sha256="13fe0abba27f407778a7321937190eedaf12bc8c544d9a4e294fcf0ba177fd76") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", when="@2.6.2.2:", type="build") depends_on("py-numpy", type=("build", "run")) - depends_on("py-protobuf@3.8.0:3.20.1", type=("build", "run")) + depends_on("py-protobuf@3.8.0:3.20.1", when="@:2.5.1", type=("build", "run")) + depends_on("py-protobuf@3.20:", when="@2.6.2.2:", type=("build", "run")) + depends_on("py-packaging", when="@2.6.2.2:", type=("build", "run")) depends_on("py-six", when="@:2.1", type=("build", "run")) From 45fb6e0c5e878d37690e2cac29ec6500c39e18ef Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sun, 8 Oct 2023 11:27:18 -0400 Subject: [PATCH 073/408] [py-tokenizers] added version 0.13.3 (#40360) --- var/spack/repos/builtin/packages/py-tokenizers/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-tokenizers/package.py b/var/spack/repos/builtin/packages/py-tokenizers/package.py index c45f301c20baf8..5555fcdb087e4c 100644 --- a/var/spack/repos/builtin/packages/py-tokenizers/package.py +++ b/var/spack/repos/builtin/packages/py-tokenizers/package.py @@ -13,6 +13,7 @@ class PyTokenizers(PythonPackage): homepage = "https://github.com/huggingface/tokenizers" pypi = "tokenizers/tokenizers-0.6.0.tar.gz" + version("0.13.3", sha256="2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e") version("0.13.1", sha256="3333d1cee5c8f47c96362ea0abc1f81c77c9b92c6c3d11cbf1d01985f0d5cf1d") version("0.10.3", 
sha256="1a5d3b596c6d3a237e1ad7f46c472d467b0246be7fd1a364f12576eb8db8f7e6") version("0.6.0", sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac") From 9f32bd7d508369d6b8f7abad60e455c2c3fca7de Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sun, 8 Oct 2023 17:28:13 +0200 Subject: [PATCH 074/408] py-tables: add 3.9.0 (#40340) * py-tables: add 3.9.0 * Add conflict with apple-clang --- .../builtin/packages/py-tables/package.py | 26 ++++++++++++------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-tables/package.py b/var/spack/repos/builtin/packages/py-tables/package.py index a350e3c09c62b7..84cd48bc5843d3 100644 --- a/var/spack/repos/builtin/packages/py-tables/package.py +++ b/var/spack/repos/builtin/packages/py-tables/package.py @@ -12,7 +12,10 @@ class PyTables(PythonPackage): homepage = "https://www.pytables.org/" pypi = "tables/tables-3.6.1.tar.gz" + git = "https://github.com/PyTables/PyTables.git" + version("master", branch="master") + version("3.9.0", sha256="27c9ca14c359d875caf945a6a527c12690e017650402dd17d8eb8b6caf6687d5") version("3.8.0", sha256="34f3fa2366ce20b18f1df573a77c1d27306ce1f2a41d9f9eff621b5192ea8788") version("3.7.0", sha256="e92a887ad6f2a983e564a69902de4a7645c30069fc01abd353ec5da255c5e1fe") version("3.6.1", sha256="49a972b8a7c27a8a173aeb05f67acb45fe608b64cd8e9fa667c0962a60b71b49") @@ -27,38 +30,43 @@ class PyTables(PythonPackage): variant("lzo", default=False, description="Support for lzo compression") # pyproject.toml + depends_on("py-setuptools@61:", when="@3.9:", type="build") depends_on("py-setuptools@42:", when="@3.7:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-cython@0.29.32:", when="@3.9:", type="build") + depends_on("py-cython@0.29.21:", when="@3.7:3.8", type=("build", "run")) + depends_on("py-cython@0.21:", type="build") # setup.py depends_on("python@3.8:", when="@3.8:", type=("build", "run")) # requirements.txt - depends_on("py-cython@0.29.21:", when="@3.7:", type=("build", "run")) - depends_on("py-cython@0.21:", type="build") depends_on("py-numpy@1.19:", when="@3.8:", type=("build", "run")) depends_on("py-numpy@1.9.3:", type=("build", "run")) depends_on("py-numexpr@2.6.2:", type=("build", "run")) - depends_on("py-blosc2@2.0", type=("build", "run")) depends_on("py-packaging", when="@3.7:", type=("build", "run")) depends_on("py-py-cpuinfo", when="@3.8:", type=("build", "run")) + depends_on("py-blosc2@2.2.8:", when="@3.9:", type=("build", "run")) + depends_on("py-blosc2@2.0", when="@3.8", type=("build", "run")) # tables/req_versions.py - depends_on("hdf5@1.8.4:", when="@3.4.0:") + depends_on("hdf5@1.10.5:", when="@3.8:") + depends_on("hdf5@1.8.4:", when="@3.4:") depends_on("hdf5@1.8.4:1.8", when="@:3.3") - - # Historical dependencies - depends_on("py-six@1.9:", when="@:3.5", type=("build", "run")) - - # tables/req_versions.py # Versions prior to 3.3 must build with the internal blosc due to a lock # problem in a multithreaded environment. 
depends_on("c-blosc@1.11.1:", when="@3.8:") depends_on("c-blosc@1.4.1:", when="@3.3:") + depends_on("zlib-api", when="+zlib") depends_on("bzip2", when="+bzip2") depends_on("lzo", when="+lzo") + conflicts("%apple-clang@15:", when="@:3.8") + + # Historical dependencies + depends_on("py-six@1.9:", when="@:3.5", type=("build", "run")) + def setup_build_environment(self, env): env.set("HDF5_DIR", self.spec["hdf5"].prefix) if "+bzip2" in self.spec: From 55a7afe2d9b1459bf4e12c6db40db6f00f2d4aef Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Sun, 8 Oct 2023 08:42:21 -0700 Subject: [PATCH 075/408] gettext: Add static and pic options (#37957) --- var/spack/repos/builtin/packages/gettext/package.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 4ae7eb59d98884..d7cccfb3391106 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -33,6 +33,8 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): variant("tar", default=True, description="Enable tar support") variant("bzip2", default=True, description="Enable bzip2 support") variant("xz", default=True, description="Enable xz support") + variant("shared", default=True, description="Build shared libraries") + variant("pic", default=True, description="Enable position-independent code (PIC)") # Optional variants variant("libunistring", default=False, description="Use libunistring") @@ -54,6 +56,8 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): depends_on("libunistring", when="+libunistring") # depends_on('cvs') + conflicts("+shared~pic") + patch("test-verify-parallel-make-check.patch", when="@:0.19.8.1") patch("nvhpc-builtin.patch", when="@:0.21.0 %nvhpc") patch("nvhpc-export-symbols.patch", when="%nvhpc") @@ -87,6 +91,8 @@ def configure_args(self): "--without-cvs", ] + config_args.extend(self.enable_or_disable("shared")) + if self.spec["iconv"].name == "libc": config_args.append("--without-libiconv-prefix") elif not is_system_path(self.spec["iconv"].prefix): @@ -115,12 +121,16 @@ def configure_args(self): else: config_args.append("--with-included-libunistring") + config_args.extend(self.with_or_without("pic")) + return config_args @property def libs(self): - return find_libraries( + libs = find_libraries( ["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"], root=self.prefix, recursive=True, + shared=self.spec.variants["shared"].value, ) + return libs From 8d78734edf097e9d166c297641f216e2551b3880 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 8 Oct 2023 19:47:36 +0200 Subject: [PATCH 076/408] py-setuptools: sdist + rpath patch backport (#40205) --- .../repos/builtin/packages/py-pip/package.py | 3 + .../builtin/packages/py-setuptools/package.py | 255 ++++-------------- .../py-setuptools/rpath-compiler-flag.patch | 13 + 3 files changed, 72 insertions(+), 199 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index 149f042756675c..52d290d0b549c8 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -86,6 +86,9 @@ class PyPip(Package, PythonExtension): extends("python") depends_on("python@3.7:", when="@22:", 
type=("build", "run")) + # Uses collections.MutableMapping + depends_on("python@:3.9", when="@:19.1", type=("build", "run")) + def url_for_version(self, version): url = "https://files.pythonhosted.org/packages/{0}/p/pip/pip-{1}-{0}-none-any.whl" if version >= Version("21"): diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 128ba5018f4376..03487bfaa07aca 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -6,191 +6,60 @@ from spack.package import * -class PySetuptools(Package, PythonExtension): +class PySetuptools(PythonPackage): """A Python utility that aids in the process of downloading, building, upgrading, installing, and uninstalling Python packages.""" homepage = "https://github.com/pypa/setuptools" - url = "https://files.pythonhosted.org/packages/py3/s/setuptools/setuptools-62.3.2-py3-none-any.whl" - list_url = "https://pypi.org/simple/setuptools/" + pypi = "setuptools/setuptools-62.3.2.tar.gz" tags = ["build-tools"] - version( - "68.0.0", - sha256="11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", - expand=False, - ) - version( - "67.6.0", - sha256="b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2", - expand=False, - ) - version( - "65.5.0", - sha256="f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356", - expand=False, - ) - version( - "65.0.0", - sha256="fe9a97f68b064a6ddd4bacfb0b4b93a4c65a556d97ce906255540439d0c35cef", - expand=False, - ) - version( - "64.0.0", - sha256="63f463b90ff5e0a1422010100268fd688e15c44ae0798659013c8412963e15e4", - expand=False, - ) - version( - "63.4.3", - sha256="7f61f7e82647f77d4118eeaf43d64cbcd4d87e38af9611694d4866eb070cd10d", - expand=False, - ) - version( - "63.0.0", - sha256="045aec56a3eee5c82373a70e02db8b6da9a10f7faf61ff89a14ab66c738ed370", - expand=False, - ) - version( - "62.6.0", - sha256="c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178", - expand=False, - ) - version( - "62.4.0", - sha256="5a844ad6e190dccc67d6d7411d119c5152ce01f7c76be4d8a1eaa314501bba77", - expand=False, - ) - version( - "62.3.2", - sha256="68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", - expand=False, - ) - version( - "59.4.0", - sha256="feb5ff19b354cde9efd2344ef6d5e79880ce4be643037641b49508bbb850d060", - expand=False, - ) - version( - "58.2.0", - sha256="2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11", - expand=False, - ) - version( - "57.4.0", - sha256="a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6", - expand=False, - ) - version( - "57.1.0", - sha256="ddae4c1b9220daf1e32ba9d4e3714df6019c5b583755559be84ff8199f7e1fe3", - expand=False, - ) - version( - "51.0.0", - sha256="8c177936215945c9a37ef809ada0fab365191952f7a123618432bbfac353c529", - expand=False, - ) - version( - "50.3.2", - sha256="2c242a0856fbad7efbe560df4a7add9324f340cf48df43651e9604924466794a", - expand=False, - ) - version( - "50.1.0", - sha256="4537c77e6e7dc170081f8547564551d4ff4e4999717434e1257600bbd3a23296", - expand=False, - ) - version( - "49.6.0", - sha256="4dd5bb0a0a0cff77b46ca5dd3a84857ee48c83e8223886b556613c724994073f", - expand=False, - ) - version( - "49.2.0", - sha256="272c7f48f5cddc5af5901f4265274c421c7eede5c8bc454ac2903d3f8fc365e9", - expand=False, - ) - version( - "46.1.3", - sha256="4fe404eec2738c20ab5841fa2d791902d2a645f32318a7850ef26f8d7215a8ee", - expand=False, - ) - version( - "44.1.1", - 
sha256="27a714c09253134e60a6fa68130f78c7037e5562c4f21f8f318f2ae900d152d5", - expand=False, - ) - version( - "44.1.0", - sha256="992728077ca19db6598072414fb83e0a284aca1253aaf2e24bb1e55ee6db1a30", - expand=False, - ) - version( - "43.0.0", - sha256="a67faa51519ef28cd8261aff0e221b6e4c370f8fb8bada8aa3e7ad8945199963", - expand=False, - ) - version( - "41.4.0", - sha256="8d01f7ee4191d9fdcd9cc5796f75199deccb25b154eba82d44d6a042cf873670", - expand=False, - ) - version( - "41.3.0", - sha256="e9832acd9be6f3174f4c34b40e7d913a146727920cbef6465c1c1bd2d21a4ec4", - expand=False, - ) - version( - "41.0.1", - sha256="c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf", - expand=False, - ) - version( - "41.0.0", - sha256="e67486071cd5cdeba783bd0b64f5f30784ff855b35071c8670551fd7fc52d4a1", - expand=False, - ) - version( - "40.8.0", - sha256="e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab", - expand=False, - ) - version( - "40.4.3", - sha256="ce4137d58b444bac11a31d4e0c1805c69d89e8ed4e91fde1999674ecc2f6f9ff", - expand=False, - ) - version( - "40.2.0", - sha256="ea3796a48a207b46ea36a9d26de4d0cc87c953a683a7b314ea65d666930ea8e6", - expand=False, - ) - version( - "39.2.0", - sha256="8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926", - expand=False, - ) - version( - "39.0.1", - sha256="8010754433e3211b9cdbbf784b50f30e80bf40fc6b05eb5f865fab83300599b8", - expand=False, - ) - version( - "25.2.0", - sha256="2845247c359bb91097ccf8f6be8a69edfa44847f3d2d5def39aa43c3d7f615ca", - expand=False, - ) - version( - "20.7.0", - sha256="8917a52aa3a389893221b173a89dae0471022d32bff3ebc31a1072988aa8039d", - expand=False, - ) - version( - "20.6.7", - sha256="9982ee4d279a2541dc1a7efee994ff9c535cfc05315e121e09df7f93da48c442", - expand=False, - ) + version("68.0.0", sha256="baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235") + version("67.6.0", sha256="2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077") + version("65.5.0", sha256="512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17") + version("65.0.0", sha256="d73f8cd714a1a6691f5eb5abeeacbf313242b7aa2f5eba93776542c1aad90c6f") + version("64.0.0", sha256="9b5d2cb8df48f005825654e0cb17217418317e4d996c035f0bca7cbaeb8acf51") + version("63.4.3", sha256="521c833d1e5e1ef0869940e7f486a83de7773b9f029010ad0c2fe35453a9dad9") + version("63.0.0", sha256="7388e17e72f5c0c7279f59da950a7925910e35bc1a84e19d3affbb40da248d1d") + version("62.6.0", sha256="990a4f7861b31532871ab72331e755b5f14efbe52d336ea7f6118144dd478741") + version("62.4.0", sha256="bf8a748ac98b09d32c9a64a995a6b25921c96cc5743c1efa82763ba80ff54e91") + version("62.3.2", sha256="a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7") + version("59.4.0", sha256="b4c634615a0cf5b02cf83c7bedffc8da0ca439f00e79452699454da6fbd4153d") + version("58.2.0", sha256="2c55bdb85d5bb460bd2e3b12052b677879cffcf46c0c688f2e5bf51d36001145") + version("57.4.0", sha256="6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465") + version("57.1.0", sha256="cfca9c97e7eebbc8abe18d5e5e962a08dcad55bb63afddd82d681de4d22a597b") + version("51.0.0", sha256="029c49fd713e9230f6a41c0298e6e1f5839f2cde7104c0ad5e053a37777e7688") + version("50.3.2", sha256="ed0519d27a243843b05d82a5e9d01b0b083d9934eaa3d02779a23da18077bd3c") + version("50.1.0", sha256="4a7708dafd2d360ce5e2ac7577374da9fb65fc867bc4cdaf461f9f834dfa6ac3") + version("49.6.0", sha256="46bd862894ed22c2edff033c758c2dc026324788d758e96788e8f7c11f4e9707") + version("49.2.0", 
sha256="afe9e81fee0270d3f60d52608549cc8ec4c46dada8c95640c1a00160f577acf2") + version("46.1.3", sha256="795e0475ba6cd7fa082b1ee6e90d552209995627a2a227a47c6ea93282f4bfb1") + version("44.1.1", sha256="c67aa55db532a0dadc4d2e20ba9961cbd3ccc84d544e9029699822542b5a476b") + version("44.1.0", sha256="794a96b0c1dc6f182c36b72ab70d7e90f1d59f7a132e6919bb37b4fd4d424aca") + version("43.0.0", sha256="db45ebb4a4b3b95ff0aca3ce5fe1e820ce17be393caf8902c78aa36240e8c378") + version("41.4.0", sha256="7eae782ccf36b790c21bde7d86a4f303a441cd77036b25c559a602cf5186ce4d") + version("41.3.0", sha256="9f5c54b529b2156c6f288e837e625581bb31ff94d4cfd116b8f271c589749556") + version("41.0.1", sha256="a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613") + version("41.0.0", sha256="79d30254b6fe7a8e672e43cd85f13a9f3f2a50080bc81d851143e2219ef0dcb1") + version("40.8.0", sha256="6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d") + version("40.4.3", sha256="acbc5740dd63f243f46c2b4b8e2c7fd92259c2ddb55a4115b16418a2ed371b15") + version("40.2.0", sha256="47881d54ede4da9c15273bac65f9340f8929d4f0213193fa7894be384f2dcfa6") + version("39.2.0", sha256="f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2") + version("39.0.1", sha256="bec7badf0f60e7fc8153fac47836edc41b74e5d541d7692e614e635720d6a7c7") + version("25.2.0", sha256="b2757ddac2c41173140b111e246d200768f6dd314110e1e40661d0ecf9b4d6a6") + version("20.7.0", sha256="505cdf282c5f6e3a056e79f0244b8945f3632257bba8469386c6b9b396400233") + version("20.6.7", sha256="d20152ee6337323d3b6d95cd733fb719d6b4f3fbc40f61f7a48e5a1bb96478b2") + + def url_for_version(self, version): + url = self.url.rsplit("/", 1)[0] + if version.satisfies(ver("32.1.2:51.0.0")): + url += "/setuptools-{}.zip" + else: + url += "/setuptools-{}.tar.gz" + return url.format(version) + + patch("rpath-compiler-flag.patch", when="@48:58.2") extends("python") @@ -200,27 +69,15 @@ class PySetuptools(Package, PythonExtension): depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run")) depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run")) - # https://github.com/pypa/setuptools/issues/3661 - conflicts("python@3.12:", when="@:67") + # Newer pip requires setuptools to be installed, before building + # setuptools. This issue was fixed or worked around in setuptools 54+ + depends_on("py-pip@:18", when="@:53", type="build") - depends_on("py-pip", type="build") + # Uses HTMLParser.unescape + depends_on("python@:3.8", when="@:41.0", type=("build", "run")) - def url_for_version(self, version): - url = "https://files.pythonhosted.org/packages/{0}/s/setuptools/setuptools-{1}-{0}-none-any.whl" - - if version >= Version("45.1.0"): - python_tag = "py3" - else: - python_tag = "py2.py3" - return url.format(python_tag, version) + # Uses collections.MutableMapping + depends_on("python@:3.9", when="@:40.4.2", type=("build", "run")) - def install(self, spec, prefix): - # When setuptools changes its entry point we might get weird - # incompatibilities if building from sources in a non-isolated environment. 
-        #
-        # https://github.com/pypa/setuptools/issues/980#issuecomment-1154471423
-        #
-        # We work around this issue by installing setuptools from wheels
-        whl = self.stage.archive_file
-        args = ["-m", "pip"] + std_pip_args + ["--prefix=" + prefix, whl]
-        python(*args)
+    # https://github.com/pypa/setuptools/issues/3661
+    depends_on("python@:3.11", when="@:67", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch
new file mode 100644
index 00000000000000..6b37d623234a53
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch
@@ -0,0 +1,13 @@
+diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py
+--- a/setuptools/_distutils/unixccompiler.py
++++ b/setuptools/_distutils/unixccompiler.py
+@@ -257,7 +257,7 @@ class UnixCCompiler(CCompiler):
+             # No idea how --enable-new-dtags would be passed on to
+             # ld if this system was using GNU ld. Don't know if a
+             # system like this even exists.
+-            return "-R" + dir
++            return "-Wl,-rpath," + dir
+ 
+     def library_option(self, lib):
+         return "-l" + lib
+ 
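Illustration (not part of the patch series): a minimal standalone sketch of how the url_for_version logic added above picks a source URL, with plain tuple comparison standing in for Spack's ver()/satisfies() range check and an assumed base URL in place of self.url:

    # Approximation of the url_for_version added to py-setuptools above.
    # Assumption: tuple comparison mimics ver("32.1.2:51.0.0") range matching.
    def url_for_version(base_url, version):
        url = base_url.rsplit("/", 1)[0]
        if (32, 1, 2) <= version <= (51, 0, 0):
            url += "/setuptools-{}.zip"      # old releases only shipped .zip sdists
        else:
            url += "/setuptools-{}.tar.gz"
        return url.format(".".join(str(p) for p in version))

    base = "https://files.pythonhosted.org/packages/source/s/setuptools/setuptools-62.3.2.tar.gz"
    print(url_for_version(base, (44, 1, 1)))  # ...setuptools-44.1.1.zip
    print(url_for_version(base, (62, 3, 2)))  # ...setuptools-62.3.2.tar.gz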
From 5eafb6a5fe5baf46bff3a944ecb0c72cd6b9336b Mon Sep 17 00:00:00 2001
From: "Mark (he/his) C. Miller"
Date: Sun, 8 Oct 2023 18:04:25 -0700
Subject: [PATCH 077/408] Update CITATION.cff with conf dates (#40375)

Add `start-date` and `end-date` to citation
---
 CITATION.cff | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CITATION.cff b/CITATION.cff
index 4753f6cc37f4fa..16f42d01651f19 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -62,6 +62,8 @@ preferred-citation:
     city: "Austin"
     region: "Texas"
     country: "US"
+  date-start: 2015-11-15
+  date-end: 2015-11-20
   month: 11
   year: 2015
   identifiers:

From a7117648c8873f53a52f11ede22ace055139bf21 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Mon, 9 Oct 2023 07:18:27 +0200
Subject: [PATCH 078/408] parser: use non-capturing groups (#40373)

---
 lib/spack/spack/parser.py | 46 +++++++++++++++++++--------------------
 1 file changed, 23 insertions(+), 23 deletions(-)

diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py
index d8f34d7e595d6e..5e46ddb1b17f7b 100644
--- a/lib/spack/spack/parser.py
+++ b/lib/spack/spack/parser.py
@@ -73,10 +73,10 @@
 #: Valid name for specs and variants. Here we are not using
 #: the previous "w[\w.-]*" since that would match most
 #: characters that can be part of a word in any language
-IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
-DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)"
-GIT_HASH = r"([A-Fa-f0-9]{40})"
-GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))"
+IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
+DOTTED_IDENTIFIER = rf"(?:{IDENTIFIER}(?:\.{IDENTIFIER})+)"
+GIT_HASH = r"(?:[A-Fa-f0-9]{40})"
+GIT_VERSION = rf"(?:(?:git\.(?:{DOTTED_IDENTIFIER}|{IDENTIFIER}))|(?:{GIT_HASH}))"
 
 NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*"
 
@@ -85,15 +85,15 @@
 #: A filename starts either with a "." or a "/" or a "{name}/,
 # or on Windows, a drive letter followed by a colon and "\"
 # or "." or {name}\
-WINDOWS_FILENAME = r"(\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)([a-zA-Z0-9-_\.\\]*)(\.json|\.yaml)"
-UNIX_FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"
+WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
+UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
 if not IS_WINDOWS:
     FILENAME = UNIX_FILENAME
 else:
     FILENAME = WINDOWS_FILENAME
 
-VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
-QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"
+VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
+QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"
 
 VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
 VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
@@ -125,34 +125,34 @@ class TokenType(TokenBase):
     """
 
     # Dependency
-    DEPENDENCY = r"(\^)"
+    DEPENDENCY = r"(?:\^)"
     # Version
-    VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))"
-    VERSION = rf"(@\s*({VERSION_LIST}))"
+    VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION})=(?:{VERSION}))"
+    VERSION = rf"(?:@\s*(?:{VERSION_LIST}))"
     # Variants
-    PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})"
-    BOOL_VARIANT = rf"([~+-]\s*{NAME})"
-    PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))"
-    KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))"
+    PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
+    BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
+    PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}\s*==\s*(?:{VALUE}|{QUOTED_VALUE}))"
+    KEY_VALUE_PAIR = rf"(?:{NAME}\s*=\s*(?:{VALUE}|{QUOTED_VALUE}))"
     # Compilers
-    COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))"
-    COMPILER = rf"(%\s*({NAME}))"
+    COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
+    COMPILER = rf"(?:%\s*(?:{NAME}))"
     # FILENAME
-    FILENAME = rf"({FILENAME})"
+    FILENAME = rf"(?:{FILENAME})"
     # Package name
-    FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})"
-    UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})"
+    FULLY_QUALIFIED_PACKAGE_NAME = rf"(?:{DOTTED_IDENTIFIER})"
+    UNQUALIFIED_PACKAGE_NAME = rf"(?:{IDENTIFIER})"
     # DAG hash
-    DAG_HASH = rf"(/({HASH}))"
+    DAG_HASH = rf"(?:/(?:{HASH}))"
     # White spaces
-    WS = r"(\s+)"
+    WS = r"(?:\s+)"
 
 
 class ErrorTokenType(TokenBase):
     """Enum with regexes for error analysis"""
 
     # Unexpected character
-    UNEXPECTED = r"(.[\s]*)"
+    UNEXPECTED = r"(?:.[\s]*)"
 
 
 class Token:
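Illustration (not part of the patch series): a small standalone check of the behavior this change relies on. A (?:...) group matches the same text as (...) but is not captured, so no groups accumulate; plain re example, independent of Spack's tokenizer:

    import re

    capturing = re.compile(r"(%\s*([a-zA-Z_0-9][a-zA-Z_0-9\-]*))")
    non_capturing = re.compile(r"(?:%\s*(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*))")

    text = "%gcc"
    # Both patterns match the same overall text...
    assert capturing.match(text).group(0) == non_capturing.match(text).group(0) == "%gcc"
    # ...but only the capturing version keeps per-group state around.
    assert capturing.match(text).groups() == ("%gcc", "gcc")
    assert non_capturing.match(text).groups() == ()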
type="build") + + parallel = False From 2e759110783e998b1530582b9069553ba9eecdbb Mon Sep 17 00:00:00 2001 From: jmuddnv <143751186+jmuddnv@users.noreply.github.com> Date: Mon, 9 Oct 2023 03:35:59 -0700 Subject: [PATCH 080/408] Adding NVIDIA HPC SDK 23.9 (#40371) --- var/spack/repos/builtin/packages/nvhpc/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py index 7579afd87346f8..2cabbfa056f90d 100644 --- a/var/spack/repos/builtin/packages/nvhpc/package.py +++ b/var/spack/repos/builtin/packages/nvhpc/package.py @@ -21,6 +21,20 @@ # - package key must be in the form '{os}-{arch}' where 'os' is in the # format returned by platform.system() and 'arch' by platform.machine() _versions = { + "23.9": { + "Linux-aarch64": ( + "dd32ae4233438adb71b2b4f8891f04802fdf90f67036ecf18bfde1b6043a03c3", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_aarch64_cuda_multi.tar.gz", + ), + "Linux-ppc64le": ( + "984d61695499db098fd32be8345c1f7d7c637ea3bdb29cef17aad656f16b000f", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_ppc64le_cuda_multi.tar.gz", + ), + "Linux-x86_64": ( + "ecf343ecad2398e21c8d7f24a580b2932348017dfd8ea38c1ef31b37114b2d4b", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_x86_64_cuda_multi.tar.gz", + ), + }, "23.7": { "Linux-aarch64": ( "d3b9b674045e6e17156b298941be4e1e1e7dea6a3c1938f14ad653b180860ff2", From 1c373ed4c96a3962ab7a23c09022ff6af608b4e4 Mon Sep 17 00:00:00 2001 From: Mike Renfro Date: Mon, 9 Oct 2023 05:47:54 -0500 Subject: [PATCH 081/408] velvet: improved variants (#40225) --- .../repos/builtin/packages/velvet/package.py | 49 ++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/velvet/package.py b/var/spack/repos/builtin/packages/velvet/package.py index 89b88feaffe931..55c76fee335792 100644 --- a/var/spack/repos/builtin/packages/velvet/package.py +++ b/var/spack/repos/builtin/packages/velvet/package.py @@ -6,6 +6,13 @@ from spack.package import * +def is_positive_int(x): + if x.isdigit() and int(x) > 0: + return True + else: + return False + + class Velvet(MakefilePackage): """Velvet is a de novo genomic assembler specially designed for short read sequencing technologies.""" @@ -13,14 +20,54 @@ class Velvet(MakefilePackage): homepage = "https://www.ebi.ac.uk/~zerbino/velvet/" url = "https://www.ebi.ac.uk/~zerbino/velvet/velvet_1.2.10.tgz" + maintainers("snehring") + version("1.2.10", sha256="884dd488c2d12f1f89cdc530a266af5d3106965f21ab9149e8cb5c633c977640") + variant( + "categories", + default="2", + description="Number of channels which can be handled independently", + values=is_positive_int, + ) + variant( + "maxkmerlength", + default="31", + description="Longest kmer size you can use in an analysis", + values=is_positive_int, + ) + variant("bigassembly", default=False, description="Allow assemblies with more than 2^31 reads") + variant( + "vbigassembly", + default=False, + description="Allow unsigned 64-bit array index values (also enables bigassembly)", + ) + variant( + "longsequences", default=False, description="Allow assembling contigs longer than 32kb" + ) + variant("openmp", default=False, description="Enable multithreading") + variant("single_cov_cat", default=False, description="Per-library coverage") + depends_on("zlib-api") def edit(self, spec, prefix): + makefile = FileFilter("Makefile") if 
spec.target.family == "aarch64": - makefile = FileFilter("Makefile") makefile.filter("-m64", "") + maxkmerlength = self.spec.variants["maxkmerlength"].value + categories = self.spec.variants["categories"].value + makefile.filter(r"^MAXKMERLENGTH\s*=\s*.*", f"MAXKMERLENGTH = {maxkmerlength}") + makefile.filter(r"^CATEGORIES\s*=\s*.*", f"CATEGORIES = {categories}") + if "+bigassembly" in self.spec: + makefile.filter("^ifdef BIGASSEMBLY", "BIGASSEMBLY=1\nifdef BIGASSEMBLY") + if "+vbigassembly" in self.spec: + makefile.filter("^ifdef VBIGASSEMBLY", "VBIGASSEMBLY=1\nifdef VBIGASSEMBLY") + if "+longsequences" in self.spec: + makefile.filter("^ifdef LONGSEQUENCES", "LONGSEQUENCES=1\nifdef LONGSEQUENCES") + if "+openmp" in self.spec: + makefile.filter("^ifdef OPENMP", "OPENMP=1\nifdef OPENMP") + if "+single_cov_cat" in self.spec: + makefile.filter("^ifdef SINGLE_COV_CAT", "SINGLE_COV_CAT=1\nifdef SINGLE_COV_CAT") def install(self, spec, prefix): mkdirp(prefix.bin) From 79e560b94b6ce27997e5cd3d962008ada5463dda Mon Sep 17 00:00:00 2001 From: Jordan Galby <67924449+Jordan474@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:17:58 +0200 Subject: [PATCH 082/408] elfutils: fix +debuginfod again with new libarchive versions (#40314) --- var/spack/repos/builtin/packages/elfutils/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index c7417da21d5f06..15f0b11130e7bf 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -85,10 +85,10 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): provides("elf@1") - # libarchive with iconv doesn't configure. + # libarchive with iconv doesn't configure (still broken as of libarchive@3.7.1) # see https://github.com/spack/spack/issues/36710 # and https://github.com/libarchive/libarchive/issues/1819 - conflicts("^libarchive@3.6.2 +iconv", when="+debuginfod") + conflicts("^libarchive +iconv", when="+debuginfod") # https://sourceware.org/bugzilla/show_bug.cgi?id=24964 conflicts("%apple-clang") From a8c62f31a78f5d3be216bfcc2230056b17c82210 Mon Sep 17 00:00:00 2001 From: Thomas Dickerson Date: Mon, 9 Oct 2023 08:21:13 -0400 Subject: [PATCH 083/408] racket packages: fix typo after multiple build systems support (#40088) --- lib/spack/spack/build_systems/racket.py | 2 +- .../packages/rkt-racket-lib/package.py | 20 +++++++++++++++++++ .../packages/rkt-scheme-lib/package.py | 1 + 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/rkt-racket-lib/package.py diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py index 7dd0b23b017f71..50c4944a5d7ccb 100644 --- a/lib/spack/spack/build_systems/racket.py +++ b/lib/spack/spack/build_systems/racket.py @@ -64,7 +64,7 @@ class RacketBuilder(spack.builder.Builder): @property def subdirectory(self): - if self.racket_name: + if self.pkg.racket_name: return "pkgs/{0}".format(self.pkg.racket_name) return None diff --git a/var/spack/repos/builtin/packages/rkt-racket-lib/package.py b/var/spack/repos/builtin/packages/rkt-racket-lib/package.py new file mode 100644 index 00000000000000..a64e9527638f64 --- /dev/null +++ b/var/spack/repos/builtin/packages/rkt-racket-lib/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class RktRacketLib(RacketPackage): + """Stub package for packages which are currently part of core + racket installation (but which may change in the future).""" + + git = "ssh://git@github.com/racket/racket.git" + + maintainers = ["elfprince13"] + + version("8.3", commit="cab83438422bfea0e4bd74bc3e8305e6517cf25f") # tag="v8.3" + depends_on("racket@8.3", type=("build", "run"), when="@8.3") + + racket_name = "racket-lib" diff --git a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py index e366a23b7951f2..d29ae350b7779a 100644 --- a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py @@ -17,3 +17,4 @@ class RktSchemeLib(RacketPackage): depends_on("rkt-base@8.3", type=("build", "run"), when="@8.3") racket_name = "scheme-lib" + subdirectory = None From a597647d14e91b9d8e854f0932e040b15a90254c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 9 Oct 2023 07:33:45 -0500 Subject: [PATCH 084/408] py-jsonargparse: add v4.25.0 (#40185) --- .../packages/py-jsonargparse/package.py | 4 ++++ .../builtin/packages/py-tensorflow/package.py | 3 +++ .../builtin/packages/py-wheel/package.py | 21 +++++++++++++------ 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-jsonargparse/package.py b/var/spack/repos/builtin/packages/py-jsonargparse/package.py index 2f4584e48b2352..a84a23c7fc6926 100644 --- a/var/spack/repos/builtin/packages/py-jsonargparse/package.py +++ b/var/spack/repos/builtin/packages/py-jsonargparse/package.py @@ -15,13 +15,17 @@ class PyJsonargparse(PythonPackage): homepage = "https://github.com/omni-us/jsonargparse" pypi = "jsonargparse/jsonargparse-4.19.0.tar.gz" + version("4.25.0", sha256="4eaadae69c387a3d83a76b1eaf20ca98d5274d8637f180dca0754ce5405adb6b") version("4.19.0", sha256="63aa3c7bbdb219d0f254a5ae86f3d54384ebc1ffa905e776cc19283bc843787b") variant("signatures", default=False, description="Enable signature features") + depends_on("py-setuptools@65.6.3:", when="@4.25:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-wheel@0.38.4:", when="@4.25:", type="build") depends_on("py-pyyaml@3.13:", type=("build", "run")) with when("+signatures"): + depends_on("py-typing-extensions@3.10:", when="@4.25: ^python@:3.9", type=("build", "run")) depends_on("py-docstring-parser@0.15:", type=("build", "run")) depends_on("py-typeshed-client@2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index 86f5759cff5a7c..cd90b532c93336 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -437,6 +437,9 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts("~rocm", when="@2.7.4-rocm-enhanced") conflicts("+rocm", when="@:2.7.4-a,2.7.4.0:") + # wheel 0.40 upgrades vendored packaging, trips over tensorflow-io-gcs-filesystem identifier + conflicts("^py-wheel@0.40:", when="@2.11:2.13") + # https://www.tensorflow.org/install/source#tested_build_configurations conflicts("%gcc@:9.3.0", when="@2.9:") conflicts("%gcc@:7.3.0", when="@1.15:") diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py index 770397b09feb9b..66192db3298d57 
100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -10,11 +10,14 @@ class PyWheel(Package, PythonExtension): """A built-package format for Python.""" homepage = "https://github.com/pypa/wheel" - url = ( - "https://files.pythonhosted.org/packages/py2.py3/w/wheel/wheel-0.34.2-py2.py3-none-any.whl" - ) + url = "https://files.pythonhosted.org/packages/py3/w/wheel/wheel-0.41.2-py3-none-any.whl" list_url = "https://pypi.org/simple/wheel/" + version( + "0.41.2", + sha256="75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8", + expand=False, + ) version( "0.37.1", sha256="4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a", @@ -73,11 +76,17 @@ class PyWheel(Package, PythonExtension): extends("python") depends_on("python +ctypes", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", when="@0.34:", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:", when="@0.30:", type=("build", "run")) - depends_on("python@2.6:2.8,3.2:", type=("build", "run")) + depends_on("python@3.7:", when="@0.38:", type=("build", "run")) depends_on("py-pip", type="build") + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/{0}/w/wheel/wheel-{1}-{0}-none-any.whl" + if version >= Version("0.38"): + python = "py3" + else: + python = "py2.py3" + return url.format(python, version) + def install(self, spec, prefix): # To build wheel from source, you need setuptools and wheel already installed. # We get around this by using a pre-built wheel, see: From cba3edd6609a3f4e87d88ea486853c56b0da9201 Mon Sep 17 00:00:00 2001 From: Jacob King <12665462+jacobrking@users.noreply.github.com> Date: Mon, 9 Oct 2023 06:39:40 -0600 Subject: [PATCH 085/408] nimrod-aai: add v23.9. 
(#40303) --- .../repos/builtin/packages/nimrod-aai/package.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/nimrod-aai/package.py b/var/spack/repos/builtin/packages/nimrod-aai/package.py index 90bdd0832af9d5..1ad7101e1a918c 100644 --- a/var/spack/repos/builtin/packages/nimrod-aai/package.py +++ b/var/spack/repos/builtin/packages/nimrod-aai/package.py @@ -14,13 +14,14 @@ class NimrodAai(CMakePackage): homepage = "https://gitlab.com/NIMRODteam/nimrod-abstract" url = ( - "https://gitlab.com/NIMRODteam/nimrod-abstract/-/archive/23.6/nimrod-abstract-23.6.tar.gz" + "https://gitlab.com/NIMRODteam/nimrod-abstract/-/archive/23.9/nimrod-abstract-23.9.tar.gz" ) git = "https://gitlab.com/NIMRODteam/nimrod-abstract.git" maintainers("jacobrking") version("main", branch="main") + version("23.9", sha256="212d591c5a5e7a394b56a5cf2f92cc69feafc49dd5f042fa95eeb6441649390b") version("23.6", sha256="1794b89a5a64ff2b3c548818b90d17eef85d819ba4f63a76c41a682d5b76c14f") variant("debug", default=False, description="Whether to enable debug code") @@ -41,8 +42,9 @@ class NimrodAai(CMakePackage): ) depends_on("cmake", type="build") - depends_on("hdf5+fortran", type="build") depends_on("mpi", when="+mpi") + depends_on("hdf5+fortran~mpi", type="build", when="~mpi") + depends_on("hdf5+fortran+mpi", type="build", when="+mpi") def cmake_args(self): args = [ @@ -62,3 +64,9 @@ def cmake_args(self): ] args.append(addl_args) return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def check(self): + with working_dir(self.builder.build_directory): + ctest("--output-on-failure") From b398456d483ebab7ed3eb6639662a7d3b9d2ae3a Mon Sep 17 00:00:00 2001 From: Gavin John Date: Mon, 9 Oct 2023 07:46:38 -0500 Subject: [PATCH 086/408] py-s3cmd: Add new versions (#40212) * Add new versions of py-s3cmd * Use correct hashes --- var/spack/repos/builtin/packages/py-s3cmd/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-s3cmd/package.py b/var/spack/repos/builtin/packages/py-s3cmd/package.py index b4400927ee766d..532df1438219b5 100644 --- a/var/spack/repos/builtin/packages/py-s3cmd/package.py +++ b/var/spack/repos/builtin/packages/py-s3cmd/package.py @@ -18,6 +18,9 @@ class PyS3cmd(PythonPackage): homepage = "https://github.com/s3tools/s3cmd" url = "https://github.com/s3tools/s3cmd/releases/download/v2.0.2/s3cmd-2.0.2.tar.gz" + version("2.3.0", sha256="15330776e7ff993d8ae0ac213bf896f210719e9b91445f5f7626a8fa7e74e30b") + version("2.2.0", sha256="2a7d2afe09ce5aa9f2ce925b68c6e0c1903dd8d4e4a591cd7047da8e983a99c3") + version("2.1.0", sha256="966b0a494a916fc3b4324de38f089c86c70ee90e8e1cae6d59102103a4c0cc03") version("2.0.2", sha256="9f244c0c10d58d0ccacbba3aa977463e32491bdd9d95109e27b67e4d46c5bd52") version("2.0.1", sha256="caf09f1473301c442fba6431c983c361c9af8bde503dac0953f0d2f8f2c53c8f") version("2.0.0", sha256="bf2a50802f1031cba83e99be488965803899d8ab0228c800c833b55c7269cd48") From 7994211ae6d9ee71766fe4d5855f2e1f50815248 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 15:02:14 +0200 Subject: [PATCH 087/408] build(deps): bump python-levenshtein in /lib/spack/docs (#40220) Bumps [python-levenshtein](https://github.com/maxbachmann/python-Levenshtein) from 0.21.1 to 0.22.0. 
- [Release notes](https://github.com/maxbachmann/python-Levenshtein/releases) - [Changelog](https://github.com/maxbachmann/python-Levenshtein/blob/main/HISTORY.md) - [Commits](https://github.com/maxbachmann/python-Levenshtein/compare/v0.21.1...v0.22.0) --- updated-dependencies: - dependency-name: python-levenshtein dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 65b6386a8264f5..4744d1339c062d 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -2,7 +2,7 @@ sphinx==7.2.6 sphinxcontrib-programoutput==0.17 sphinx_design==0.5.0 sphinx-rtd-theme==1.3.0 -python-levenshtein==0.21.1 +python-levenshtein==0.22.0 docutils==0.18.1 pygments==2.16.1 urllib3==2.0.6 From ba03914a313d6bdb34d9cdcc0d9dc287688e7214 Mon Sep 17 00:00:00 2001 From: Patrick Broderick <50112491+broderickpt@users.noreply.github.com> Date: Mon, 9 Oct 2023 09:06:59 -0400 Subject: [PATCH 088/408] fftx: add v1.1.3 (#40283) --- var/spack/repos/builtin/packages/fftx/package.py | 3 ++- var/spack/repos/builtin/packages/spiral-package-jit/package.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/fftx/package.py b/var/spack/repos/builtin/packages/fftx/package.py index 301821a0eca3dc..b9229216ff1510 100644 --- a/var/spack/repos/builtin/packages/fftx/package.py +++ b/var/spack/repos/builtin/packages/fftx/package.py @@ -14,13 +14,14 @@ class Fftx(CMakePackage, CudaPackage, ROCmPackage): operations composed of linear operations combined with DFT transforms.""" homepage = "https://spiralgen.com" - url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.1.2.tar.gz" + url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.1.3.tar.gz" git = "https://github.com/spiral-software/fftx.git" maintainers("spiralgen") version("develop", branch="develop") version("main", branch="main") + version("1.1.3", sha256="17ed0baf9c2dcf30c789fdae530e006ae3ff2d2c9006989b1e6348e4ae50cef9") version("1.1.2", sha256="b2c4a7791305481af9e1bd358c1215efa4506c91c943cddca3780a1ccbc27810") version("1.1.1", sha256="5cbca66ef09eca02ee8f336f58eb45cfac69cfb29cd6eb945852ad74085d8a60") version("1.1.0", sha256="a6f95605abc11460bbf51839727a456a31488e27e12a970fc29a1b8c42f4e3b5") diff --git a/var/spack/repos/builtin/packages/spiral-package-jit/package.py b/var/spack/repos/builtin/packages/spiral-package-jit/package.py index cec1f02f33bdb2..c0d37abfbedc61 100644 --- a/var/spack/repos/builtin/packages/spiral-package-jit/package.py +++ b/var/spack/repos/builtin/packages/spiral-package-jit/package.py @@ -11,7 +11,7 @@ class SpiralPackageJit(Package): Compilation (RTC).""" homepage = "https://spiralgen.com" - url = "https://github.com/spiral-software/spiral-package-jit/archive/refs/tags/1.0.2.tar.gz" + url = "https://github.com/spiral-software/spiral-package-jit/archive/refs/tags/1.0.3.tar.gz" git = "https://github.com/spiral-software/spiral-package-jit.git" maintainers("spiralgen") @@ -21,6 +21,7 @@ class SpiralPackageJit(Package): version("develop", branch="develop") version("main", branch="main") + version("1.0.3", sha256="97ff0d7d46ed4e53b1971ca279a30b27f0d9b328c70585d4cc0c56dfe6701894") version("1.0.2", 
sha256="d7fac0493ac406a8b1874491223c3a9a1c6727ea1aa39de7ef4694c59aac9d26") version("1.0.1", sha256="acf22db04e705276f06642d7f2ebf161f6c347f93bb1bdd6e3ddcfc4b7be5707") From 0684b98d57169b153dac5204da0584a664547dd4 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 9 Oct 2023 08:09:59 -0500 Subject: [PATCH 089/408] py-pydevtool: add new package (#40377) --- .../repos/builtin/packages/py-doit/package.py | 20 +++++++++++++++++++ .../builtin/packages/py-pydevtool/package.py | 19 ++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-doit/package.py create mode 100644 var/spack/repos/builtin/packages/py-pydevtool/package.py diff --git a/var/spack/repos/builtin/packages/py-doit/package.py b/var/spack/repos/builtin/packages/py-doit/package.py new file mode 100644 index 00000000000000..aad45a5c2fa6a8 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-doit/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDoit(PythonPackage): + """doit - Automation Tool.""" + + homepage = "http://pydoit.org/" + pypi = "doit/doit-0.36.0.tar.gz" + + version("0.36.0", sha256="71d07ccc9514cb22fe59d98999577665eaab57e16f644d04336ae0b4bae234bc") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cloudpickle", type=("build", "run")) + depends_on("py-importlib-metadata@4.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pydevtool/package.py b/var/spack/repos/builtin/packages/py-pydevtool/package.py new file mode 100644 index 00000000000000..ec9b69445fd66e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pydevtool/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPydevtool(PythonPackage): + """CLI dev tools powered by pydoit.""" + + homepage = "https://github.com/pydoit/pydevtool" + pypi = "pydevtool/pydevtool-0.3.0.tar.gz" + + version("0.3.0", sha256="25e3ba4f3d33ccac33ee2b9775995848d49e9b318b7a146477fb5d52f786fc8a") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-doit@0.36:", type=("build", "run")) From a2e33049158c713ab89f293404094eef9f051de1 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 9 Oct 2023 15:41:36 +0200 Subject: [PATCH 090/408] docs: update Spack prerequisites (#40381) --- lib/spack/docs/tables/system_prerequisites.csv | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/spack/docs/tables/system_prerequisites.csv b/lib/spack/docs/tables/system_prerequisites.csv index 0bb82638eb9d70..f275bcef5be291 100644 --- a/lib/spack/docs/tables/system_prerequisites.csv +++ b/lib/spack/docs/tables/system_prerequisites.csv @@ -1,9 +1,8 @@ Name, Supported Versions, Notes, Requirement Reason -Python, 3.6--3.11, , Interpreter for Spack +Python, 3.6--3.12, , Interpreter for Spack C/C++ Compilers, , , Building software make, , , Build software patch, , , Build software -bash, , , Compiler wrappers tar, , , Extract/create archives gzip, , , Compress/Decompress archives unzip, , , Compress/Decompress archives From dc4ba547c6ddd5e2c365951bce7e60c55bfccc68 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 9 Oct 2023 17:22:27 +0200 Subject: [PATCH 091/408] unparse: drop python 3.3 remnants (#40331) --- lib/spack/spack/util/unparse/unparser.py | 28 +++++++----------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index 375fb543a31494..932ab8d0205b03 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -938,16 +938,10 @@ def visit_arguments(self, node): self.write(", ") self.write("*") if node.vararg: - if hasattr(node.vararg, "arg"): - self.write(node.vararg.arg) - if node.vararg.annotation: - self.write(": ") - self.dispatch(node.vararg.annotation) - else: - self.write(node.vararg) - if getattr(node, "varargannotation", None): - self.write(": ") - self.dispatch(node.varargannotation) + self.write(node.vararg.arg) + if node.vararg.annotation: + self.write(": ") + self.dispatch(node.vararg.annotation) # keyword-only arguments if getattr(node, "kwonlyargs", False): @@ -967,16 +961,10 @@ def visit_arguments(self, node): first = False else: self.write(", ") - if hasattr(node.kwarg, "arg"): - self.write("**" + node.kwarg.arg) - if node.kwarg.annotation: - self.write(": ") - self.dispatch(node.kwarg.annotation) - else: - self.write("**" + node.kwarg) - if getattr(node, "kwargannotation", None): - self.write(": ") - self.dispatch(node.kwargannotation) + self.write("**" + node.kwarg.arg) + if node.kwarg.annotation: + self.write(": ") + self.dispatch(node.kwarg.annotation) def visit_keyword(self, node): if node.arg is None: From 652bdcc2c8f773ba5c13c8f68e3c3dc6265e8e46 Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:29:18 -0600 Subject: [PATCH 092/408] Add: flux-pmix 0.4.0 (#40323) * Automated deployment to update package flux-pmix 2023-10-05 * Pin exactly to flux-core 0.49.0 when between 0.3 and 0.4 * Update 
var/spack/repos/builtin/packages/flux-pmix/package.py Co-authored-by: Mark Grondona * Update var/spack/repos/builtin/packages/flux-pmix/package.py Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --------- Co-authored-by: github-actions Co-authored-by: Mark Grondona Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- var/spack/repos/builtin/packages/flux-pmix/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/flux-pmix/package.py b/var/spack/repos/builtin/packages/flux-pmix/package.py index 5969da47b2f50f..db6d580a61c5c3 100644 --- a/var/spack/repos/builtin/packages/flux-pmix/package.py +++ b/var/spack/repos/builtin/packages/flux-pmix/package.py @@ -18,10 +18,11 @@ class FluxPmix(AutotoolsPackage): maintainers("grondo") version("main", branch="main") + version("0.4.0", sha256="f7f58891fc9d9a97a0399b3ab186f2cae30a75806ba0b4d4c1307f07b3f6d1bc") version("0.3.0", sha256="88edb2afaeb6058b56ff915105a36972acc0d83204cff7f4a4d2f65a5dee9d34") version("0.2.0", sha256="d09f1fe6ffe54f83be4677e1e727640521d8110090515d94013eba0f58216934") - depends_on("flux-core@0.49.0:", when="@0.3.0:") + depends_on("flux-core@0.49:", when="@0.3:") depends_on("flux-core@0.30.0:") depends_on("pmix@v4.1.0:") depends_on("openmpi") From 6f6e0ac6adc605857ff15b398550c8b188310117 Mon Sep 17 00:00:00 2001 From: George Young Date: Mon, 9 Oct 2023 17:54:44 +0100 Subject: [PATCH 093/408] paintor: new package @3.0 (#40359) * paintor: new package @3.0 * Update var/spack/repos/builtin/packages/paintor/package.py --------- Co-authored-by: LMS Bioinformatics Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../repos/builtin/packages/paintor/package.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 var/spack/repos/builtin/packages/paintor/package.py diff --git a/var/spack/repos/builtin/packages/paintor/package.py b/var/spack/repos/builtin/packages/paintor/package.py new file mode 100644 index 00000000000000..421a92cb1160c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/paintor/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Paintor(MakefilePackage): + """Probabilistic Annotation integrator. 
Fast, integrative fine mapping with functional + data""" + + homepage = "https://github.com/gkichaev/PAINTOR_V3.0" + url = "https://github.com/gkichaev/PAINTOR_V3.0/archive/refs/tags/3.0.tar.gz" + + version("3.0", sha256="cc39d3c334cc6d787e4f04847192c9d0185025a2ca46910bd38901b6679d198f") + + depends_on("nlopt") + depends_on("eigen") + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("CC = g\\+\\+", f"CC = {spack_cxx}") + makefile.filter( + r"(.*)-I/\$\(curr\)/eigen/Eigen(.*)", + r"\1-I{}/eigen3/Eigen\2".format(spec["eigen"].prefix.include), + ) + makefile.filter(r"(.*)-L/\$\{curr}/lib(.*)", r"\1-L{}\2".format(spec["nlopt"].prefix.lib)) + makefile.filter( + r"(.*)-I/\${curr}/include(.*)", r"\1-I{}\2".format(spec["nlopt"].prefix.include) + ) + + @run_after("install") + def mv_binary(self): + mkdirp(self.prefix.bin) + with working_dir(self.build_directory): + install("PAINTOR", self.prefix.bin) From f64a695a59eb66ef335ecd5793b46cb91e4ff70c Mon Sep 17 00:00:00 2001 From: kenche-linaro <133872317+kenche-linaro@users.noreply.github.com> Date: Mon, 9 Oct 2023 20:14:14 +0100 Subject: [PATCH 094/408] linaro-forge: added package file for rebranded product (#39587) --- .../builtin/packages/linaro-forge/package.py | 134 ++++++++++++++++++ 1 file changed, 134 insertions(+) create mode 100644 var/spack/repos/builtin/packages/linaro-forge/package.py diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py new file mode 100644 index 00000000000000..7724f87a29ce6d --- /dev/null +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -0,0 +1,134 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +import platform +import subprocess + +from spack.package import * + + +class LinaroForge(Package): + """Build reliable and optimized code for the right results on multiple + Server and HPC architectures, from the latest compilers and C++ standards + to Intel, 64-bit Arm, AMD, OpenPOWER and Nvidia GPU hardware. 
Linaro Forge + combines Linaro DDT, the leading debugger for time-saving high performance + application debugging, Linaro MAP, the trusted performance profiler for + invaluable optimization advice across native and Python HPC codes, and + Linaro Performance Reports for advanced reporting capabilities.""" + + homepage = "https://www.linaroforge.com" + maintainers("kenche-linaro") + + if platform.machine() in ["aarch64", "arm64"]: + version( + "23.0.3", sha256="a7e23ef2a187f8e2d6a6692cafb931c9bb614abf58e45ea9c2287191c4c44f02" + ) + version( + "23.0.2", sha256="698fda8f7cc05a06909e5dcc50b9956f94135d7b12e84ffb21999a5b45c70c74" + ) + version( + "23.0.1", sha256="552e4a3f408ed4eb5f1bfbb83c94530ee8733579c56c3e98050c0ad2d43eb433" + ) + version("23.0", sha256="7ae20bb27d539751d1776d1e09a65dcce821fc6a75f924675439f791261783fb") + version( + "22.1.4", sha256="4e2af481a37b4c99dba0de6fac75ac945316955fc4170d06e321530adea7ac9f" + ) + version( + "21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb" + ) + elif platform.machine() == "ppc64le": + version( + "23.0.3", sha256="5ff9770f4bc4a2df4bac8a2544a9d6bad9fba2556420fa2e659e5c21e741caf7" + ) + version( + "23.0.2", sha256="181b157bdfc8609b49addf63023f920ebb609dbc9a126e9dc26605188b756ff0" + ) + version( + "23.0.1", sha256="08cffef2195ea96872d56e827f320eed40aaa82fd3b62d4c661a598fb2fb3a47" + ) + version("23.0", sha256="0962c7e0da0f450cf6daffe1156e1f59e02c9f643df458ec8458527afcde5b4d") + version( + "22.1.3", sha256="6479c3a4ae6ce6648c37594eb0266161f06d9f89010fca9299855848661dda49" + ) + version( + "22.0.4", sha256="f4cb5bcbaa67f9209299fe4653186a2829760b8b16a2883913aa43766375b04c" + ) + version( + "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" + ) + elif platform.machine() == "x86_64": + version( + "23.0.3", sha256="f2a010b94838f174f057cd89d12d03a89ca946163536eab178dd1ec877cdc27f" + ) + version( + "23.0.2", sha256="565f0c073c6c8cbb06c062ca414e3f6ff8c6ca6797b03d247b030a9fbc55a5b1" + ) + version( + "23.0.1", sha256="1d681891c0c725363f0f45584c9b79e669d5c9782158453b7d24b4b865d72755" + ) + version("23.0", sha256="f4ab12289c992dd07cb1a15dd985ef4713d1f9c0cf362ec5e9c995cca9b1cf81") + version( + "22.1.3", sha256="4f8a8b1df6ad712e89c82eedf4bd85b93b57b3c8d5b37d13480ff058fa8f4467" + ) + version( + "22.0.4", sha256="a2c8c1da38b9684d7c4656a98b3fc42777b03fd474cd0bf969324804f47587e5" + ) + version( + "21.1.3", sha256="03dc82f1d075deb6f08d1e3e6592dc9b630d406c08a1316d89c436b5874f3407" + ) + + variant( + "probe", + default=False, + description='Detect available PMU counters via "forge-probe" during install', + ) + + variant("accept-eula", default=False, description="Accept the EULA") + + # forge-probe executes with "/usr/bin/env python" + depends_on("python@2.7:", type="build", when="+probe") + + # Licensing + license_required = True + license_comment = "#" + license_files = ["licences/Licence"] + license_vars = [ + "ALLINEA_LICENSE_DIR", + "ALLINEA_LICENCE_DIR", + "ALLINEA_LICENSE_FILE", + "ALLINEA_LICENCE_FILE", + ] + license_url = "https://docs.linaroforge.com/latest/html/licenceserver/index.html" + + def url_for_version(self, version): + pre = "arm" if version < Version("23.0") else "linaro" + return f"https://downloads.linaroforge.com/{version}/{pre}-forge-{version}-linux-{platform.machine()}.tar" + + @run_before("install") + def abort_without_eula_acceptance(self): + install_example = "spack install linaro-forge +accept-eula" + license_terms_path = os.path.join(self.stage.source_path, "license_terms") + if not 
self.spec.variants["accept-eula"].value: + raise InstallError( + "\n\n\nNOTE:\nUse +accept-eula " + + "during installation " + + "to accept the license terms in:\n" + + " {0}\n".format(os.path.join(license_terms_path, "license_agreement.txt")) + + " {0}\n\n".format(os.path.join(license_terms_path, "supplementary_terms.txt")) + + "Example: '{0}'\n".format(install_example) + ) + + def install(self, spec, prefix): + subprocess.call(["./textinstall.sh", "--accept-license", prefix]) + if spec.satisfies("+probe"): + probe = join_path(prefix, "bin", "forge-probe") + subprocess.call([probe, "--install", "global"]) + + def setup_run_environment(self, env): + # Only PATH is needed for Forge. + # Adding lib to LD_LIBRARY_PATH can cause conflicts with Forge's internal libs. + env.clear() + env.prepend_path("PATH", join_path(self.prefix, "bin")) From ce93a7030cc988649bf082fbb370deaaff22e9af Mon Sep 17 00:00:00 2001 From: Dennis Klein Date: Tue, 10 Oct 2023 01:30:46 +0200 Subject: [PATCH 095/408] libzmq: add v4.3.5 (#40383) --- var/spack/repos/builtin/packages/libzmq/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libzmq/package.py b/var/spack/repos/builtin/packages/libzmq/package.py index 207b8672344314..086b0a6e4afcdc 100644 --- a/var/spack/repos/builtin/packages/libzmq/package.py +++ b/var/spack/repos/builtin/packages/libzmq/package.py @@ -12,12 +12,13 @@ class Libzmq(AutotoolsPackage): """The ZMQ networking/concurrency library and core API""" homepage = "https://zguide.zeromq.org/" - url = "https://github.com/zeromq/libzmq/releases/download/v4.3.2/zeromq-4.3.2.tar.gz" + url = "https://github.com/zeromq/libzmq/releases/download/v4.3.5/zeromq-4.3.5.tar.gz" git = "https://github.com/zeromq/libzmq.git" maintainers("dennisklein") version("master", branch="master") + version("4.3.5", sha256="6653ef5910f17954861fe72332e68b03ca6e4d9c7160eb3a8de5a5a913bfab43") version("4.3.4", sha256="c593001a89f5a85dd2ddf564805deb860e02471171b3f204944857336295c3e5") version("4.3.3", sha256="9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2") version("4.3.2", sha256="ebd7b5c830d6428956b67a0454a7f8cbed1de74b3b01e5c33c5378e22740f763") From e08796046fd07f1752b875e437aa3e2f528802d7 Mon Sep 17 00:00:00 2001 From: Auriane R <48684432+aurianer@users.noreply.github.com> Date: Tue, 10 Oct 2023 04:23:54 +0200 Subject: [PATCH 096/408] Add pika 0.19.1 (#40385) --- var/spack/repos/builtin/packages/pika/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index f581a6cd0c8602..50ff40f4112d90 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d") version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") version("0.18.0", sha256="f34890e0594eeca6ac57f2b988d0807b502782817e53a7f7043c3f921b08c99f") version("0.17.0", sha256="717429fc1bc986d62cbec190a69939e91608122d09d54bda1b028871c9ca9ad4") From 1d9182f5a65c69e544f584d005399eaf148d9cff Mon Sep 17 00:00:00 2001 From: Tom Epperly Date: Mon, 9 Oct 2023 19:28:45 -0700 Subject: [PATCH 097/408] Add a new release sha256 hash (#37680) --- 
 var/spack/repos/builtin/packages/caliper/package.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py
index 5cdbadacf9ee8d..66bd9cc84cd0ad 100644
--- a/var/spack/repos/builtin/packages/caliper/package.py
+++ b/var/spack/repos/builtin/packages/caliper/package.py
@@ -27,6 +27,7 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage):
 
     version("master", branch="master")
     version("2.10.0", sha256="14c4fb5edd5e67808d581523b4f8f05ace8549698c0e90d84b53171a77f58565")
+    version("2.9.1", sha256="4771d630de505eff9227e0ec498d0da33ae6f9c34df23cb201b56181b8759e9e")
     version("2.9.0", sha256="507ea74be64a2dfd111b292c24c4f55f459257528ba51a5242313fa50978371f")
     version("2.8.0", sha256="17807b364b5ac4b05997ead41bd173e773f9a26ff573ff2fe61e0e70eab496e4")
     version(
@@ -117,7 +118,10 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage):
     conflicts("+rocm+cuda")
 
     patch("for_aarch64.patch", when="target=aarch64:")
-    patch("sampler-service-missing-libunwind-include-dir.patch", when="@2.9.0 +libunwind +sampler")
+    patch(
+        "sampler-service-missing-libunwind-include-dir.patch",
+        when="@2.9.0:2.9.1 +libunwind +sampler",
+    )
 
     def cmake_args(self):
         spec = self.spec

From 8344afcfa513e2e4ef1eafa0e9668667a8d1a561 Mon Sep 17 00:00:00 2001
From: Andrew W Elble
Date: Mon, 9 Oct 2023 22:33:40 -0400
Subject: [PATCH 098/408] openmm: new version 8.0.0 (#40396)

---
 var/spack/repos/builtin/packages/openmm/package.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/var/spack/repos/builtin/packages/openmm/package.py b/var/spack/repos/builtin/packages/openmm/package.py
index 3b8329fdea1683..6bdac8640351d2 100644
--- a/var/spack/repos/builtin/packages/openmm/package.py
+++ b/var/spack/repos/builtin/packages/openmm/package.py
@@ -17,6 +17,7 @@ class Openmm(CMakePackage, CudaPackage):
     homepage = "https://openmm.org/"
     url = "https://github.com/openmm/openmm/archive/7.4.1.tar.gz"
 
+    version("8.0.0", sha256="dc63d7b47c8bb7b169c409cfd63d909ed0ce1ae114d37c627bf7a4231acf488e")
     version("7.7.0", sha256="51970779b8dc639ea192e9c61c67f70189aa294575acb915e14be1670a586c25")
     version("7.6.0", sha256="5a99c491ded9ba83ecc3fb1d8d22fca550f45da92e14f64f25378fda0048a89d")
     version("7.5.1", sha256="c88d6946468a2bde2619acb834f57b859b5e114a93093cf562165612e10f4ff7")
@@ -32,7 +33,7 @@ class Openmm(CMakePackage, CudaPackage):
     depends_on("doxygen@:1.9.1", type="build", when="@:7.6.0")
     depends_on("doxygen", type="build", when="@7.7:")
     depends_on("swig", type="build")
-    depends_on("fftw")
+    depends_on("fftw", when="@:7")
     depends_on("py-cython", type="build")
     depends_on("py-numpy", type=("build", "run"))
     depends_on("cuda", when="+cuda", type=("build", "link", "run"))

From 7e946b38660fb393d3a31832f58b205364f55ea9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20Aum=C3=BCller?=
Date: Tue, 10 Oct 2023 04:43:56 +0200
Subject: [PATCH 099/408] rkcommon: checksum 0.11.0 (#40391)

---
 var/spack/repos/builtin/packages/rkcommon/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/builtin/packages/rkcommon/package.py b/var/spack/repos/builtin/packages/rkcommon/package.py
index 50409f10a4e0de..4f0a07559ef9c6 100644
--- a/var/spack/repos/builtin/packages/rkcommon/package.py
+++ b/var/spack/repos/builtin/packages/rkcommon/package.py
@@ -16,6 +16,7 @@ class Rkcommon(CMakePackage):
     # maintainers("github_user1", "github_user2")
 
+    version("1.11.0", sha256="9cfeedaccdefbdcf23c465cb1e6c02057100c4a1a573672dc6cfea5348cedfdd")
version("1.10.0", sha256="57a33ce499a7fc5a5aaffa39ec7597115cf69ed4ff773546b5b71ff475ee4730") version("1.9.0", sha256="b68aa02ef44c9e35c168f826a14802bb5cc6a9d769ba4b64b2c54f347a14aa53") version("1.8.0", sha256="f037c15f7049610ef8bca37500b2ab00775af60ebbb9d491ba5fc2e5c04a7794") From 3ef04044948b2a2333b15592b82127b58d8e769c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Tue, 10 Oct 2023 04:44:10 +0200 Subject: [PATCH 100/408] embree: checksum 4.3.0 (#40395) --- var/spack/repos/builtin/packages/embree/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/embree/package.py b/var/spack/repos/builtin/packages/embree/package.py index 3da828eb6b0f56..6a6e91115eb4f3 100644 --- a/var/spack/repos/builtin/packages/embree/package.py +++ b/var/spack/repos/builtin/packages/embree/package.py @@ -13,6 +13,7 @@ class Embree(CMakePackage): url = "https://github.com/embree/embree/archive/v3.7.0.tar.gz" maintainers("aumuell") + version("4.3.0", sha256="baf0a57a45837fc055ba828a139467bce0bc0c6a9a5f2dccb05163d012c12308") version("4.2.0", sha256="b0479ce688045d17aa63ce6223c84b1cdb5edbf00d7eda71c06b7e64e21f53a0") version("4.1.0", sha256="117efd87d6dddbf7b164edd94b0bc057da69d6422a25366283cded57ed94738b") version("4.0.1", sha256="1fa3982fa3531f1b6e81f19e6028ae8a62b466597f150b853440fe35ef7c6c06") From 0a2c2afaecead3a97ab20f58ef9aa0da074d2275 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Bederi=C3=A1n?= <4043375+zzzoom@users.noreply.github.com> Date: Tue, 10 Oct 2023 04:53:04 -0300 Subject: [PATCH 101/408] python: add 3.11.6 (#40384) --- var/spack/repos/builtin/packages/python/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 04251b6c2339f6..b6c78adbe319c4 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -42,10 +42,11 @@ class Python(Package): version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") version( - "3.11.5", - sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58", + "3.11.6", + sha256="c049bf317e877cbf9fce8c3af902436774ecef5249a29d10984ca3a37f7f4736", preferred=True, ) + version("3.11.5", sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58") version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63") version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048") version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849") From 2e322cd0b5d7b6e1bbd5220fd3a7145ac1938bfe Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 10 Oct 2023 11:48:06 +0200 Subject: [PATCH 102/408] git: optimize build by not setting CFLAGS (#40387) --- var/spack/repos/builtin/packages/git/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index b0984f52c1a641..b63335eabce4bc 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -253,8 +253,6 @@ def setup_build_environment(self, env): extlib_bits.append(spec["gettext"].libs.search_flags) extlib_bits.append("-lintl") env.append_flags("EXTLIBS", " ".join(extlib_bits)) - if not is_system_path(spec["gettext"].prefix): - env.append_flags("CFLAGS", 
spec["gettext"].headers.include_flags) if not self.spec["curl"].satisfies("libs=shared"): curlconfig = which(os.path.join(self.spec["curl"].prefix.bin, "curl-config")) From 416a71c40c6b9ebb6ea0c34256e98a01040b3408 Mon Sep 17 00:00:00 2001 From: Miroslav Stoyanov <30537612+mkstoyanov@users.noreply.github.com> Date: Tue, 10 Oct 2023 13:42:26 -0400 Subject: [PATCH 103/408] find rocm fix (#40388) * find rocm fix * format fix * style fix * formatting is broken --- var/spack/repos/builtin/packages/tasmanian/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index ff974c79d314b0..2a3db2ade34ba3 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -189,12 +189,14 @@ def test_make_test(self): options = [cmake_dir] if "+rocm" in self.spec: + options.append(f"-Dhip_DIR={self.spec['hip'].prefix.lib.cmake.hip}") options.append( f"-DAMDDeviceLibs_DIR={self.spec['llvm-amdgpu'].prefix.lib.cmake.AMDDeviceLibs}" ) options.append(f"-Damd_comgr_DIR={self.spec['comgr'].prefix.lib.cmake.amd_comgr}") options.append( - f"-Dhsa-runtime64_DIR={self.spec['hsa-rocr-dev'].prefix.lib.cmake.hsa-runtime64}" + "-Dhsa-runtime64_DIR=" + + join_path(self.spec["hsa-rocr-dev"].prefix.lib.cmake, "hsa-runtime64") ) options.append(f"-DHSA_HEADER={self.spec['hsa-rocr-dev'].prefix.include}") options.append(f"-DCMAKE_INCLUDE_PATH={self.spec['hsa-rocr-dev'].prefix.include.hsa}") From 02865c5e48f8fec716d35d2a9305b497d41054da Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 10 Oct 2023 13:45:32 -0500 Subject: [PATCH 104/408] Update PyTorch ecosystem (#40321) * Update PyTorch ecosystem * py-pybind11: better documentation of supported compilers * py-torchdata: add v0.7.0 * Black fixes * py-torchtext: fix Python reqs * py-horovod: py-torch 2.1.0 not yet supported --- .../builtin/packages/py-horovod/package.py | 2 + .../builtin/packages/py-pybind11/package.py | 13 +- .../builtin/packages/py-torch/package.py | 157 +++++++----------- .../builtin/packages/py-torchaudio/package.py | 114 ++++--------- .../builtin/packages/py-torchdata/package.py | 4 +- .../builtin/packages/py-torchtext/package.py | 83 ++++----- .../packages/py-torchvision/package.py | 15 +- 7 files changed, 139 insertions(+), 249 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 6406ef85800e45..0e0bc5fd7f6068 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -225,6 +225,8 @@ class PyHorovod(PythonPackage, CudaPackage): conflicts( "controllers=gloo", when="@:0.20.0 platform=darwin", msg="Gloo cannot be compiled on MacOS" ) + # FIXME + conflicts("^py-torch@2.1:") # https://github.com/horovod/horovod/pull/1835 patch("fma.patch", when="@0.19.0:0.19.1") diff --git a/var/spack/repos/builtin/packages/py-pybind11/package.py b/var/spack/repos/builtin/packages/py-pybind11/package.py index 1dc422793c0346..63625651bcd964 100644 --- a/var/spack/repos/builtin/packages/py-pybind11/package.py +++ b/var/spack/repos/builtin/packages/py-pybind11/package.py @@ -27,6 +27,8 @@ class PyPybind11(CMakePackage, PythonExtension): maintainers("ax3l") version("master", branch="master") + version("2.11.1", sha256="d475978da0cdc2d43b73f30910786759d593a9d8ee05b1b6846d1eb16c6d2e0c") + version("2.11.0", 
sha256="7af30a84c6810e721829c4646e31927af9d8861e085aa5dd37c3c8b8169fcda1") version("2.10.4", sha256="832e2f309c57da9c1e6d4542dedd34b24e4192ecb4d62f6f4866a737454c9970") version("2.10.1", sha256="111014b516b625083bef701df7880f78c2243835abdb263065b6b59b960b6bad") version("2.10.0", sha256="eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec") @@ -52,9 +54,6 @@ class PyPybind11(CMakePackage, PythonExtension): depends_on("py-setuptools@42:", type="build") depends_on("py-pytest", type="test") - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) - depends_on("python@3.6:", when="@2.10.0:", type=("build", "run")) - depends_on("py-pip", type="build") depends_on("py-wheel", type="build") extends("python") @@ -64,10 +63,12 @@ class PyPybind11(CMakePackage, PythonExtension): depends_on("cmake@3.13:", type="build") depends_on("cmake@3.18:", type="build", when="@2.6.0:") - # compiler support - conflicts("%gcc@:4.7") + # https://github.com/pybind/pybind11/#supported-compilers conflicts("%clang@:3.2") - conflicts("%intel@:16") + conflicts("%apple-clang@:4") + conflicts("%gcc@:4.7") + conflicts("%msvc@:16") + conflicts("%intel@:17") # https://github.com/pybind/pybind11/pull/1995 @when("@:2.4") diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index cc863fcbafc61b..30666314eea1f8 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -11,11 +11,11 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): - """Tensors and Dynamic neural networks in Python - with strong GPU acceleration.""" + """Tensors and Dynamic neural networks in Python with strong GPU acceleration.""" homepage = "https://pytorch.org/" git = "https://github.com/pytorch/pytorch.git" + submodules = True maintainers("adamjstewart") @@ -23,89 +23,36 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # core libraries to ensure that the package was successfully installed. 
import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"] - version("main", branch="main", submodules=True) - version("master", branch="main", submodules=True, deprecated=True) - version( - "2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5", submodules=True - ) - version( - "2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e", submodules=True - ) - version( - "1.13.1", tag="v1.13.1", commit="49444c3e546bf240bed24a101e747422d1f8a0ee", submodules=True - ) - version( - "1.13.0", tag="v1.13.0", commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5", submodules=True - ) - version( - "1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8", submodules=True - ) - version( - "1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121", submodules=True - ) - version( - "1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755", submodules=True - ) - version( - "1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191", submodules=True - ) - version( - "1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030", submodules=True - ) - version( - "1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42", submodules=True - ) - version( - "1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce", submodules=True - ) - version( - "1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa", submodules=True - ) - version( - "1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8", submodules=True - ) - version( - "1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969", submodules=True - ) - version( - "1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56", submodules=True - ) - version( - "1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57", submodules=True - ) - version( - "1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26", submodules=True - ) - version( - "1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d", submodules=True - ) - version( - "1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3", submodules=True - ) - version( - "1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b", submodules=True - ) - version( - "1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948", submodules=True - ) - version( - "1.3.1", tag="v1.3.1", commit="ee77ccbb6da4e2efd83673e798acf7081bc03564", submodules=True - ) - version( - "1.3.0", tag="v1.3.0", commit="de394b672d0346f2f387a8bb1a1280d5d2eaf9cb", submodules=True - ) - version( - "1.2.0", tag="v1.2.0", commit="8554416a199c4cec01c60c7015d8301d2bb39b64", submodules=True - ) - version( - "1.1.0", tag="v1.1.0", commit="142c973f4179e768164cd578951489e89021b29c", submodules=True - ) - version( - "1.0.1", tag="v1.0.1", commit="83221655a8237ca80f9673dad06a98d34c43e546", submodules=True - ) - version( - "1.0.0", tag="v1.0.0", commit="db5d3131d16f57abd4f13d3f4b885d5f67bf6644", submodules=True - ) + version("main", branch="main") + version("master", branch="main", deprecated=True) + version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d") + version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5") + version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e") + version("1.13.1", tag="v1.13.1", commit="49444c3e546bf240bed24a101e747422d1f8a0ee") + version("1.13.0", tag="v1.13.0", 
commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5") + version("1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8") + version("1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121") + version("1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755") + version("1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191") + version("1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030") + version("1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42") + version("1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce") + version("1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa") + version("1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8") + version("1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969") + version("1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56") + version("1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57") + version("1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26") + version("1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d") + version("1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3") + version("1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b") + version("1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948") + version("1.3.1", tag="v1.3.1", commit="ee77ccbb6da4e2efd83673e798acf7081bc03564") + version("1.3.0", tag="v1.3.0", commit="de394b672d0346f2f387a8bb1a1280d5d2eaf9cb") + version("1.2.0", tag="v1.2.0", commit="8554416a199c4cec01c60c7015d8301d2bb39b64") + version("1.1.0", tag="v1.1.0", commit="142c973f4179e768164cd578951489e89021b29c") + version("1.0.1", tag="v1.0.1", commit="83221655a8237ca80f9673dad06a98d34c43e546") + version("1.0.0", tag="v1.0.0", commit="db5d3131d16f57abd4f13d3f4b885d5f67bf6644") is_darwin = sys.platform == "darwin" @@ -191,12 +138,10 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # See python_min_version in setup.py # Upper bounds come from wheel availability on PyPI depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@1.11:1", type=("build", "link", "run")) - depends_on("python@3.6.2:3.9", when="@1.7.1:1.10", type=("build", "link", "run")) - depends_on("python@3.6.1:3.8", when="@1.6:1.7.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@1.5", type=("build", "link", "run")) - depends_on("python@2.7:2,3.5:3.8", when="@1.4", type=("build", "link", "run")) - depends_on("python@2.7:2,3.5:3.7", when="@:1.3", type=("build", "link", "run")) + depends_on("python@:3.10", when="@1.11:1", type=("build", "link", "run")) + depends_on("python@:3.9", when="@1.7.1:1.10", type=("build", "link", "run")) + depends_on("python@:3.8", when="@1.4:1.7.0", type=("build", "link", "run")) + depends_on("python@:3.7", when="@:1.3", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@2:", type="build") @@ -221,6 +166,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-sympy", when="@2:", type=("build", "run")) depends_on("py-networkx", when="@2:", type=("build", "run")) depends_on("py-jinja2", when="@2:", type=("build", "run")) + depends_on("py-fsspec", when="@2.1:", type=("build", "run")) # Undocumented dependencies depends_on("py-tqdm", type="run") @@ -228,7 +174,8 @@ class 
PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("lapack") # third_party - depends_on("py-pybind11@2.10.1", when="@2:", type=("build", "link", "run")) + depends_on("py-pybind11@2.11.0", when="@2.1:", type=("build", "link", "run")) + depends_on("py-pybind11@2.10.1", when="@2.0", type=("build", "link", "run")) depends_on("py-pybind11@2.10.0", when="@1.13:1", type=("build", "link", "run")) depends_on("py-pybind11@2.6.2", when="@1.8:1.12", type=("build", "link", "run")) depends_on("py-pybind11@2.3.0", when="@1.1:1.7", type=("build", "link", "run")) @@ -243,7 +190,8 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("protobuf@:3", type=("build", "run")) depends_on("eigen") # https://github.com/pytorch/pytorch/issues/60329 - # depends_on("cpuinfo@2022-08-19", when="@1.13:") + # depends_on("cpuinfo@2023-01-13", when="@2.1:") + # depends_on("cpuinfo@2022-08-19", when="@1.13:2.0") # depends_on("cpuinfo@2020-12-17", when="@1.8:1.12") # depends_on("cpuinfo@2020-06-11", when="@1.6:1.7") # https://github.com/shibatch/sleef/issues/427 @@ -300,14 +248,16 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") depends_on("mpi", when="+mpi") # https://github.com/pytorch/pytorch/issues/60270 - # depends_on("gloo@2023-01-17", when="@2:+gloo") + # depends_on("gloo@2023-05-19", when="@2.1:+gloo") + # depends_on("gloo@2023-01-17", when="@2.0+gloo") # depends_on("gloo@2022-05-18", when="@1.13:1+gloo") # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") # depends_on("gloo@2021-05-04", when="@1.9+gloo") # depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") # depends_on("gloo@2020-03-17", when="@1.6+gloo") # https://github.com/pytorch/pytorch/issues/60331 - # depends_on("onnx@1.13.1", when="@2:+onnx_ml") + # depends_on("onnx@1.14.1", when="@2.1:+onnx_ml") + # depends_on("onnx@1.13.1", when="@2.0+onnx_ml") # depends_on("onnx@1.12.0", when="@1.13:1+onnx_ml") # depends_on("onnx@1.11.0", when="@1.12+onnx_ml") # depends_on("onnx@1.10.1_2021-10-08", when="@1.11+onnx_ml") @@ -420,7 +370,22 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): sha256="a54db63640b90e5833cc1099c0935572f5297d2d8625f62f01ac1fda79ed4569", when="@1.13 arch=ppc64le:", ) - conflicts("arch=ppc64le:", when="@:1.9,2:") + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0309-fallback-to-cpu_kernel-with-VSX.patch", + sha256="27f41c8d6cb61e69e761be62f03dc1ce023cbca34926e3ba559996821a7ce726", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0310-PR100149.patch", + sha256="1adbd38a9cc1611f1caaa325614695f4349d9ffd236332e0d8f0de5a3880f4dd", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.10.0/pytorch-2.0/recipe/0311-PR104956.patch", + sha256="be27c906924a21be198a3ea6c459739a1daa8b8b89045af339dafa4cd6f90d6c", + when="@2.0 arch=ppc64le:", + ) + conflicts("arch=ppc64le:", when="@:1.9") # Cherry-pick a patch to allow earlier versions of PyTorch to work with CUDA 11.4 patch( diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index 679a43aba301f9..d07ce1de2182c7 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -8,93 +8,41 @@ class PyTorchaudio(PythonPackage): - """The aim of torchaudio is to apply PyTorch to the audio - 
domain. By supporting PyTorch, torchaudio follows the same - philosophy of providing strong GPU acceleration, having a focus on - trainable features through the autograd system, and having - consistent style (tensor names and dimension names). Therefore, it - is primarily a machine learning library and not a general signal - processing library. The benefits of Pytorch is be seen in - torchaudio through having all the computations be through Pytorch - operations which makes it easy to use and feel like a natural - extension.""" + """An audio package for PyTorch.""" homepage = "https://github.com/pytorch/audio" git = "https://github.com/pytorch/audio.git" + submodules = True - version("main", branch="main", submodules=True) - version( - "2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4", submodules=True - ) - version( - "2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237", submodules=True - ) - version( - "0.13.1", tag="v0.13.1", commit="b90d79882c3521fb3882833320b4b85df3b622f4", submodules=True - ) - version( - "0.13.0", tag="v0.13.0", commit="bc8640b4722abf6587fb4cc2521da45aeb55a711", submodules=True - ) - version( - "0.12.1", tag="v0.12.1", commit="58da31733e08438f9d1816f55f54756e53872a92", submodules=True - ) - version( - "0.12.0", tag="v0.12.0", commit="2e1388401c434011e9f044b40bc8374f2ddfc414", submodules=True - ) - version( - "0.11.0", tag="v0.11.0", commit="820b383b3b21fc06e91631a5b1e6ea1557836216", submodules=True - ) - version( - "0.10.2", tag="v0.10.2", commit="6f539cf3edc4224b51798e962ca28519e5479ffb", submodules=True - ) - version( - "0.10.1", tag="v0.10.1", commit="4b64f80bef85bd951ea35048c461c8304e7fc4c4", submodules=True - ) - version( - "0.10.0", tag="v0.10.0", commit="d2634d866603c1e2fc8e44cd6e9aea7ddd21fe29", submodules=True - ) - version( - "0.9.1", tag="v0.9.1", commit="a85b2398722182dd87e76d9ffcbbbf7e227b83ce", submodules=True - ) - version( - "0.9.0", tag="v0.9.0", commit="33b2469744955e2129c6367457dffe9bb4b05dea", submodules=True - ) - version( - "0.8.2", tag="v0.8.2", commit="d254d547d183e7203e455de6b99e56d3ffdd4499", submodules=True - ) - version( - "0.8.1", tag="v0.8.1", commit="e4e171a51714b2b2bd79e1aea199c3f658eddf9a", submodules=True - ) - version( - "0.8.0", tag="v0.8.0", commit="099d7883c6b7af1d1c3b416191e5f3edf492e104", submodules=True - ) - version( - "0.7.2", tag="v0.7.2", commit="a853dff25de36cc637b1f02029343790d2dd0199", submodules=True - ) - version( - "0.7.0", tag="v0.7.0", commit="ac17b64f4daedd45d0495e2512e22eaa6e5b7eeb", submodules=True - ) - version( - "0.6.0", tag="v0.6.0", commit="f17ae39ff9da0df8f795fef2fcc192f298f81268", submodules=True - ) - version( - "0.5.1", tag="v0.5.1", commit="71434798460a4ceca9d42004567ef419c62a612e", submodules=True - ) - version( - "0.5.0", tag="v0.5.0", commit="09494ea545738538f9db2dceeffe10d421060ee5", submodules=True - ) - version( - "0.4.0", tag="v0.4.0", commit="8afed303af3de41f3586007079c0534543c8f663", submodules=True - ) + version("main", branch="main") + version("2.1.0", tag="v2.1.0", commit="6ea1133706801ec6e81bb29142da2e21a8583a0a") + version("2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4") + version("2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237") + version("0.13.1", tag="v0.13.1", commit="b90d79882c3521fb3882833320b4b85df3b622f4") + version("0.13.0", tag="v0.13.0", commit="bc8640b4722abf6587fb4cc2521da45aeb55a711") + version("0.12.1", tag="v0.12.1", commit="58da31733e08438f9d1816f55f54756e53872a92") + 
version("0.12.0", tag="v0.12.0", commit="2e1388401c434011e9f044b40bc8374f2ddfc414") + version("0.11.0", tag="v0.11.0", commit="820b383b3b21fc06e91631a5b1e6ea1557836216") + version("0.10.2", tag="v0.10.2", commit="6f539cf3edc4224b51798e962ca28519e5479ffb") + version("0.10.1", tag="v0.10.1", commit="4b64f80bef85bd951ea35048c461c8304e7fc4c4") + version("0.10.0", tag="v0.10.0", commit="d2634d866603c1e2fc8e44cd6e9aea7ddd21fe29") + version("0.9.1", tag="v0.9.1", commit="a85b2398722182dd87e76d9ffcbbbf7e227b83ce") + version("0.9.0", tag="v0.9.0", commit="33b2469744955e2129c6367457dffe9bb4b05dea") + version("0.8.2", tag="v0.8.2", commit="d254d547d183e7203e455de6b99e56d3ffdd4499") + version("0.8.1", tag="v0.8.1", commit="e4e171a51714b2b2bd79e1aea199c3f658eddf9a") + version("0.8.0", tag="v0.8.0", commit="099d7883c6b7af1d1c3b416191e5f3edf492e104") + version("0.7.2", tag="v0.7.2", commit="a853dff25de36cc637b1f02029343790d2dd0199") + version("0.7.0", tag="v0.7.0", commit="ac17b64f4daedd45d0495e2512e22eaa6e5b7eeb") + version("0.6.0", tag="v0.6.0", commit="f17ae39ff9da0df8f795fef2fcc192f298f81268") + version("0.5.1", tag="v0.5.1", commit="71434798460a4ceca9d42004567ef419c62a612e") + version("0.5.0", tag="v0.5.0", commit="09494ea545738538f9db2dceeffe10d421060ee5") + version("0.4.0", tag="v0.4.0", commit="8afed303af3de41f3586007079c0534543c8f663") - # https://github.com/pytorch/audio#dependencies + # https://pytorch.org/audio/main/installation.html#dependencies depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.12:0", type=("build", "link", "run")) - depends_on("python@3.7:3.9", when="@0.11", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.7.2:0.10", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.6:0.7.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.5", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@0.4", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.12:0", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.7.2:0.11", type=("build", "link", "run")) + depends_on("python@:3.8", when="@:0.7.0", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@0.10:", type="build") @@ -107,8 +55,8 @@ class PyTorchaudio(PythonPackage): depends_on("pkgconfig", type="build") depends_on("sox") - # https://github.com/pytorch/audio#dependencies - depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@2.1.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@2.0.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@2.0.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.13.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index a207d7f7b4192b..51b95c1cca7758 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.7.0-rc1", sha256="9d48e1a5bc0fa4e50976b89aea38d028dcb7e580dba594755d266e19c369b747") version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257") version("0.6.0", 
sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b") version("0.5.1", sha256="69d80bd33ce8f08e7cfeeb71cefddfc29cede25a85881e33dbae47576b96ed29") @@ -36,7 +37,8 @@ class PyTorchdata(PythonPackage): depends_on("ninja", when="@0.4:", type="build") # https://github.com/pytorch/data#version-compatibility - depends_on("py-torch@master", when="@main", type=("build", "run")) + depends_on("py-torch@main", when="@main", type=("build", "run")) + depends_on("py-torch@2.1.0", when="@0.7.0", type=("build", "run")) depends_on("py-torch@2.0.1", when="@0.6.1", type=("build", "run")) depends_on("py-torch@2.0.0", when="@0.6.0", type=("build", "run")) depends_on("py-torch@1.13.1", when="@0.5.1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index 10e02316db7743..180b555069bc5e 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -8,67 +8,37 @@ class PyTorchtext(PythonPackage): - """Text utilities and datasets for PyTorch.""" + """Text utilities, models, transforms, and datasets for PyTorch.""" homepage = "https://github.com/pytorch/text" git = "https://github.com/pytorch/text.git" + submodules = True maintainers("adamjstewart") - version("main", branch="main", submodules=True) - version( - "0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1", submodules=True - ) - version( - "0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5", submodules=True - ) - version( - "0.14.1", tag="v0.14.1", commit="e1e969d4947bb3dd01ea927af2f8ac9a2d778c39", submodules=True - ) - version( - "0.14.0", tag="v0.14.0", commit="e2b27f9b06ca71d55c2fcf6d47c60866ee936f40", submodules=True - ) - version( - "0.13.1", tag="v0.13.1", commit="330201f1132dcd0981180c19bc6843a19d310ff0", submodules=True - ) - version( - "0.13.0", tag="v0.13.0", commit="35298c43f3ce908fe06c177ecbd8ef1503a1292b", submodules=True - ) - version( - "0.12.0", tag="v0.12.0", commit="d7a34d6ae0f4e36a52777854d0163b9e85f1576b", submodules=True - ) - version( - "0.11.2", tag="v0.11.2", commit="92f4d158d8cbe9136896befa2d4234ea8b8e2795", submodules=True - ) - version( - "0.11.1", tag="v0.11.1", commit="5c65ec05d7c1eba5b0ea2d7ee170ccf977d9674f", submodules=True - ) - version( - "0.10.1", tag="v0.10.1", commit="0d670e03c1eee7e30e032bb96df4c12b785a15ff", submodules=True - ) - version( - "0.10.0", tag="v0.10.0", commit="4da1de36247aa06622088e78508e0e38a4392e38", submodules=True - ) - version( - "0.9.2", tag="v0.9.2", commit="22e5ee7548a85190eee78e8ed6c8911ec2c53035", submodules=True - ) - version( - "0.8.1", tag="v0.8.1", commit="0f911ec35ab020983efbf36b8c14415651e98618", submodules=True - ) - version( - "0.6.0", tag="0.6.0", commit="3a54c7f52584f201c17ca7489b52b812152612dc", submodules=True - ) - version( - "0.5.0", tag="0.5.0", commit="0169cde2f1d446ae886ef0be07e9a673585ed256", submodules=True - ) + version("main", branch="main") + version("0.16.0", tag="v0.16.0", commit="4e255c95c76b1ccde4f6650391c0bc30650d6dbe") + version("0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1") + version("0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5") + version("0.14.1", tag="v0.14.1", commit="e1e969d4947bb3dd01ea927af2f8ac9a2d778c39") + version("0.14.0", tag="v0.14.0", commit="e2b27f9b06ca71d55c2fcf6d47c60866ee936f40") + version("0.13.1", tag="v0.13.1", 
commit="330201f1132dcd0981180c19bc6843a19d310ff0") + version("0.13.0", tag="v0.13.0", commit="35298c43f3ce908fe06c177ecbd8ef1503a1292b") + version("0.12.0", tag="v0.12.0", commit="d7a34d6ae0f4e36a52777854d0163b9e85f1576b") + version("0.11.2", tag="v0.11.2", commit="92f4d158d8cbe9136896befa2d4234ea8b8e2795") + version("0.11.1", tag="v0.11.1", commit="5c65ec05d7c1eba5b0ea2d7ee170ccf977d9674f") + version("0.10.1", tag="v0.10.1", commit="0d670e03c1eee7e30e032bb96df4c12b785a15ff") + version("0.10.0", tag="v0.10.0", commit="4da1de36247aa06622088e78508e0e38a4392e38") + version("0.9.2", tag="v0.9.2", commit="22e5ee7548a85190eee78e8ed6c8911ec2c53035") + version("0.8.1", tag="v0.8.1", commit="0f911ec35ab020983efbf36b8c14415651e98618") + version("0.6.0", tag="0.6.0", commit="3a54c7f52584f201c17ca7489b52b812152612dc") + version("0.5.0", tag="0.5.0", commit="0169cde2f1d446ae886ef0be07e9a673585ed256") # https://github.com/pytorch/text#installation - depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.13:1", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.8.1:0.12", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.7:0.8.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.6", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@:0.5", type=("build", "link", "run")) + depends_on("python@3.8:3.11", when="@0.15:", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.13:0.14", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.8.1:0.12", type=("build", "link", "run")) + depends_on("python@:3.8", when="@:0.8.0", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@0.13:", type="build") @@ -79,13 +49,16 @@ class PyTorchtext(PythonPackage): depends_on("py-tqdm", type=("build", "run")) depends_on("py-requests", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) - depends_on("py-torchdata@0.6:", when="@0.15:", type=("build", "run")) + depends_on("py-torchdata@0.7.0", when="@0.16.0", type=("build", "run")) + depends_on("py-torchdata@0.6.1", when="@0.15.2", type=("build", "run")) + depends_on("py-torchdata@0.6.0", when="@0.15.1", type=("build", "run")) depends_on("py-pybind11", when="@0.8:", type=("build", "link")) depends_on("py-six", when="@:0.6", type=("build", "run")) depends_on("py-sentencepiece", when="@:0.7", type=("build", "run")) # https://github.com/pytorch/text#installation - depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index 55b35f37266f80..5aef4c6aef8a29 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -8,8 +8,7 @@ class PyTorchvision(PythonPackage): - """The torchvision package consists of popular datasets, model - architectures, and common image transformations for computer vision.""" + """Image and video datasets and models for torch deep learning.""" 
homepage = "https://github.com/pytorch/vision" url = "https://github.com/pytorch/vision/archive/v0.8.2.tar.gz" @@ -18,6 +17,7 @@ class PyTorchvision(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.0", sha256="79b30b082237e3ead21e74587cedf4a4d832f977cf7dfeccfb65f67988b12ceb") version("0.15.2", sha256="1efcb80e0a6e42c54f07ee16167839b4d302aeeecc12839cc47c74b06a2c20d4") version("0.15.1", sha256="689d23d4ebb0c7e54e8651c89b17155b64341c14ae4444a04ca7dc6f2b6a0a43") version("0.14.1", sha256="ced67e1cf1f97e168cdf271851a4d0b6d382ab7936e7bcbb39aaa87239c324b6") @@ -55,15 +55,14 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation depends_on("python@3.8:3.11", when="@0.15:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.12:0.14", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.8.2:0.11", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.7:0.8.1", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.6", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@0.5", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.7", when="@:0.4", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.12:0.14", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.8.2:0.11", type=("build", "link", "run")) + depends_on("python@:3.8", when="@0.5:0.8.1", type=("build", "link", "run")) + depends_on("python@:3.7", when="@:0.4", type=("build", "link", "run")) # https://github.com/pytorch/vision#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) From 4a9be30e6a62bca70c55a28ef7a6dd56deca6c79 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 10 Oct 2023 13:50:22 -0500 Subject: [PATCH 105/408] py-jupyter-packaging: remove duplicate packages (#38671) * py-jupyter-packaging: remove duplicate packages * Allow py-jupyter-packaging to be duplicated in DAG * Deprecate version of py-jupyterlab that requires py-jupyter-packaging at run-time --- .../builtin/packages/py-ipycanvas/package.py | 4 +--- .../builtin/packages/py-ipyevents/package.py | 4 +--- .../builtin/packages/py-ipympl/package.py | 4 +--- .../packages/py-jupyter-packaging/package.py | 2 ++ .../packages/py-jupyter-packaging11/package.py | 18 +++++++++++++++--- .../packages/py-jupyter-packaging7/package.py | 6 +++++- .../py-jupyter-server-mathjax/package.py | 2 +- .../py-jupyter-server-proxy/package.py | 2 +- .../packages/py-jupyter-server/package.py | 4 +--- .../packages/py-jupyterlab-server/package.py | 5 ++--- .../builtin/packages/py-jupyterlab/package.py | 15 ++++++++------- .../builtin/packages/py-jupytext/package.py | 4 +--- .../builtin/packages/py-nbclassic/package.py | 4 +--- .../builtin/packages/py-notebook/package.py | 4 +--- .../packages/py-widgetsnbextension/package.py | 3 +-- 15 files changed, 42 insertions(+), 39 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-ipycanvas/package.py b/var/spack/repos/builtin/packages/py-ipycanvas/package.py index 82f832ac8043e3..d61e222fb47a0d 100644 --- a/var/spack/repos/builtin/packages/py-ipycanvas/package.py +++ b/var/spack/repos/builtin/packages/py-ipycanvas/package.py @@ -17,9 +17,7 @@ class PyIpycanvas(PythonPackage): depends_on("python@3.5:", type=("build", "run")) depends_on("py-setuptools@40.8:", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7.0:0.7', type='build') + depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-ipywidgets@7.6:", type=("build", "run")) depends_on("pil@6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ipyevents/package.py b/var/spack/repos/builtin/packages/py-ipyevents/package.py index 8f9d717d75e571..042cff2119353b 100644 --- a/var/spack/repos/builtin/packages/py-ipyevents/package.py +++ b/var/spack/repos/builtin/packages/py-ipyevents/package.py @@ -16,8 +16,6 @@ class PyIpyevents(PythonPackage): depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools@40.8:", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7.0:0.7', type='build') + depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-ipywidgets@7.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ipympl/package.py b/var/spack/repos/builtin/packages/py-ipympl/package.py index 158345349f4518..56eefb8429f3e6 100644 --- a/var/spack/repos/builtin/packages/py-ipympl/package.py +++ b/var/spack/repos/builtin/packages/py-ipympl/package.py @@ -23,8 +23,6 @@ class PyIpympl(PythonPackage): depends_on("py-traitlets@:5", type=("build", "run")) depends_on("py-ipywidgets@7.6:7", type=("build", "run")) depends_on("py-matplotlib@2:3", type=("build", "run")) - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7', type='build') + 
depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3", type="build") depends_on("yarn", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py index 472d7e6bc93afc..cf333579fd2031 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py @@ -12,6 +12,8 @@ class PyJupyterPackaging(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter_packaging-0.10.4.tar.gz" + tags = ["build-tools"] + version("0.12.0", sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac") version("0.11.1", sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec") version("0.10.6", sha256="a8a2c90bf2e0cae83be63ccb0b7035032a1589f268cc08b1d479e37ce50fc940") diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py index c74a7a5231d0a0..b15cfe8752e480 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py @@ -16,9 +16,21 @@ class PyJupyterPackaging11(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter_packaging-0.11.1.tar.gz" - version("0.12.3", sha256="9d9b2b63b97ffd67a8bc5391c32a421bc415b264a32c99e4d8d8dd31daae9cf4") - version("0.12.0", sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac") - version("0.11.1", sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec") + version( + "0.12.3", + sha256="9d9b2b63b97ffd67a8bc5391c32a421bc415b264a32c99e4d8d8dd31daae9cf4", + deprecated=True, + ) + version( + "0.12.0", + sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac", + deprecated=True, + ) + version( + "0.11.1", + sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec", + deprecated=True, + ) depends_on("python@3.7:", type=("build", "run")) depends_on("py-packaging", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py index be160db5350ee1..8f0da9b9999cb4 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py @@ -16,7 +16,11 @@ class PyJupyterPackaging7(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter-packaging-0.7.12.tar.gz" - version("0.7.12", sha256="b140325771881a7df7b7f2d14997b619063fe75ae756b9025852e4346000bbb8") + version( + "0.7.12", + sha256="b140325771881a7df7b7f2d14997b619063fe75ae756b9025852e4346000bbb8", + deprecated=True, + ) depends_on("python@3.6:", type=("build", "run")) depends_on("py-packaging", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py index ee2234d14747f5..c86e2f18147c0c 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py @@ -18,6 +18,6 @@ class PyJupyterServerMathjax(PythonPackage): depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") 
depends_on("py-jupyter-packaging", type="build") - depends_on("py-jupyter-packaging11@:1", when="@0.2.6:", type="build") + depends_on("py-jupyter-packaging@0.10:1", when="@0.2.6:", type="build") depends_on("py-jupyter-server@1.1:1", when="@0.2.3", type=("build", "run")) depends_on("py-jupyter-server@1.1:", when="@0.2.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py index a44d656f773a63..4e111148069580 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py @@ -19,7 +19,7 @@ class PyJupyterServerProxy(PythonPackage): version("3.2.2", sha256="54690ea9467035d187c930c599e76065017baf16e118e6eebae0d3a008c4d946") - depends_on("py-jupyter-packaging7@0.7.9:0.7", type="build") + depends_on("py-jupyter-packaging@0.7.9:0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-setuptools@40.8.0:", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyter-server/package.py b/var/spack/repos/builtin/packages/py-jupyter-server/package.py index 5f371a5403f437..4f461dca212cf3 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server/package.py @@ -32,9 +32,7 @@ class PyJupyterServer(PythonPackage): depends_on("py-hatch-jupyter-builder@0.8.1:", when="@2:", type="build") with when("@:1"): - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@1.6.2:", type="build") - # depends_on('py-jupyter-packaging@0.9:0', when='@1.6.2:', type='build') + depends_on("py-jupyter-packaging@0.9:0", when="@1.6.2:", type="build") depends_on("py-pre-commit", when="@1.16:", type="build") depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py index 35e5be5e8e198b..d6791671f645c4 100644 --- a/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py +++ b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py @@ -24,9 +24,8 @@ class PyJupyterlabServer(PythonPackage): with when("@:2.14"): depends_on("py-setuptools", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", type="build") - # depends_on('py-jupyter-packaging@0.9:0', type='build') + depends_on("py-jupyter-packaging@0.10:1", when="@2.10.3", type="build") + depends_on("py-jupyter-packaging@0.9:0", when="@:2.6", type="build") depends_on("py-babel@2.10:", when="@2.16.4:", type=("build", "run")) depends_on("py-babel", when="@2.5.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyterlab/package.py b/var/spack/repos/builtin/packages/py-jupyterlab/package.py index 8865713bf5d270..ccacf77c8e8d62 100644 --- a/var/spack/repos/builtin/packages/py-jupyterlab/package.py +++ b/var/spack/repos/builtin/packages/py-jupyterlab/package.py @@ -24,7 +24,11 @@ class PyJupyterlab(PythonPackage): version("3.1.14", sha256="13174cb6076dd5da6f1b85725ccfcc9518d8f98e86b8b644fc89b1dfaeda63a9") version("3.0.18", sha256="0e4bb4b89014607a16658b54f13df2f0af14f3c286109a0e14d5a46cbbe28caf") version("3.0.16", sha256="7ad4fbe1f6d38255869410fd151a8b15692a663ca97c0a8146b3f5c40e275c23") - version("3.0.14", 
sha256="713a84991dfcca8c0bc260911f1bd54ac25a386a86285713b9555a60f795059b")
+    version(
+        "3.0.14",
+        sha256="713a84991dfcca8c0bc260911f1bd54ac25a386a86285713b9555a60f795059b",
+        deprecated=True,
+    )
     version("2.2.7", sha256="a72ffd0d919cba03a5ef8422bc92c3332a957ff97b0490494209c83ad93826da")
     version("2.1.0", sha256="8c239aababf5baa0b3d36e375fddeb9fd96f3a9a24a8cda098d6a414f5bbdc81")
@@ -50,12 +54,9 @@ class PyJupyterlab(PythonPackage):
     with when("@:3"):
         depends_on("py-setuptools", when="@:3", type=("build", "run"))
 
-        # TODO: replace this after concretizer learns how to concretize separate build deps
-        depends_on("py-jupyter-packaging11", when="@3.0.15:3", type="build")
-        depends_on("py-jupyter-packaging7", when="@3.0.0:3.0.14", type="build")
-        # depends_on('py-jupyter-packaging@0.9:0', when='@3.0.15:', type='build')
-        # depends_on('py-jupyter-packaging@0.7.3:0.7', when='@3.0.0:3.0.14',
-        # type=('build', 'run'))
+        depends_on("py-jupyter-packaging@0.9:1", when="@3.4.8", type="build")
+        depends_on("py-jupyter-packaging@0.9:0", when="@3.0.15:3.4.2", type="build")
+        depends_on("py-jupyter-packaging@0.7.3:0.7", when="@3.0.0:3.0.14", type=("build", "run"))
 
         depends_on("py-pre-commit", when="@3.4:3.4.3", type="build")
         depends_on("py-ipython", when="@3", type=("build", "run"))
diff --git a/var/spack/repos/builtin/packages/py-jupytext/package.py b/var/spack/repos/builtin/packages/py-jupytext/package.py
index f9bf4efa8d1219..6491fd575611dc 100644
--- a/var/spack/repos/builtin/packages/py-jupytext/package.py
+++ b/var/spack/repos/builtin/packages/py-jupytext/package.py
@@ -31,6 +31,4 @@ class PyJupytext(PythonPackage):
     # todo: in order to use jupytext as a jupyterlab extension,
     # some additional dependencies need to be added (and checked):
     depends_on("py-jupyterlab@3", type=("build", "run"))
-    # TODO: replace this after concretizer learns how to concretize separate build deps
-    depends_on('py-jupyter-packaging7', type='build')
-    # depends_on('py-jupyter-packaging@0.7.9:0.7', type='build')
+    depends_on("py-jupyter-packaging@0.7.9:0.7", type="build")
diff --git a/var/spack/repos/builtin/packages/py-nbclassic/package.py b/var/spack/repos/builtin/packages/py-nbclassic/package.py
index 0f9bf98d9c9465..e46a6cd01e48e4 100644
--- a/var/spack/repos/builtin/packages/py-nbclassic/package.py
+++ b/var/spack/repos/builtin/packages/py-nbclassic/package.py
@@ -18,9 +18,7 @@ class PyNbclassic(PythonPackage):
     version("0.3.1", sha256="f920f8d09849bea7950e1017ff3bd101763a8d68f565a51ce053572e65aa7947")
 
     depends_on("py-setuptools", type="build")
-    # TODO: replace this after concretizer learns how to concretize separate build deps
-
depends_on("py-jupyter-packaging11", when="@6.4.1:", type="build") - # depends_on('py-jupyter-packaging@0.9:0', when='@6.4.1:', type='build') + depends_on("py-jupyter-packaging@0.9:0", when="@6.4.1:", type="build") depends_on("py-setuptools", when="@5:", type="build") depends_on("py-jinja2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py index ba9a0ab5f171ed..ee53aac4e7b5ca 100644 --- a/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py +++ b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py @@ -20,8 +20,7 @@ class PyWidgetsnbextension(PythonPackage): version("1.2.6", sha256="c618cfb32978c9517caf0b4ef3aec312f8dd138577745e7b0d4abfcc7315ce51") depends_on("py-setuptools", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@4.0.3:", type="build") + depends_on("py-jupyter-packaging@0.10:0", when="@4.0.3:", type="build") depends_on("python@2.7:2.8,3.3:", type=("build", "run")) depends_on("python@3.7:", when="@4.0.3:", type=("build", "run")) From ab9c61ef3133108f3d963152f6cec64e8916a540 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 10 Oct 2023 21:09:04 +0200 Subject: [PATCH 106/408] More helpful error when patch lookup fails (#40379) --- lib/spack/spack/patch.py | 20 +++++++++++--------- lib/spack/spack/spec.py | 11 ++++++++++- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 23a5ee20a86d9d..8b094a7642d634 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -312,21 +312,19 @@ def from_json(cls, stream, repository): def to_json(self, stream): sjson.dump({"patches": self.index}, stream) - def patch_for_package(self, sha256, pkg): + def patch_for_package(self, sha256: str, pkg): """Look up a patch in the index and build a patch object for it. Arguments: - sha256 (str): sha256 hash to look up + sha256: sha256 hash to look up pkg (spack.package_base.PackageBase): Package object to get patch for. We build patch objects lazily because building them requires that - we have information about the package's location in its repo. 
- - """ + we have information about the package's location in its repo.""" sha_index = self.index.get(sha256) if not sha_index: - raise NoSuchPatchError( - "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256) + raise PatchLookupError( + f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}" ) # Find patches for this class or any class it inherits from @@ -335,8 +333,8 @@ def patch_for_package(self, sha256, pkg): if patch_dict: break else: - raise NoSuchPatchError( - "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256) + raise PatchLookupError( + f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}" ) # add the sha256 back (we take it out on write to save space, @@ -405,5 +403,9 @@ class NoSuchPatchError(spack.error.SpackError): """Raised when a patch file doesn't exist.""" +class PatchLookupError(NoSuchPatchError): + """Raised when a patch file cannot be located from sha256.""" + + class PatchDirectiveError(spack.error.SpackError): """Raised when the wrong arguments are suppled to the patch directive.""" diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 0f6589f0e6be16..93913e437993fe 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -74,6 +74,7 @@ import spack.deptypes as dt import spack.error import spack.hash_types as ht +import spack.patch import spack.paths import spack.platforms import spack.provider_index @@ -3913,7 +3914,15 @@ def patches(self): for sha256 in self.variants["patches"]._patches_in_order_of_appearance: index = spack.repo.PATH.patch_index pkg_cls = spack.repo.PATH.get_pkg_class(self.name) - patch = index.patch_for_package(sha256, pkg_cls) + try: + patch = index.patch_for_package(sha256, pkg_cls) + except spack.patch.PatchLookupError as e: + raise spack.error.SpecError( + f"{e}. This usually means the patch was modified or removed. " + "To fix this, either reconcretize or use the original package " + "repository" + ) from e + self._patches.append(patch) return self._patches From 08f6e07ee9157f7fd7f4bbb7de7c8d948da2a142 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 10 Oct 2023 15:35:15 -0500 Subject: [PATCH 107/408] GCC: fix build with Apple Clang 15 (#40318) --- var/spack/repos/builtin/packages/gcc/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index f4ce615e7fcf7d..116371cdb272bc 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -783,6 +783,11 @@ def configure_args(self): "--with-as=" + binutils.join("as"), ] ) + elif spec.satisfies("%apple-clang@15:"): + # https://github.com/iains/gcc-darwin-arm64/issues/117 + # https://github.com/iains/gcc-12-branch/issues/22 + # https://github.com/iains/gcc-13-branch/issues/8 + options.append("--with-ld=/Library/Developer/CommandLineTools/usr/bin/ld-classic") # enable_bootstrap if spec.satisfies("+bootstrap"): From 7b266717c3692127177b0f231533bab6250d5747 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 10 Oct 2023 14:32:51 -0700 Subject: [PATCH 108/408] e4s arm stack: duplicate and target neoverse v1 (#40369) * e4s arm stack: duplicate and target both neoverse n1, v1 * remove neoverse_n1 target until issue #40397 is resolved --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 24 +++++++++++-------- .../{e4s-arm => e4s-neoverse_v1}/spack.yaml | 6 ++--- 2 files changed, 17 insertions(+), 13 deletions(-) rename share/spack/gitlab/cloud_pipelines/stacks/{e4s-arm => e4s-neoverse_v1}/spack.yaml (98%) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index df2c7f85ca2cca..b5e57e3e8429a6 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -165,6 +165,10 @@ default: extends: [ ".generate-base" ] tags: ["spack", "public", "medium", "aarch64"] +.generate-neoverse_v1: + extends: [ ".generate-base" ] + tags: ["spack", "public", "medium", "aarch64", "graviton3"] + .generate-deprecated: extends: [ ".base-job" ] stage: generate @@ -301,27 +305,27 @@ e4s-build: job: e4s-generate ######################################## -# E4S ARM pipeline +# E4S Neoverse V1 pipeline ######################################## -.e4s-arm: - extends: [ ".linux_aarch64" ] +.e4s-neoverse_v1: + extends: [ ".linux_neoverse_v1" ] variables: - SPACK_CI_STACK_NAME: e4s-arm + SPACK_CI_STACK_NAME: e4s-neoverse_v1 -e4s-arm-generate: - extends: [ ".e4s-arm", ".generate-aarch64" ] +e4s-neoverse_v1-generate: + extends: [ ".e4s-neoverse_v1", ".generate-neoverse_v1" ] image: ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01 -e4s-arm-build: - extends: [ ".e4s-arm", ".build" ] +e4s-neoverse_v1-build: + extends: [ ".e4s-neoverse_v1", ".build" ] trigger: include: - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: e4s-arm-generate + job: e4s-neoverse_v1-generate strategy: depend needs: - artifacts: True - job: e4s-arm-generate + job: e4s-neoverse_v1-generate ######################################## # E4S ROCm External pipeline diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml similarity index 98% rename from share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml rename to share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index 02aafd6addd1dc..b4e8114df67f51 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-arm/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -7,7 +7,7 @@ spack: packages: all: - require: '%gcc@11.4.0 target=aarch64' + require: '%gcc@11.4.0 target=neoverse_v1' providers: blas: [openblas] mpi: [mpich] @@ -340,7 +340,7 @@ spack: # - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12 # - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-arm" } + mirrors: { "mirror": "s3://spack-binaries/develop/e4s-arm-neoverse_v1" } ci: pipeline-gen: @@ -348,4 +348,4 @@ spack: image: "ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01" cdash: - build-group: E4S ARM + build-group: E4S ARM Neoverse V1 From 1ed47c6315cdd27e85c10bfa7287571381ee4b67 Mon Sep 17 00:00:00 2001 From: Edward Hartnett <38856240+edwardhartnett@users.noreply.github.com> Date: Tue, 10 Oct 2023 16:11:14 -0600 Subject: [PATCH 109/408] w3emc: add v2.11.0 (#40376) * added version 2.11.0 * more fixes --- var/spack/repos/builtin/packages/w3emc/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/w3emc/package.py b/var/spack/repos/builtin/packages/w3emc/package.py index d556f833cef012..335e9caa5e6fa8 100644 --- a/var/spack/repos/builtin/packages/w3emc/package.py +++ b/var/spack/repos/builtin/packages/w3emc/package.py @@ -16,9 +16,10 @@ class W3emc(CMakePackage): url = "https://github.com/NOAA-EMC/NCEPLIBS-w3emc/archive/refs/tags/v2.9.0.tar.gz" git = "https://github.com/NOAA-EMC/NCEPLIBS-w3emc" - maintainers("t-brown", "AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") + maintainers("AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") version("develop", branch="develop") + version("2.11.0", sha256="53a03d03421c5da699b026ca220512ed494a531b83284693f66d2579d570c43b") version("2.10.0", sha256="366b55a0425fc3e729ecb9f3b236250349399fe4c8e19f325500463043fd2f18") version("2.9.3", sha256="9ca1b08dd13dfbad4a955257ae0cf38d2e300ccd8d983606212bc982370a29bc") version("2.9.2", sha256="eace811a1365f69b85fdf2bcd93a9d963ba72de5a7111e6fa7c0e6578b69bfbc") @@ -56,7 +57,7 @@ class W3emc(CMakePackage): def setup_run_environment(self, env): if self.spec.satisfies("@:2.9"): - suffixes = ["4", "d", "8"] + suffixes = ("4", "d", "8") shared = False else: suffixes = self.spec.variants["precision"].value @@ -81,3 +82,7 @@ def cmake_args(self): ] return args + + def check(self): + with working_dir(self.builder.build_directory): + make("test") From d3418e03b334813a49e9ab050d0a331d380b031b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Wed, 11 Oct 2023 05:24:17 +0200 Subject: [PATCH 110/408] openvkl: add 1.3.2 (#40392) * openvkl: add 1.3.2 works with (and requires) embree@4 * openvkl: simplify formatting with f-string thank you for the suggestion in the review --- var/spack/repos/builtin/packages/openvkl/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/openvkl/package.py b/var/spack/repos/builtin/packages/openvkl/package.py index 69b80f270fc48c..32bbdcafe26c3b 100644 --- a/var/spack/repos/builtin/packages/openvkl/package.py +++ b/var/spack/repos/builtin/packages/openvkl/package.py @@ -16,6 +16,7 @@ class Openvkl(CMakePackage): # maintainers("github_user1", "github_user2") + version("1.3.2", sha256="7704736566bf17497a3e51c067bd575316895fda96eccc682dae4aac7fb07b28") version("1.3.1", sha256="c9cefb6c313f2b4c0331e9629931759a6bc204ec00deed6ec0becad1670a1933") version("1.3.0", 
sha256="c6d4d40e6d232839c278b53dee1e7bd3bd239c3ccac33f49b465fc65a0692be9") version("1.2.0", sha256="dc468c2f0a359aaa946e04a01c2a6634081f7b6ce31b3c212c74bf7b4b0c9ec2") @@ -24,7 +25,8 @@ class Openvkl(CMakePackage): version("1.0.0", sha256="81ccae679bfa2feefc4d4b1ce72bcd242ba34d2618fbb418a1c2a05d640d16b4") version("0.13.0", sha256="974608259e3a5d8e29d2dfe81c6b2b1830aadeb9bbdc87127f3a7c8631e9f1bd") - depends_on("embree@3.13.0:3") + depends_on("embree@4", when="@1.3.2:") + depends_on("embree@3.13.0:3", when="@:1.3.1") depends_on("embree@3.13.1:", when="@1.0.0:") depends_on("ispc@1.15.0:", type=("build")) depends_on("ispc@1.16.0:", when="@1.0.0:", type=("build")) @@ -32,10 +34,14 @@ class Openvkl(CMakePackage): depends_on("rkcommon@1.6.1:") depends_on("rkcommon@1.7.0:", when="@1.0.0:") depends_on("rkcommon@1.8.0:", when="@1.1:") + depends_on("rkcommon@:1.10.0", when="@:1.3.1") + depends_on("rkcommon@1.11.0:", when="@1.3.2:") depends_on("tbb") def cmake_args(self): args = [ + # otherwise, openvkl 1.3.2 tries to install its headers into /openvkl + self.define("CMAKE_INSTALL_INCLUDEDIR", f"{self.spec.prefix}/include"), self.define("BUILD_BENCHMARKS", False), self.define("BUILD_EXAMPLES", False), self.define("BUILD_TESTING", False), From d0b17d43e920c151b3296c38aa9cd3c2187c959a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Wed, 11 Oct 2023 05:52:51 +0200 Subject: [PATCH 111/408] botan: checksum 3.2.0 (#40417) --- var/spack/repos/builtin/packages/botan/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/botan/package.py b/var/spack/repos/builtin/packages/botan/package.py index 57d1e79975eae8..0967c8b032d7ca 100644 --- a/var/spack/repos/builtin/packages/botan/package.py +++ b/var/spack/repos/builtin/packages/botan/package.py @@ -14,6 +14,7 @@ class Botan(MakefilePackage): maintainers("aumuell") + version("3.2.0", sha256="049c847835fcf6ef3a9e206b33de05dd38999c325e247482772a5598d9e5ece3") version("3.1.1", sha256="30c84fe919936a98fef5331f246c62aa2c0e4d2085b2d4511207f6a20afa3a6b") version("3.1.0", sha256="4e18e755a8bbc6bf96fac916fbf072ecd06740c72a72017c27162e4c0b4725fe") version("3.0.0", sha256="5da552e00fa1c047a90c22eb5f0247ec27e7432b68b78e10a7ce0955269ccad7") From ed3a2ac586fdcad9fe7ed27cdc3a99a27e0c3027 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 11 Oct 2023 13:09:21 +0200 Subject: [PATCH 112/408] spack buildcache: fix a typo in a function call (#40446) fixes #40415 --- lib/spack/spack/cmd/buildcache.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 5eb0ea1ed405e8..13e77927add9ad 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -268,7 +268,7 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]: return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs] -def push_fn(args): +def push_fn(args: argparse.Namespace): """create a binary package and push it to a mirror""" if args.spec_file: tty.warn( @@ -414,7 +414,7 @@ def preview_fn(args): ) -def check_fn(args): +def check_fn(args: argparse.Namespace): """check specs against remote binary mirror(s) to see if any need to be rebuilt this command uses the process exit code to indicate its result, specifically, if the @@ -429,7 +429,7 @@ def check_fn(args): specs = spack.cmd.parse_specs(args.spec or args.spec_file) if specs: - specs = _matching_specs(specs, specs) + specs = _matching_specs(specs) 
else: specs = spack.cmd.require_active_env("buildcache check").all_specs() From a7209f12c830b34871ad929d14c367d5cbac10a7 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 05:35:37 -0700 Subject: [PATCH 113/408] Update legacy `.format()` calls to fstrings in installer.py (#40426) --- lib/spack/spack/installer.py | 284 ++++++++++++++++------------------- 1 file changed, 132 insertions(+), 152 deletions(-) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 99ab7d45bd412c..51f70341761835 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -131,12 +131,12 @@ def set_term_title(self, text: str): if not sys.stdout.isatty(): return - status = "{0} {1}".format(text, self.get_progress()) - sys.stdout.write("\033]0;Spack: {0}\007".format(status)) + status = f"{text} {self.get_progress()}" + sys.stdout.write(f"\x1b]0;Spack: {status}\x07") sys.stdout.flush() def get_progress(self) -> str: - return "[{0}/{1}]".format(self.pkg_num, self.pkg_count) + return f"[{self.pkg_num}/{self.pkg_count}]" class TermStatusLine: @@ -175,7 +175,7 @@ def clear(self): # Move the cursor to the beginning of the first "Waiting for" message and clear # everything after it. - sys.stdout.write("\x1b[%sF\x1b[J" % lines) + sys.stdout.write(f"\x1b[{lines}F\x1b[J") sys.stdout.flush() @@ -220,14 +220,13 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici # consists in module file generation and registration in the DB. if pkg.spec.external: _process_external_package(pkg, explicit) - _print_installed_pkg("{0} (external {1})".format(pkg.prefix, package_id(pkg))) + _print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})") return True if pkg.spec.installed_upstream: tty.verbose( - "{0} is installed in an upstream Spack instance at {1}".format( - package_id(pkg), pkg.spec.prefix - ) + f"{package_id(pkg)} is installed in an upstream Spack instance at " + f"{pkg.spec.prefix}" ) _print_installed_pkg(pkg.prefix) @@ -296,7 +295,7 @@ def _packages_needed_to_bootstrap_compiler( package is the bootstrap compiler (``True``) or one of its dependencies (``False``). The list will be empty if there are no compilers. """ - tty.debug("Bootstrapping {0} compiler".format(compiler)) + tty.debug(f"Bootstrapping {compiler} compiler") compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture) if compilers: return [] @@ -305,9 +304,9 @@ def _packages_needed_to_bootstrap_compiler( # Set the architecture for the compiler package in a way that allows the # concretizer to back off if needed for the older bootstrapping compiler - dep.constrain("platform=%s" % str(architecture.platform)) - dep.constrain("os=%s" % str(architecture.os)) - dep.constrain("target=%s:" % architecture.target.microarchitecture.family.name) + dep.constrain(f"platform={str(architecture.platform)}") + dep.constrain(f"os={str(architecture.os)}") + dep.constrain(f"target={architecture.target.microarchitecture.family.name}:") # concrete CompilerSpec has less info than concrete Spec # concretize as Spec to add that information dep.concretize() @@ -340,15 +339,15 @@ def _hms(seconds: int) -> str: if m: parts.append("%dm" % m) if s: - parts.append("%.2fs" % s) + parts.append(f"{s:.2f}s") return " ".join(parts) def _log_prefix(pkg_name) -> str: """Prefix of the form "[pid]: [pkg name]: ..." 
when printing a status update during the build.""" - pid = "{0}: ".format(os.getpid()) if tty.show_pid() else "" - return "{0}{1}:".format(pid, pkg_name) + pid = f"{os.getpid()}: " if tty.show_pid() else "" + return f"{pid}{pkg_name}:" def _print_installed_pkg(message: str) -> None: @@ -375,9 +374,9 @@ def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None: def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None: - phases = ["{}: {}.".format(p.capitalize(), _hms(timer.duration(p))) for p in timer.phases] - phases.append("Total: {}".format(_hms(timer.duration()))) - tty.msg("{0} Successfully installed {1}".format(pre, pkg_id), " ".join(phases)) + phases = [f"{p.capitalize()}: {_hms(timer.duration(p))}." for p in timer.phases] + phases.append(f"Total: {_hms(timer.duration())}") + tty.msg(f"{pre} Successfully installed {pkg_id}", " ".join(phases)) def _install_from_cache( @@ -402,14 +401,14 @@ def _install_from_cache( ) pkg_id = package_id(pkg) if not installed_from_cache: - pre = "No binary for {0} found".format(pkg_id) + pre = f"No binary for {pkg_id} found" if cache_only: - tty.die("{0} when cache-only specified".format(pre)) + tty.die(f"{pre} when cache-only specified") - tty.msg("{0}: installing from source".format(pre)) + tty.msg(f"{pre}: installing from source") return False t.stop() - tty.debug("Successfully extracted {0} from binary cache".format(pkg_id)) + tty.debug(f"Successfully extracted {pkg_id} from binary cache") _write_timer_json(pkg, t, True) _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t) @@ -430,19 +429,19 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b """ assert pkg.spec.external, "Expected to post-install/register an external package." - pre = "{s.name}@{s.version} :".format(s=pkg.spec) + pre = f"{pkg.spec.name}@{pkg.spec.version} :" spec = pkg.spec if spec.external_modules: - tty.msg("{0} has external module in {1}".format(pre, spec.external_modules)) - tty.debug("{0} is actually installed in {1}".format(pre, spec.external_path)) + tty.msg(f"{pre} has external module in {spec.external_modules}") + tty.debug(f"{pre} is actually installed in {spec.external_path}") else: - tty.debug("{0} externally installed in {1}".format(pre, spec.external_path)) + tty.debug(f"{pre} externally installed in {spec.external_path}") try: # Check if the package was already registered in the DB. # If this is the case, then only make explicit if required. - tty.debug("{0} already registered in DB".format(pre)) + tty.debug(f"{pre} already registered in DB") record = spack.store.STORE.db.get_record(spec) if explicit and not record.explicit: spack.store.STORE.db.update_explicit(spec, explicit) @@ -451,11 +450,11 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b # If not, register it and generate the module file. # For external packages we just need to run # post-install hooks to generate module files. 
- tty.debug("{0} generating module file".format(pre)) + tty.debug(f"{pre} generating module file") spack.hooks.post_install(spec, explicit) # Add to the DB - tty.debug("{0} registering into DB".format(pre)) + tty.debug(f"{pre} registering into DB") spack.store.STORE.db.add(spec, None, explicit=explicit) @@ -490,7 +489,7 @@ def _process_binary_cache_tarball( if download_result is None: return False - tty.msg("Extracting {0} from binary cache".format(package_id(pkg))) + tty.msg(f"Extracting {package_id(pkg)} from binary cache") with timer.measure("install"), spack.util.path.filter_padding(): binary_distribution.extract_tarball( @@ -522,7 +521,7 @@ def _try_install_from_binary_cache( if not spack.mirror.MirrorCollection(binary=True): return False - tty.debug("Searching for binary cache of {0}".format(package_id(pkg))) + tty.debug(f"Searching for binary cache of {package_id(pkg)}") with timer.measure("search"): matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True) @@ -590,9 +589,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None: source_repo = spack.repo.Repo(source_repo_root) source_pkg_dir = source_repo.dirname_for_package_name(node.name) except spack.repo.RepoError as err: - tty.debug("Failed to create source repo for {0}: {1}".format(node.name, str(err))) + tty.debug(f"Failed to create source repo for {node.name}: {str(err)}") source_pkg_dir = None - tty.warn("Warning: Couldn't copy in provenance for {0}".format(node.name)) + tty.warn(f"Warning: Couldn't copy in provenance for {node.name}") # Create a destination repository dest_repo_root = os.path.join(path, node.namespace) @@ -632,7 +631,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str: Return: Colorized installing message """ - pre = "{0}: ".format(pid) if tty.show_pid() else "" + pre = f"{pid}: " if tty.show_pid() else "" post = ( " @*{%s}" % install_status.get_progress() if install_status and spack.config.get("config:install_status", True) @@ -698,7 +697,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None: # in the stage tree (not arbitrary files) abs_expr = os.path.realpath(glob_expr) if os.path.realpath(pkg.stage.path) not in abs_expr: - errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr)) + errors.write(f"[OUTSIDE SOURCE PATH]: {glob_expr}\n") continue # Now that we are sure that the path is within the correct # folder, make it relative and check for matches @@ -718,14 +717,14 @@ def log(pkg: "spack.package_base.PackageBase") -> None: # Here try to be conservative, and avoid discarding # the whole install procedure because of copying a # single file failed - errors.write("[FAILED TO ARCHIVE]: {0}".format(f)) + errors.write(f"[FAILED TO ARCHIVE]: {f}") if errors.getvalue(): error_file = os.path.join(target_dir, "errors.txt") fs.mkdirp(target_dir) with open(error_file, "w") as err: err.write(errors.getvalue()) - tty.warn("Errors occurred when archiving files.\n\t" "See: {0}".format(error_file)) + tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}") dump_packages(pkg.spec, packages_dir) @@ -761,11 +760,11 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict): """ # Ensure dealing with a package that has a concrete spec if not isinstance(pkg, spack.package_base.PackageBase): - raise ValueError("{0} must be a package".format(str(pkg))) + raise ValueError(f"{str(pkg)} must be a package") self.pkg = pkg if not self.pkg.spec.concrete: - raise ValueError("{0} must have a concrete spec".format(self.pkg.name)) 
+ raise ValueError(f"{self.pkg.name} must have a concrete spec") # Cache the package phase options with the explicit package, # popping the options to ensure installation of associated @@ -797,14 +796,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict): def __repr__(self) -> str: """Returns a formal representation of the build request.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the build request.""" - return "package={0}, install_args={1}".format(self.pkg.name, self.install_args) + return f"package={self.pkg.name}, install_args={self.install_args}" def _add_default_args(self) -> None: """Ensure standard install options are set to at least the default.""" @@ -930,18 +929,18 @@ def __init__( # Ensure dealing with a package that has a concrete spec if not isinstance(pkg, spack.package_base.PackageBase): - raise ValueError("{0} must be a package".format(str(pkg))) + raise ValueError(f"{str(pkg)} must be a package") self.pkg = pkg if not self.pkg.spec.concrete: - raise ValueError("{0} must have a concrete spec".format(self.pkg.name)) + raise ValueError(f"{self.pkg.name} must have a concrete spec") # The "unique" identifier for the task's package self.pkg_id = package_id(self.pkg) # The explicit build request associated with the package if not isinstance(request, BuildRequest): - raise ValueError("{0} must have a build request".format(str(pkg))) + raise ValueError(f"{str(pkg)} must have a build request") self.request = request @@ -949,8 +948,9 @@ def __init__( # ensure priority queue invariants when tasks are "removed" from the # queue. if status == STATUS_REMOVED: - msg = "Cannot create a build task for {0} with status '{1}'" - raise InstallError(msg.format(self.pkg_id, status), pkg=pkg) + raise InstallError( + f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg + ) self.status = status @@ -964,9 +964,9 @@ def __init__( # to support tracking of parallel, multi-spec, environment installs. 
self.dependents = set(get_dependent_ids(self.pkg.spec)) - tty.debug("Pkg id {0} has the following dependents:".format(self.pkg_id)) + tty.debug(f"Pkg id {self.pkg_id} has the following dependents:") for dep_id in self.dependents: - tty.debug("- {0}".format(dep_id)) + tty.debug(f"- {dep_id}") # Set of dependencies # @@ -988,9 +988,9 @@ def __init__( if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec): # The compiler is in the queue, identify it as dependency dep = spack.compilers.pkg_spec_for_compiler(compiler_spec) - dep.constrain("platform=%s" % str(arch_spec.platform)) - dep.constrain("os=%s" % str(arch_spec.os)) - dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name) + dep.constrain(f"platform={str(arch_spec.platform)}") + dep.constrain(f"os={str(arch_spec.os)}") + dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:") dep.concretize() dep_id = package_id(dep.package) self.dependencies.add(dep_id) @@ -1026,14 +1026,14 @@ def __ne__(self, other): def __repr__(self) -> str: """Returns a formal representation of the build task.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the build task.""" - dependencies = "#dependencies={0}".format(len(self.dependencies)) + dependencies = f"#dependencies={len(self.dependencies)}" return "priority={0}, status={1}, start={2}, {3}".format( self.priority, self.status, self.start, dependencies ) @@ -1056,7 +1056,7 @@ def add_dependent(self, pkg_id: str) -> None: pkg_id: package identifier of the dependent package """ if pkg_id != self.pkg_id and pkg_id not in self.dependents: - tty.debug("Adding {0} as a dependent of {1}".format(pkg_id, self.pkg_id)) + tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}") self.dependents.add(pkg_id) def flag_installed(self, installed: List[str]) -> None: @@ -1070,9 +1070,8 @@ def flag_installed(self, installed: List[str]) -> None: for pkg_id in now_installed: self.uninstalled_deps.remove(pkg_id) tty.debug( - "{0}: Removed {1} from uninstalled deps list: {2}".format( - self.pkg_id, pkg_id, self.uninstalled_deps - ), + f"{self.pkg_id}: Removed {pkg_id} from uninstalled deps list: " + f"{self.uninstalled_deps}", level=2, ) @@ -1170,18 +1169,18 @@ def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] def __repr__(self) -> str: """Returns a formal representation of the package installer.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the package installer.""" - requests = "#requests={0}".format(len(self.build_requests)) - tasks = "#tasks={0}".format(len(self.build_tasks)) - failed = "failed ({0}) = {1}".format(len(self.failed), self.failed) - installed = "installed ({0}) = {1}".format(len(self.installed), self.installed) - return "{0}: {1}; {2}; {3}; {4}".format(self.pid, requests, tasks, installed, failed) + requests = f"#requests={len(self.build_requests)}" + tasks = f"#tasks={len(self.build_tasks)}" + failed = f"failed 
({len(self.failed)}) = {self.failed}" + installed = f"installed ({len(self.installed)}) = {self.installed}" + return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}" def _add_bootstrap_compilers( self, @@ -1226,9 +1225,7 @@ def _modify_existing_task(self, pkgid: str, attr, value) -> None: for i, tup in enumerate(self.build_pq): key, task = tup if task.pkg_id == pkgid: - tty.debug( - "Modifying task for {0} to treat it as a compiler".format(pkgid), level=2 - ) + tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2) setattr(task, attr, value) self.build_pq[i] = (key, task) @@ -1293,7 +1290,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: # Check for failure since a prefix lock is not required if spack.store.STORE.failure_tracker.has_failed(dep): action = "'spack install' the dependency" - msg = "{0} is marked as an install failure: {1}".format(dep_id, action) + msg = f"{dep_id} is marked as an install failure: {action}" raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg) # Attempt to get a read lock to ensure another process does not @@ -1301,7 +1298,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: # installed ltype, lock = self._ensure_locked("read", dep_pkg) if lock is None: - msg = "{0} is write locked by another process".format(dep_id) + msg = f"{dep_id} is write locked by another process" raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg) # Flag external and upstream packages as being installed @@ -1320,7 +1317,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: or rec.installation_time > request.overwrite_time ) ): - tty.debug("Flagging {0} as installed per the database".format(dep_id)) + tty.debug(f"Flagging {dep_id} as installed per the database") self._flag_installed(dep_pkg) else: lock.release_read() @@ -1356,9 +1353,9 @@ def _prepare_for_install(self, task: BuildTask) -> None: # Ensure there is no other installed spec with the same prefix dir if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix): raise InstallError( - "Install prefix collision for {0}".format(task.pkg_id), - long_msg="Prefix directory {0} already used by another " - "installed spec.".format(task.pkg.spec.prefix), + f"Install prefix collision for {task.pkg_id}", + long_msg=f"Prefix directory {task.pkg.spec.prefix} already " + "used by another installed spec.", pkg=task.pkg, ) @@ -1368,7 +1365,7 @@ def _prepare_for_install(self, task: BuildTask) -> None: if not keep_prefix: task.pkg.remove_prefix() else: - tty.debug("{0} is partially installed".format(task.pkg_id)) + tty.debug(f"{task.pkg_id} is partially installed") # Destroy the stage for a locally installed, non-DIYStage, package if restage and task.pkg.stage.managed_by_spack: @@ -1413,9 +1410,8 @@ def _cleanup_failed(self, pkg_id: str) -> None: lock = self.failed.get(pkg_id, None) if lock is not None: err = "{0} exception when removing failure tracking for {1}: {2}" - msg = "Removing failure mark on {0}" try: - tty.verbose(msg.format(pkg_id)) + tty.verbose(f"Removing failure mark on {pkg_id}") lock.release_write() except Exception as exc: tty.warn(err.format(exc.__class__.__name__, pkg_id, str(exc))) @@ -1442,19 +1438,19 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None: pkg: the package being locally installed """ pkg_id = package_id(pkg) - pre = "{0} cannot be installed locally:".format(pkg_id) + pre = f"{pkg_id} cannot be installed locally:" # External packages cannot be installed locally. 
if pkg.spec.external: - raise ExternalPackageError("{0} {1}".format(pre, "is external")) + raise ExternalPackageError(f"{pre} is external") # Upstream packages cannot be installed locally. if pkg.spec.installed_upstream: - raise UpstreamPackageError("{0} {1}".format(pre, "is upstream")) + raise UpstreamPackageError(f"{pre} is upstream") # The package must have a prefix lock at this stage. if pkg_id not in self.locks: - raise InstallLockError("{0} {1}".format(pre, "not locked")) + raise InstallLockError(f"{pre} not locked") def _ensure_locked( self, lock_type: str, pkg: "spack.package_base.PackageBase" @@ -1481,14 +1477,14 @@ def _ensure_locked( assert lock_type in [ "read", "write", - ], '"{0}" is not a supported package management lock type'.format(lock_type) + ], f'"{lock_type}" is not a supported package management lock type' pkg_id = package_id(pkg) ltype, lock = self.locks.get(pkg_id, (lock_type, None)) if lock and ltype == lock_type: return ltype, lock - desc = "{0} lock".format(lock_type) + desc = f"{lock_type} lock" msg = "{0} a {1} on {2} with timeout {3}" err = "Failed to {0} a {1} for {2} due to {3}: {4}" @@ -1507,11 +1503,7 @@ def _ensure_locked( op = "acquire" lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout) if timeout != lock.default_timeout: - tty.warn( - "Expected prefix lock timeout {0}, not {1}".format( - timeout, lock.default_timeout - ) - ) + tty.warn(f"Expected prefix lock timeout {timeout}, not {lock.default_timeout}") if lock_type == "read": lock.acquire_read() else: @@ -1536,7 +1528,7 @@ def _ensure_locked( tty.debug(msg.format("Upgrading to", desc, pkg_id, pretty_seconds(timeout or 0))) op = "upgrade to" lock.upgrade_read_to_write(timeout) - tty.debug("{0} is now {1} locked".format(pkg_id, lock_type)) + tty.debug(f"{pkg_id} is now {lock_type} locked") except (lk.LockDowngradeError, lk.LockTimeoutError) as exc: tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc))) @@ -1561,14 +1553,14 @@ def _add_tasks(self, request: BuildRequest, all_deps): all_deps (defaultdict(set)): dictionary of all dependencies and associated dependents """ - tty.debug("Initializing the build queue for {0}".format(request.pkg.name)) + tty.debug(f"Initializing the build queue for {request.pkg.name}") # Ensure not attempting to perform an installation when user didn't # want to go that far for the requested package. 
try: _check_last_phase(request.pkg) except BadInstallPhase as err: - tty.warn("Installation request refused: {0}".format(str(err))) + tty.warn(f"Installation request refused: {str(err)}") return # Skip out early if the spec is not being installed locally (i.e., if @@ -1719,9 +1711,9 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None: # A StopPhase exception means that do_install was asked to # stop early from clients, and is not an error at this point spack.hooks.on_install_failure(task.request.pkg.spec) - pid = "{0}: ".format(self.pid) if tty.show_pid() else "" - tty.debug("{0}{1}".format(pid, str(e))) - tty.debug("Package stage directory: {0}".format(pkg.stage.source_path)) + pid = f"{self.pid}: " if tty.show_pid() else "" + tty.debug(f"{pid}{str(e)}") + tty.debug(f"Package stage directory: {pkg.stage.source_path}") def _next_is_pri0(self) -> bool: """ @@ -1816,7 +1808,7 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]: pkg_id: identifier for the package to be removed """ if pkg_id in self.build_tasks: - tty.debug("Removing build task for {0} from list".format(pkg_id)) + tty.debug(f"Removing build task for {pkg_id} from list") task = self.build_tasks.pop(pkg_id) task.status = STATUS_REMOVED return task @@ -1832,10 +1824,8 @@ def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None: """ if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]: tty.debug( - "{0} {1}".format( - install_msg(task.pkg_id, self.pid, install_status), - "in progress by another process", - ) + f"{install_msg(task.pkg_id, self.pid, install_status)} " + "in progress by another process" ) new_task = task.next_attempt(self.installed) @@ -1852,7 +1842,7 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None: """ if not os.path.exists(pkg.spec.prefix): path = spack.util.path.debug_padded_filter(pkg.spec.prefix) - tty.debug("Creating the installation directory {0}".format(path)) + tty.debug(f"Creating the installation directory {path}") spack.store.STORE.layout.create_install_directory(pkg.spec) else: # Set the proper group for the prefix @@ -1888,8 +1878,8 @@ def _update_failed( exc: optional exception if associated with the failure """ pkg_id = task.pkg_id - err = "" if exc is None else ": {0}".format(str(exc)) - tty.debug("Flagging {0} as failed{1}".format(pkg_id, err)) + err = "" if exc is None else f": {str(exc)}" + tty.debug(f"Flagging {pkg_id} as failed{err}") if mark: self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec) else: @@ -1898,14 +1888,14 @@ def _update_failed( for dep_id in task.dependents: if dep_id in self.build_tasks: - tty.warn("Skipping build of {0} since {1} failed".format(dep_id, pkg_id)) + tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed") # Ensure the dependent's uninstalled dependents are # up-to-date and their build tasks removed. 
dep_task = self.build_tasks[dep_id] self._update_failed(dep_task, mark) self._remove_task(dep_id) else: - tty.debug("No build task for {0} to skip since {1} failed".format(dep_id, pkg_id)) + tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed") def _update_installed(self, task: BuildTask) -> None: """ @@ -1935,23 +1925,21 @@ def _flag_installed( # Already determined the package has been installed return - tty.debug("Flagging {0} as installed".format(pkg_id)) + tty.debug(f"Flagging {pkg_id} as installed") self.installed.add(pkg_id) # Update affected dependents dependent_ids = dependent_ids or get_dependent_ids(pkg.spec) for dep_id in set(dependent_ids): - tty.debug("Removing {0} from {1}'s uninstalled dependencies.".format(pkg_id, dep_id)) + tty.debug(f"Removing {pkg_id} from {dep_id}'s uninstalled dependencies.") if dep_id in self.build_tasks: # Ensure the dependent's uninstalled dependencies are # up-to-date. This will require requeueing the task. dep_task = self.build_tasks[dep_id] self._push_task(dep_task.next_attempt(self.installed)) else: - tty.debug( - "{0} has no build task to update for {1}'s success".format(dep_id, pkg_id) - ) + tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success") def _init_queue(self) -> None: """Initialize the build queue from the list of build requests.""" @@ -2032,8 +2020,8 @@ def install(self) -> None: pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec install_status.next_pkg(pkg) - install_status.set_term_title("Processing {0}".format(pkg.name)) - tty.debug("Processing {0}: task={1}".format(pkg_id, task)) + install_status.set_term_title(f"Processing {pkg.name}") + tty.debug(f"Processing {pkg_id}: task={task}") # Ensure that the current spec has NO uninstalled dependencies, # which is assumed to be reflected directly in its priority. # @@ -2045,24 +2033,19 @@ def install(self) -> None: if task.priority != 0: term_status.clear() tty.error( - "Detected uninstalled dependencies for {0}: {1}".format( - pkg_id, task.uninstalled_deps - ) + f"Detected uninstalled dependencies for {pkg_id}: " f"{task.uninstalled_deps}" ) left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed] if not left: - tty.warn( - "{0} does NOT actually have any uninstalled deps" " left".format(pkg_id) - ) + tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left") dep_str = "dependencies" if task.priority > 1 else "dependency" # Hook to indicate task failure, but without an exception spack.hooks.on_install_failure(task.request.pkg.spec) raise InstallError( - "Cannot proceed with {0}: {1} uninstalled {2}: {3}".format( - pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps) - ), + f"Cannot proceed with {pkg_id}: {task.priority} uninstalled " + f"{dep_str}: {','.join(task.uninstalled_deps)}", pkg=pkg, ) @@ -2079,7 +2062,7 @@ def install(self) -> None: # assume using a separate (failed) prefix lock file. if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec): term_status.clear() - tty.warn("{0} failed to install".format(pkg_id)) + tty.warn(f"{pkg_id} failed to install") self._update_failed(task) # Mark that the package failed @@ -2096,7 +2079,7 @@ def install(self) -> None: # another process is likely (un)installing the spec or has # determined the spec has already been installed (though the # other process may be hung). 
- install_status.set_term_title("Acquiring lock for {0}".format(pkg.name)) + install_status.set_term_title(f"Acquiring lock for {pkg.name}") term_status.add(pkg_id) ltype, lock = self._ensure_locked("write", pkg) if lock is None: @@ -2119,7 +2102,7 @@ def install(self) -> None: task.request.overwrite_time = time.time() # Determine state of installation artifacts and adjust accordingly. - install_status.set_term_title("Preparing {0}".format(pkg.name)) + install_status.set_term_title(f"Preparing {pkg.name}") self._prepare_for_install(task) # Flag an already installed package @@ -2165,7 +2148,7 @@ def install(self) -> None: # Proceed with the installation since we have an exclusive write # lock on the package. - install_status.set_term_title("Installing {0}".format(pkg.name)) + install_status.set_term_title(f"Installing {pkg.name}") try: action = self._install_action(task) @@ -2186,8 +2169,9 @@ def install(self) -> None: except KeyboardInterrupt as exc: # The build has been terminated with a Ctrl-C so terminate # regardless of the number of remaining specs. - err = "Failed to install {0} due to {1}: {2}" - tty.error(err.format(pkg.name, exc.__class__.__name__, str(exc))) + tty.error( + f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}" + ) spack.hooks.on_install_cancel(task.request.pkg.spec) raise @@ -2196,9 +2180,10 @@ def install(self) -> None: raise # Checking hash on downloaded binary failed. - err = "Failed to install {0} from binary cache due to {1}:" - err += " Requeueing to install from source." - tty.error(err.format(pkg.name, str(exc))) + tty.error( + f"Failed to install {pkg.name} from binary cache due " + f"to {str(exc)}: Requeueing to install from source." + ) # this overrides a full method, which is ugly. task.use_cache = False # type: ignore[misc] self._requeue_task(task, install_status) @@ -2216,13 +2201,12 @@ def install(self) -> None: # lower levels -- skip printing if already printed. # TODO: sort out this and SpackError.print_context() tty.error( - "Failed to install {0} due to {1}: {2}".format( - pkg.name, exc.__class__.__name__, str(exc) - ) + f"Failed to install {pkg.name} due to " + f"{exc.__class__.__name__}: {str(exc)}" ) # Terminate if requested to do so on the first failure. if self.fail_fast: - raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg) + raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg) # Terminate at this point if the single explicit spec has # failed to install. 
@@ -2261,17 +2245,17 @@ def install(self) -> None: if failed_explicits or missing: for _, pkg_id, err in failed_explicits: - tty.error("{0}: {1}".format(pkg_id, err)) + tty.error(f"{pkg_id}: {err}") for _, pkg_id in missing: - tty.error("{0}: Package was not installed".format(pkg_id)) + tty.error(f"{pkg_id}: Package was not installed") if len(failed_explicits) > 0: pkg = failed_explicits[0][0] ids = [pkg_id for _, pkg_id, _ in failed_explicits] tty.debug( "Associating installation failure with first failed " - "explicit package ({0}) from {1}".format(ids[0], ", ".join(ids)) + f"explicit package ({ids[0]}) from {', '.join(ids)}" ) elif len(missing) > 0: @@ -2279,7 +2263,7 @@ def install(self) -> None: ids = [pkg_id for _, pkg_id in missing] tty.debug( "Associating installation failure with first " - "missing package ({0}) from {1}".format(ids[0], ", ".join(ids)) + f"missing package ({ids[0]}) from {', '.join(ids)}" ) raise InstallError( @@ -2357,7 +2341,7 @@ def run(self) -> bool: self.timer.stop("stage") tty.debug( - "{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501 + f"{self.pre} Building {self.pkg_id} [{self.pkg.build_system_class}]" # type: ignore[attr-defined] # noqa: E501 ) # get verbosity from do_install() parameter or saved value @@ -2402,7 +2386,7 @@ def _install_source(self) -> None: return src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src") - tty.debug("{0} Copying source to {1}".format(self.pre, src_target)) + tty.debug(f"{self.pre} Copying source to {src_target}") fs.install_tree( pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32") @@ -2464,8 +2448,7 @@ def _real_install(self) -> None: with logger.force_echo(): inner_debug_level = tty.debug_level() tty.set_debug(debug_level) - msg = "{0} Executing phase: '{1}'" - tty.msg(msg.format(self.pre, phase_fn.name)) + tty.msg(f"{self.pre} Executing phase: '{phase_fn.name}'") tty.set_debug(inner_debug_level) # Catch any errors to report to logging @@ -2539,12 +2522,9 @@ def install(self): except fs.CouldNotRestoreDirectoryBackup as e: self.database.remove(self.task.pkg.spec) tty.error( - "Recovery of install dir of {0} failed due to " - "{1}: {2}. The spec is now uninstalled.".format( - self.task.pkg.name, - e.outer_exception.__class__.__name__, - str(e.outer_exception), - ) + f"Recovery of install dir of {self.task.pkg.name} failed due to " + f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. " + "The spec is now uninstalled." ) # Unwrap the actual installation exception. @@ -2567,7 +2547,7 @@ class BadInstallPhase(InstallError): """Raised for an install phase option is not allowed for a package.""" def __init__(self, pkg_name, phase): - super().__init__("'{0}' is not a valid phase for package {1}".format(phase, pkg_name)) + super().__init__(f"'{phase}' is not a valid phase for package {pkg_name}") class ExternalPackageError(InstallError): From 927c0af1658132fc28a06e6fa05bbcf6f565de1c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Oct 2023 12:35:41 +0000 Subject: [PATCH 114/408] build(deps): bump mypy from 1.5.1 to 1.6.0 in /lib/spack/docs (#40424) Bumps [mypy](https://github.com/python/mypy) from 1.5.1 to 1.6.0. - [Commits](https://github.com/python/mypy/compare/v1.5.1...v1.6.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 lib/spack/docs/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt
index 4744d1339c062d..6d95f1d40d1ff2 100644
--- a/lib/spack/docs/requirements.txt
+++ b/lib/spack/docs/requirements.txt
@@ -10,4 +10,4 @@ pytest==7.4.2
 isort==5.12.0
 black==23.9.1
 flake8==6.1.0
-mypy==1.5.1
+mypy==1.6.0

From c9ccfde82806ef1b4d48b7d541732e8d9944ebf9 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 11 Oct 2023 06:35:51 -0600
Subject: [PATCH 115/408] build(deps): bump mypy from 1.5.1 to 1.6.0 in /.github/workflows/style (#40422)

Bumps [mypy](https://github.com/python/mypy) from 1.5.1 to 1.6.0.
- [Commits](https://github.com/python/mypy/compare/v1.5.1...v1.6.0)

---
updated-dependencies:
- dependency-name: mypy
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/style/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt
index ea8a7c40cfdf24..125c3559afe877 100644
--- a/.github/workflows/style/requirements.txt
+++ b/.github/workflows/style/requirements.txt
@@ -2,6 +2,6 @@ black==23.9.1
 clingo==5.6.2
 flake8==6.1.0
 isort==5.12.0
-mypy==1.5.1
+mypy==1.6.0
 types-six==1.16.21.9
 vermin==1.5.2

From b6a5c3eb2b06edc0cda42cafca9f8b410d7ac57c Mon Sep 17 00:00:00 2001
From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com>
Date: Wed, 11 Oct 2023 14:56:59 +0200
Subject: [PATCH 116/408] qt-base: fix-build without opengl (#40421)

---
 var/spack/repos/builtin/packages/qt-base/package.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py
index 7818313f45b623..f3fcfc0eed7193 100644
--- a/var/spack/repos/builtin/packages/qt-base/package.py
+++ b/var/spack/repos/builtin/packages/qt-base/package.py
@@ -235,6 +235,9 @@ def define_feature(key, variant=None):
         for k in features:
             define("FEATURE_" + k, True)

+        if "~opengl" in spec:
+            args.append(self.define("INPUT_opengl", "no"))
+
         # INPUT_* arguments: undefined/no/qt/system
         sys_inputs = ["doubleconversion"]
         if "+sql" in spec:

From 1b5986ed99b5f01f45226a28657c95eb7c037c58 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Wed, 11 Oct 2023 17:49:50 +0200
Subject: [PATCH 117/408] spider: respect <base> tag (#40443)

---
 lib/spack/spack/util/web.py | 28 ++++++++++++++++++++--------
 1 file changed, 20 insertions(+), 8 deletions(-)

diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index eca7bd72a2ba1f..57158db950e917 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -110,19 +110,28 @@ def handle_starttag(self, tag, attrs):
                     self.links.append(val)


-class IncludeFragmentParser(HTMLParser):
+class ExtractMetadataParser(HTMLParser):
     """This parser takes an HTML page and selects the include-fragments,
-    used on GitHub, https://github.github.io/include-fragment-element."""
+    used on GitHub, https://github.github.io/include-fragment-element,
+    as well as a possible base url."""

     def __init__(self):
         super().__init__()
-        self.links = []
+        self.fragments = []
+        self.base_url = None

     def 
handle_starttag(self, tag, attrs):
+        # <include-fragment src="..." >
         if tag == "include-fragment":
             for attr, val in attrs:
                 if attr == "src":
-                    self.links.append(val)
+                    self.fragments.append(val)
+
+        # <base href="..." />
+        elif tag == "base":
+            for attr, val in attrs:
+                if attr == "href":
+                    self.base_url = val


 def read_from_url(url, accept_content_type=None):
@@ -625,12 +634,15 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s

         # Parse out the include-fragments in the page
         # https://github.github.io/include-fragment-element
-        include_fragment_parser = IncludeFragmentParser()
-        include_fragment_parser.feed(page)
+        metadata_parser = ExtractMetadataParser()
+        metadata_parser.feed(page)
+
+        # Change of base URL due to <base> tag
+        response_url = metadata_parser.base_url or response_url

         fragments = set()
-        while include_fragment_parser.links:
-            raw_link = include_fragment_parser.links.pop()
+        while metadata_parser.fragments:
+            raw_link = metadata_parser.fragments.pop()
             abs_link = url_util.join(response_url, raw_link.strip(), resolve_href=True)

             try:

From df46b07152d78d9b95edb706983af712f2165c49 Mon Sep 17 00:00:00 2001
From: Massimiliano Culpo
Date: Wed, 11 Oct 2023 19:03:17 +0200
Subject: [PATCH 118/408] Update bootstrap buildcache to support Python 3.12 (#40404)

* Add support for Python 3.12

* Use optimized build of clingo
---
 .github/workflows/bootstrap.yml               |  14 +-
 .github/workflows/unit_tests.yaml             |   8 +-
 etc/spack/defaults/bootstrap.yaml             |   6 +-
 lib/spack/spack/bootstrap/core.py             |   2 +-
 lib/spack/spack/spec.py                       |   4 +-
 lib/spack/spack/test/cmd/bootstrap.py         |   2 +-
 .../spack/test/data/config/bootstrap.yaml     |   2 +-
 .../bootstrap/github-actions-v0.3/clingo.json | 268 ------------
 .../bootstrap/github-actions-v0.3/gnupg.json  | 204 ---------
 .../bootstrap/github-actions-v0.5/clingo.json | 389 ++++++++++++++++++
 .../bootstrap/github-actions-v0.5/gnupg.json  | 254 ++++++++++++
 .../metadata.yaml                             |   2 +-
 .../patchelf.json                             |  12 +-
 13 files changed, 672 insertions(+), 495 deletions(-)
 delete mode 100644 share/spack/bootstrap/github-actions-v0.3/clingo.json
 delete mode 100644 share/spack/bootstrap/github-actions-v0.3/gnupg.json
 create mode 100644 share/spack/bootstrap/github-actions-v0.5/clingo.json
 create mode 100644 share/spack/bootstrap/github-actions-v0.5/gnupg.json
 rename share/spack/bootstrap/{github-actions-v0.3 => github-actions-v0.5}/metadata.yaml (83%)
 rename share/spack/bootstrap/{github-actions-v0.3 => github-actions-v0.5}/patchelf.json (56%)

diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml
index fdfcf0fd57ed70..ab98e83f4ccebf 100644
--- a/.github/workflows/bootstrap.yml
+++ b/.github/workflows/bootstrap.yml
@@ -42,8 +42,8 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable github-actions-v0.3
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -80,8 +80,8 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable github-actions-v0.3
           spack external find cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
@@ -145,8 +145,8 @@ jobs:
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable github-actions-v0.3
           spack external find cmake 
bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -163,8 +163,8 @@ jobs: run: | source share/spack/setup-env.sh export PATH=/usr/local/opt/bison@2.7/bin:$PATH + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack external find --not-buildable cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -265,6 +265,7 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.4 spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -302,8 +303,8 @@ jobs: run: | source share/spack/setup-env.sh spack solve zlib + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -320,6 +321,7 @@ jobs: - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.4 spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -338,8 +340,8 @@ jobs: run: | source share/spack/setup-env.sh spack solve zlib + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index e82531677dfc1d..4e1d909f025848 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -15,7 +15,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] concretizer: ['clingo'] on_develop: - ${{ github.ref == 'refs/heads/develop' }} @@ -45,6 +45,10 @@ jobs: os: ubuntu-latest concretizer: 'clingo' on_develop: false + - python-version: '3.11' + os: ubuntu-latest + concretizer: 'clingo' + on_develop: false steps: - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 @@ -185,7 +189,7 @@ jobs: runs-on: macos-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.11"] steps: - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 with: diff --git a/etc/spack/defaults/bootstrap.yaml b/etc/spack/defaults/bootstrap.yaml index 464994d171e4b3..6f2dbe171c5f60 100644 --- a/etc/spack/defaults/bootstrap.yaml +++ b/etc/spack/defaults/bootstrap.yaml @@ -9,15 +9,15 @@ bootstrap: # may not be able to bootstrap all the software that Spack needs, # depending on its type. 
sources: + - name: 'github-actions-v0.5' + metadata: $spack/share/spack/bootstrap/github-actions-v0.5 - name: 'github-actions-v0.4' metadata: $spack/share/spack/bootstrap/github-actions-v0.4 - - name: 'github-actions-v0.3' - metadata: $spack/share/spack/bootstrap/github-actions-v0.3 - name: 'spack-install' metadata: $spack/share/spack/bootstrap/spack-install trusted: # By default we trust bootstrapping from sources and from binaries # produced on Github via the workflow + github-actions-v0.5: true github-actions-v0.4: true - github-actions-v0.3: true spack-install: true diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index 606e80d6d86224..4b7807e47bba23 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -228,7 +228,7 @@ def _install_and_test( if not abstract_spec.intersects(candidate_spec): continue - if python_spec is not None and python_spec not in abstract_spec: + if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"): continue for _, pkg_hash, pkg_sha256 in item["binaries"]: diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 93913e437993fe..aa6afa668ebe74 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -3679,7 +3679,7 @@ def _autospec(self, spec_like): return spec_like return Spec(spec_like) - def intersects(self, other: "Spec", deps: bool = True) -> bool: + def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool: """Return True if there exists at least one concrete spec that matches both self and other, otherwise False. @@ -3802,7 +3802,7 @@ def _intersects_dependencies(self, other): return True - def satisfies(self, other: "Spec", deps: bool = True) -> bool: + def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool: """Return True if all concrete specs matching self also match other, otherwise False. 
Args: diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py index eff9bf042d5481..277a279222b8d7 100644 --- a/lib/spack/spack/test/cmd/bootstrap.py +++ b/lib/spack/spack/test/cmd/bootstrap.py @@ -169,7 +169,7 @@ def test_remove_and_add_a_source(mutable_config): assert not sources # Add it back and check we restored the initial state - _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3") + _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.5") sources = spack.bootstrap.core.bootstrapping_sources() assert len(sources) == 1 diff --git a/lib/spack/spack/test/data/config/bootstrap.yaml b/lib/spack/spack/test/data/config/bootstrap.yaml index 6adb7ab9967e78..4757b8729d23a8 100644 --- a/lib/spack/spack/test/data/config/bootstrap.yaml +++ b/lib/spack/spack/test/data/config/bootstrap.yaml @@ -1,5 +1,5 @@ bootstrap: sources: - name: 'github-actions' - metadata: $spack/share/spack/bootstrap/github-actions-v0.3 + metadata: $spack/share/spack/bootstrap/github-actions-v0.5 trusted: {} diff --git a/share/spack/bootstrap/github-actions-v0.3/clingo.json b/share/spack/bootstrap/github-actions-v0.3/clingo.json deleted file mode 100644 index 60e771221df3dd..00000000000000 --- a/share/spack/bootstrap/github-actions-v0.3/clingo.json +++ /dev/null @@ -1,268 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "clingo-bootstrap", - "i5rx6vbyw7cyg3snajcpnuozo7l3lcab", - "c55d1c76adb82ac9fbe67725641ef7e4fe1ae11e2e8da0dc93a3efe362549127" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "xoxkdgo3n332ewhbh7pz2zuevrjxkrke", - "b50e2fba026e85af3f99b3c412b4f0c88ec2fbce15b48eeb75072f1d3737f3cc" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "sgmirxbu3bpn4rdpfs6jlyycfrkfxl5i", - "b0a574df6f5d59491a685a31a8ed99fb345c850a91df62ef232fbe0cca716ed1" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "5hn7hszlizeqq3leqi6lrdmyy5ssv6zs", - "36e24bc3bd27b125fdeb30d51d2554e44288877c0ce6df5a878bb4e8a1d5847a" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "qk3ecxakadq4naakng6mhdfkwauef3dn", - "9d974c0d2b546d18f0ec35e08d5ba114bf2867f7ff7c7ea990b79d019ece6380" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2omdsvzshkn2u3l5vwvwoey4es5cowfu", - "cbf72eb932ac847f87b1640f8e70e26f5261967288f7d6db19206ef352e36a88" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ifgzrctoh2ibrmitp6ushrvrnaeqtkr7", - "1c609df7351286fe09aa3452fa7ed7fedf903e9fa12cde89b916a0fc4c022949" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "esfzjhodgh5be22hvh3trg2ojzrmhzwt", - "8d070cdb2a5103cde3e6f873b1eb11d25f60464f3059d8643f943e5c9a9ec76c" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - 
"5b4uhkhrvtvdmsnctjx2isrxciy6v2o2", - "336b8b1202a8a28a0e34a98e5780ae0e2b2370b342ce67434551009b1a7c8db9" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "czapgrrey6llnsu2m4qaamv3so2lybxm", - "16bdfe4b08ee8da38f3e2c7d5cc44a38d87696cc2b6de0971a4de25efb8ad8e4" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "7za6vsetahbghs4d2qe4ajtf2iyiacwx", - "730ae7e6096ec8b83a0fc9464dda62bd6c2fec1f8565bb291f4d1ffe7746703b" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "zulnxrmchldtasffqw6qacmgg4y2qumj", - "8988325db53c0c650f64372c21571ac85e9ba4577975d14ae7dba8ab7728b5fc" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "lx54ebqzwtjpfgch7kagoxkmul56z7fa", - "81d64229299e76f9dc81f88d286bc94725e7cbcbb29ad0d66aaeaff73dd6473a" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "isu2rjoicl4xzmbl3k2c4bg35gvejkgz", - "fcc4b052832cfd327d11f657c2b7715d981b0894ed03bbce18b23a842c7d706d" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ob3k3g2wjy7cw33lfobjar44sqmojyth", - "f51fd6256bfd3afc8470614d87df61e5c9dd582fcc70f707ca66ba2b7255da12" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "norpsmparkl5dfuzdqj4537o77vjbgsl", - "477c041857b60f29ff9d6c7d2982b7eb49a2e02ebbc98af11488c32e2fb24081" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "gypv5loj2ml73duq6sr76yg5rj25te2m", - "c855d7d32aadec37c41e51f19b83558b32bc0b946a9565dba0e659c6820bd6c3" - ] - ], - "python": "python@2.7+ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rjopyx7hum3hqhgsdyw3st7frdfgrv3p", - "0e555f9bc99b4e4152939b30b2257f4f353941d152659e716bf6123c0ce11a60" - ] - ], - "python": "python@2.7~ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2l45t4kw3cqqwj6vbxhfwhzlo6b3q2p4", - "6cb90de5a3d123b7408cfef693a9a78bb69c66abbfed746c1e85aa0acb848d03" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "4psiezojm7dexequtbnav77wvgcajigq", - "b3fc33b5482357613294becb54968bd74de638abeae69e27c6c4319046a7e352" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dzhvhynye4z7oalowdcy5zt25lej3m2n", - "61c5f3e80bcc7acfc65e335f1910762df2cc5ded9d7e1e5977380a24de553dd7" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dtwevigmwgke4g6ee5byktpmzmrp2kvx", - "636937244b58611ec2eedb4422a1076fcaf09f3998593befb5a6ff1a74e1d5f7" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - 
"binaries": [ - [ - "clingo-bootstrap", - "shqedxgvjnhiwdcdrvjhbd73jaevv7wt", - "b3615b2a94a8a15fddaa74cf4d9f9b3a516467a843cdeab597f72dcf6be5e31d" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "z6v6zvc6awioeompbvo735b4flr3yuyz", - "1389192bd74c1f7059d95c4a41500201cbc2905cbba553678613e0b7e3b96c71" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/gnupg.json b/share/spack/bootstrap/github-actions-v0.3/gnupg.json deleted file mode 100644 index 2f568297892227..00000000000000 --- a/share/spack/bootstrap/github-actions-v0.3/gnupg.json +++ /dev/null @@ -1,204 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "libiconv", - "d6dhoguolmllbzy2h6pnvjm3tti6uy6f", - "7fe765a87945991d4e57782ed67c4bf42a10f95582eecd6f57de80a545bde821" - ], - [ - "npth", - "x6fb7zx6n7mos5knvi6wlnaadd7r2szx", - "fd1e5a62107339f45219c32ba20b5e82aa0880c31ac86d1b245d388ca4546990" - ], - [ - "zlib", - "c5wm3jilx6zsers3sfgdisjqusoza4wr", - "7500a717c62736872aa65df4599f797ef67b21086dd6236b4c7712cfffac9bf3" - ], - [ - "libassuan", - "3qv4bprobfwes37clg764cfipdzjdbto", - "d85cd9d2c63a296300d4dcbd667421956df241109daef5e12d3ca63fa241cb14" - ], - [ - "libgcrypt", - "3y4ubdgxvgpvhxr3bk4l5mkw4gv42n7e", - "9dad7c2635344957c4db68378964d3af84ea052d45dbe8ded9a6e6e47211daa8" - ], - [ - "libgpg-error", - "doido34kfwsvwpj4c4jcocahjb5ltebw", - "20e5c238bee91d2a841f0b4bd0358ded59a0bd665d7f251fd9cd42f83e0b283b" - ], - [ - "libksba", - "mttecm7gzdv544lbzcoahchnboxysrvi", - "1c0ae64e828a597e4cf15dd997c66cd677e41f68c63db09b9551480a197052a2" - ], - [ - "pinentry", - "se7xgv7yf4ywpjnbv7voxgeuuvs77ahb", - "2fd13fbee7ca2361dc5dd09708c72d0489611301b60635cb0206bc5b94add884" - ], - [ - "gnupg", - "yannph34bpaqkhsv5mz2icwhy3epiqxd", - "1de8b4e119fa3455d0170466fa0fb8e04957fab740aec32535b4667279312b3f" - ] - ], - "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "zlib", - "t2hjzsyf3txkg64e4bq3nihe26rzzdws", - "171e720840a28af50b62141be77bc525e666cffd1fbbe2ee62673214e8b0280f" - ], - [ - "libiconv", - "yjdji2wj4njz72fyrg46jlz5f5wfbhfr", - "94c773c3d0294cf248ec1f3e9862669dfa743fe1a76de580d9425c14c8f7dcd2" - ], - [ - "npth", - "kx3vzmpysee7jxwsudarthrmyop6hzgc", - "f8cc6204fa449ce576d450396ec2cad40a75d5712c1381a61ed1681a54f9c79f" - ], - [ - "libassuan", - "e5n5l5ftzwxs4ego5furrdbegphb6hxp", - "ef0428874aa81bcb9944deed88e1fc639f629fe3d522cab3c281235ae2a53db9" - ], - [ - "libgcrypt", - "wyncpahrpqsmpk4b7nlhg5ekkjzyjdzs", - "2309548c51a17f580f036445b701feb85d2bc552b9c4404418c2f223666cfe3b" - ], - [ - "libgpg-error", - "vhcdd6jkbiday2seg3rlkbzpf6jzfdx7", - "79dd719538d9223d6287c0bba07b981944ab6d3ab11e5060274f1b7c727daf55" - ], - [ - "libksba", - "azcgpgncynoox3dce45hkz46bp2tb5rr", - "15d301f201a5162234261fcfccd579b0ff484131444a0b6f5c0006224bb155d6" - ], - [ - "pinentry", - "e3z5ekbv4jlsie4qooubcfvsk2sb6t7l", - "5fd27b8e47934b06554e84f1374a90a93e71e60a14dbde672a8da414b27b97f4" - ], - [ - "gnupg", - "i5agfvsmzdokuooaqhlh6vro5giwei2t", - "f1bde7a1f0c84c1bbcde5757a96cf7a3e9157c2cfa9907fde799aa8e04c0d51f" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "zlib", - "v5rr6ba37tudzfuv2jszwikgcl4wd3cd", - "371ad4b277af7b97c7871b9931f2764c97362620c7990c5ad8fdb5c42a1d30dc" - ], - [ - "libiconv", - "bvcnx2e4bumjcgya4dczdhjb3fhqyass", - 
"65a00b717b3a4ee1b5ab9f84163722bdfea8eb20a2eecc9cf657c0eaac0227e9" - ], - [ - "npth", - "dkb6ez6a4c3iyrv67llwf5mzmynqdmtj", - "4d77351661d0e0130b1c89fb6c6a944aee41d701ef80d056d3fc0178a7f36075" - ], - [ - "libassuan", - "tuydcxdbb5jfvw3gri7y24b233kgotgd", - "d8775e7c1dd252437c6fa0781675b1d2202cfc0c8190e60d248928b6fca8bc9f" - ], - [ - "libgcrypt", - "kgxmg4eukwx6nn3bdera3j7cf7hxfy6n", - "6046523f10ed54be50b0211c27191b3422886984fc0c00aed1a85d1f121c42e6" - ], - [ - "libgpg-error", - "ewhrwnltlrzkpqyix2vbkf4ruq6b6ea3", - "3f3bbbf1a3cb82d39313e39bcbe3dad94a176130fc0e9a8045417d6223fb4f31" - ], - [ - "libksba", - "onxt5ry2fotgwiognwmhxlgnekuvtviq", - "3a4df13f8b880441d1df4b234a4ca01de7601d84a6627185c2b3191a34445d40" - ], - [ - "pinentry", - "fm3m4rsszzxxakcpssd34jbbe4ihrhac", - "73afa46176a7ec8f02d01a2caad3e400dc18c3c8a53f92b88a9aa9e3653db3e6" - ], - [ - "gnupg", - "gwr65ovh4wbxjgniaoqlbt3yla6rdikj", - "7a3f7afe69ca67797a339c04028ca45a9630933020b57cb56e28453197fe8a57" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "libiconv", - "vec3ac6t4ag3lb7ycvisafthqmpci74b", - "35d184218e525d8aaea60082fd2d0f1e80449ec32746cceda2ea0ca106e9a095" - ], - [ - "npth", - "jx3kmy3ilc66rgg5mqtbed5z6qwt3vrd", - "74c2c1b087667661da3e24ac83bcecf1bc2d10d69e7678d1fd232875fe295135" - ], - [ - "zlib", - "wnpbp4pu7xca24goggcy773d2y4pobbd", - "bcbd5310e8c5e75cbf33d8155448b212486dc543469d6df7e56dcecb6112ee88" - ], - [ - "libassuan", - "ynn33wutdtoo2lbjjoizgslintxst2zl", - "ac3b060690c6da0c64dcf35da047b84cc81793118fb9ff29b993f3fb9efdc258" - ], - [ - "libgcrypt", - "zzofcjer43vsxwj27c3rxapjxhsz4hlx", - "4b1977d815f657c2d6af540ea4b4ce80838cadcf4ada72a8ba142a7441e571ea" - ], - [ - "libgpg-error", - "gzr2ucybgks5jquvf4lv7iprxq5vx5le", - "a12ecb5cfd083a29d042fd309ebb5ab8fd4ace0b68b27f89b857e9a84d75b5be" - ], - [ - "libksba", - "hw4u4pam6mp3henpw476axtqaahfdy64", - "5424caf98a2d48e0ed0b9134353c242328ebeef6d2b31808d58969165e809b47" - ], - [ - "pinentry", - "hffsjitsewdgoijwgzvub6vpjwm33ywr", - "8ed7504b5b2d13ab7e1f4a0e27a882c33c5a6ebfcb43c51269333c0d6d5e1448" - ], - [ - "gnupg", - "lge4h2kjgvssyspnvutq6t3q2xual5oc", - "6080ce00fcc24185e4051a30f6d52982f86f46eee6d8a2dc4d83ab08d8195be8" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.5/clingo.json b/share/spack/bootstrap/github-actions-v0.5/clingo.json new file mode 100644 index 00000000000000..822875a6123338 --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.5/clingo.json @@ -0,0 +1,389 @@ +{ + "verified": [ + { + "binaries": [ + [ + "clingo-bootstrap", + "riu2vekwzrloc3fktlf6v7kwv6fja7lp", + "7527bc4d2d75671162fe0db3de04c5d3e1e6ab7991dfd85442c302c698febb45" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "sgf6pgn4ihfcbxutxhevp36n3orfpdkw", + "958531adcb449094bca7703f8f08d0f55a18f9a4c0f10a175ae4190d20982891" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ie4wlhhnb4snroymbnjksajwvoid6omx", + "4af14c3375a211ead3d2b4a31b59683744adcb79b820cc0c6b168ab162a7d983" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5ke32podcipzxxwrj6uzm324bxegbwca", + 
"a4106c42ee68d07c3d954ab73fe305ca4204f44d90b58fd91a8f784d9b96e7e3" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "scu4cnnf5axmjgozqc7cccpqnj5nc5tj", + "54de4ca141b92222c8f1729e9e336c8a71dad9efa641e76438fcfb79bb58fc7f" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ajbswc25irhmhbc4qibdcr6ohsvpcdku", + "8b9e7af163a4259256eca4b4a1a92b5d95463a5cf467be2a11c64ab536ca5b04" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "vwkuxa5z4pj7vviwsmrpw2r6kbbqej2p", + "a3f10024ff859e15b79ccd06c970a5f0e6ba11b0eae423f096ec9a35863816d2" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "attdjmyzpfnhoobadw55pgg4hwkyp7zk", + "f3258af3a648b47f12285dd3f048b685ed652b2b55b53861ac9913926de0f1c3" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "w4vnbsxjgkhsmgwozudzcsqlvccjsec4", + "19322c2c951fc80234963ac068c78442df57ac63055325b24a39ab705d27a5b9" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dw7ez2xcx6e5dxo3n4jin7pdbo3ihwtw", + "c368edda4b3c8fd767f5f0f098ea416864b088c767dc43135df49cf5f6ef4c93" + ] + ], + "python": "python@3.12", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "audrlxaw3ny3kyjkf6kqywumhokcxh3p", + "db2f44966ec104ffe57c0911f0b1e0d3d052753f4c46c30c0890dfb26d547b09" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "al7brxvvvhih5nlxvtfkavufqc3pe5t2", + "4e09b6d50d42c898e075fd20f7c7eddf91cb80edfd2d1326d26fd779e4d1ffed" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "v3ctpkyogl542wjibng6m2h2426spjbb", + "d9ceb4f9ca23ef1dcc33872e5410ccfef6ea0360247d3e8faedf1751fb1ae4ca" + ] + ], + "python": "python@3.8", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zxo5ih5ac6r7lj6miwyx36ot7s6a4dcw", + "f8f5e124d0e7bada34ff687a05e80b2fe207ce4d26205dab09b144edb148f05e" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "wki4qcy3wzpoxav3auxt2u7yb4sk3xcc", + "f5b9251eb51c60a71f7a0359c252f48c1a1121c426e1e6f9181808c626cb5fef" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gun6hbksmsecau5wjyrmxodq4hxievzx", + "28839ec43db444d6725bde3fcff99adadf61a392d967041fb16f0ffc0afa2f9d" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "er73owosuqfmmkxvuw3f7sqnvvj6s4xp", + "99264d48c290256bf16e202c155bf3f8c88fdbbe9894d901344d0db7258abce3" + ] + ], + "python": "python@3.12.0", + "spec": 
"clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "kv6l7qttuzk7zxkxi5fhff52qso3pj7m", + "59aa052e89d3c698fdd35e30ac21a896c8e49bbcc2f589a8f777bd5dafff2af7" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "uw5o2z753otspa3lmmy2bdodh5munkir", + "7a8b6359ce83463541ff68c221296fe9875adf28ea2b2c1416229750cf4935d2" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "d63pp2l453bfygh6q7afwdj5mw7lhsns", + "425bef3a8605732b2fbe74cdd77ef6a359cbdb62800490bbd05620a57da35b0c" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "nap44jiznzwlma6n75uxbpznppazs7av", + "316d940ca9af8c6b3bc50f8fdaadba02b0e955c4f24345a63a1a6715b01a752c" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "qhvnw4yowmk2tofg3u7a4uomisktgzw5", + "d30ec81385377521dd2d1ac091546cc2dec6a852ad31f35c24c65919f94fbf64" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "b3y37ryfuhjq6ljbkq7piglsafg5stgw", + "3c2f9cca3a6d37685fdf7d7dffb7a0505336c32562715069004631c446e46a7c" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dbloojtq5kcfd3pjmj4pislgpzrcvjpn", + "f8aeba80e6c106b769adba164702db94e077255fe1a22d6d265ccc3172b4ab1a" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gtlngzdb7iggcjmaottob54qi3b24blt", + "3efc534ba293ee51156971b8c19a597ebcb237b003c98e3c215a49a88064dfd1" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "4ab4wobwa7bvhlkrmhdp2dwgtcq5rpzo", + "3dc6539a989701ec1d83d644a79953af912c11fe6046a8d720970faf8e477991" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "fgout3h4mt4i64xaovqrpcsdy3ly2aml", + "ade67f0623e941b16f2dd531270b4863de8befd56a9a47bd87af85345bc8bed6" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5fv2q4agg4b4g53f4zhnymrbv6ogiwpy", + "18047d48538a770f014cce73756258c1a320d4ac143abef3c5d8bc09dd7a03cc" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "smkmkb5xqz4v2f7tl22g4e2ghamglox5", + "a850c80c7a48dab506f807cc936b9e54e6f5640fe96543ff58281c046140f112" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "55qeu52pkt5shpwd7ulugv7wzt5j7vqd", + "e5e1a10b3b2d543b1555f5caef9ac1a9ccdcddb36a1278d3bf68bf0e9f490626" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zcw5ieomfwwpzpzpabetix2plfqzpvwd", + 
"ed409165109488d13afe8ef12edd3b373ed08967903dc802889523b5d3bccd14" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "t4yf34cuvquqp5xd66zybmcfyhwbdlsf", + "b14e26e86bcfdac98b3a55109996265683f32910d3452e034ddc0d328bf62d67" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "grkrpj76lxsxa753uzndwfmrj3pwvyhp", + "11a535d4a8a9dbb18c2f995e10bc90b27b6ebc61f7ac2090f15db9b4f9be1a64" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zowwoarrf3hvo6i3iereolfujr42iyro", + "154d3a725f02c1775644d99a0b74f9e2cdf6736989a264ccfd5d9a8bce77a16b" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "bhqgwuvef354fwuxq7heeighavunpber", + "399dec8cb6b8cd1b03737e68ea32e6ed69030b57e5f05d983e8856024143ea78" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.5/gnupg.json b/share/spack/bootstrap/github-actions-v0.5/gnupg.json new file mode 100644 index 00000000000000..25d607b7a8c44e --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.5/gnupg.json @@ -0,0 +1,254 @@ +{ + "verified": [ + { + "binaries": [ + [ + "libgpg-error", + "stcmj3wdfxrohn2a53ecvsfsxe7rzrn4", + "942b0f0918798f0a5f007de0f104d71273e6988165c7a34a874e0846b1aa8977" + ], + [ + "libassuan", + "z27suzptvelnavipmldx6dcntiwqmguq", + "c703d6b534e89e383893913fb3b71b47322726c5e19f69178e4d1a3a42a76426" + ], + [ + "libgcrypt", + "if4uocx75kk6nc5vwvvuxq4dvaoljxkm", + "a2320f8cfc8201d15c0e9e244b824ce3d76542c148f4f0631648987957759f07" + ], + [ + "libiconv", + "nccvt7adwkq5anilrjspffdzl4hggon5", + "e23aa0184eb6661331bc850292fa22579005fd8ed62efd4c0c7a87489d8acaf6" + ], + [ + "libksba", + "lbfaarmpo2tupbezmqhfjvyspvwepv4r", + "96888ed37642a2425e2262a5904b82a38f9eecfb18a900493e32d4ab742f994b" + ], + [ + "npth", + "yc7h5c7cp7mupstvh5wlujp3xqet3xxq", + "3ac8e284878c5a556e38aab706e4303daf0a4d2bbb9fac2644495f8a362f9988" + ], + [ + "pinentry", + "rlo36pidutbjxxc3atooiwruaptfwmml", + "70114fe6c9e8723daa960f1a3dc36ed8b5a6c6f9cc828d43f79b8f59f7363605" + ], + [ + "zlib-ng", + "hewnrm76ju4qcjaezxole5htrulkij25", + "7babbe4d3d6e58631a944472356c07f0f4ad4a0759eaeefcf8584f33cce51ca6" + ], + [ + "gnupg", + "5cguax2vflgy2cwmt2ikvixtynommlmr", + "23fdd223493f441fa2e5f82d7e02837ecfad831fbfa4c27c175b3e294ed977d1" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "libgpg-error", + "7yjoei55i6wxycmzbopyrw7nrquc22ac", + "c29cfe32521a4a1e2108c711233964c27ca74ffc7505eea86cb8c047ace5715b" + ], + [ + "libassuan", + "b4pkkugfhdtitffvlh4o3dexmthr6rmk", + "27ee6fc272f011f9ad4f000dc54961cccd67b34d6f24f316ca7faf26673bf98b" + ], + [ + "libgcrypt", + "uqjmpmpeta3w7c66m4e5jojopngpibvp", + "d73fbb6e9327faec75af450d602b663ed6bb65ac9657bd795034a53f6acd32c8" + ], + [ + "libiconv", + "rfsiwcq6tlw6to42a3uxw7wcmcyk5m6r", + "1f0176395130ed8b919538fa4b1cbda9f0ff8b836e51097258efc8cf5e11f753" + ], + [ + "libksba", + "gsobopcvr2p7d7rpgrbk2ulrnhvrpt6u", + "0e404a8353f91918f385db8cf661f53f91ffd805798fcd83fb1168a1f1758fe8" + ], + [ + "npth", + "gib2edyujm2oymkvu2hllm2yeghttvn3", + 
"e04e579e514cd965baf71b7f160b063bff8b116e991e6931c6919cd5f3270e59" + ], + [ + "pinentry", + "5ndbckveeaywx77rqmujglfnqwpxu3t6", + "0ec02dca08ad2e8b3dd1c71195ed3fe3bb8856b746726708f5e5d450619e1285" + ], + [ + "zlib-ng", + "fg366ys6nx3hthuiix4xooi6xx4qe5d2", + "cc372a21608885182233c7800355c7c0bbaff47ea16e190827a9618b0c4703e2" + ], + [ + "gnupg", + "2x5ftl46zcnxk6knz5y3nuhyn7zcttk3", + "b9481e122e2cb26f69b70505830d0fcc0d200aadbb6c6572339825f17ad1e52d" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "libgpg-error", + "b7o5zrguyniw5362eey3peglzhlmig7l", + "b4373f2b0a2567b3b87e6bfc934135ce7790432aea58c802139bb5352f24b6a9" + ], + [ + "libassuan", + "6k2arop3mjwfhe4cwga6a775ud5m4scp", + "1e5143d35b0938a206ecf1ecb39b77e732629897d2b936cb8274239770055d90" + ], + [ + "libgcrypt", + "eh5h3zisjkupzr2pgqarvgs2fm7pun5r", + "b57eff265b48d0472243babfd1221c7c16189a4e324ea26e65d1a0a8c1391020" + ], + [ + "libiconv", + "vgk2zgjeflpnksj3lywuwdzs2nez63qv", + "d153953c40c630fd2bf271f3de901d7671f80e8161cf746cb54afbf28d934d03" + ], + [ + "libksba", + "au3xdl4oyfbxat6dknp3mldid7gupgt5", + "f1b1a1a02138109bc41b0b2ba54e689b43f35e2828f58b5de74280ce754fac0b" + ], + [ + "npth", + "ja7cauk7yhhyj7msnprlirue7cn3jpnj", + "cf6fd998a8f92ce1cf34c63db09c77b1891bf8f5915deef03c0cae5492bd691b" + ], + [ + "pinentry", + "6yo4flozla2tvw3ojkh2atvnfxuqx6ym", + "e78826a269109b3d67a54b1d01ff0a93be043dddcb4f52d329770ae1f75313f3" + ], + [ + "zlib-ng", + "4cgenrt3rcinueq6peyolxhegnryoeem", + "918a1e48f823806f1562c95569953a4658b2fbc54a2606a09bcd7e259b62f492" + ], + [ + "gnupg", + "lrmigjenpqj5fy4ojcs5jy6doktiu4qz", + "228ccb475932f7f40a64e9d87dec045931cc57f71b1dfd4b4c3926107222d96c" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "libgpg-error", + "km6l24czfhnmlya74nu6cxwufgimyhzz", + "23c3b7b487b36b9b03eeebbcc484adc6c8190c1bbcaa458943847148c915c6b2" + ], + [ + "libassuan", + "crkk525xdgsn2k5s4xqdaxkudz6pjqbm", + "ae3048a8059c0709d3efe832de1a8f82594373ba853d4bc2dfa05fb9dbfbc782" + ], + [ + "libgcrypt", + "4s5lkowqilor35fscjwvtmg4wasdknkc", + "62d3d13278d60d0329af1a9649b06591153ff68de4584f57777d13d693c7012e" + ], + [ + "libiconv", + "kbijqx45l3n64dlhenbuwgqpmf434g2d", + "dddf581a14a35b85cb69a8c785dd8e250f41e6de7697e34bb0ab2a942e0c2128" + ], + [ + "libksba", + "jnll3rfuh6xhgqxbwfnpizammcwloxjc", + "6200f2b6150aaf6d0e69771dfd5621582bd99ed0024fe83e7bc777cb66cabb29" + ], + [ + "npth", + "6j6b4hbkhwkb5gfigysqgn5lpu3i4kw5", + "0be0c70f3d9d45c4fe7490d8fdb8d7584de6324c3bfac8d884072409799c9951" + ], + [ + "pinentry", + "cdpcdd4iah6jot4odehm3xmulw3t3e32", + "5b447c770d0f705fbc97564fccdfbb0dfff8b6f8e2b4abbea326a538bc1bff80" + ], + [ + "zlib-ng", + "ogchs3i5tosoqrtsp3czp2azxvm7icig", + "acfa12c4e73560416e1169b37adabfbec5ee9a580a684b23e75d7591d8e39a03" + ], + [ + "gnupg", + "jwpu2wrofbwylpztltmi257benj2wp6z", + "98e2bcb4064ec0830d896938bc1fe5264dac611da71ea546b9ca03349b752041" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "libgpg-error", + "dwcgnnqt364enpf5554dio7kklspmrko", + "bfe9b506ccba0cca619133a3d2e05aa23c929749428bf6eecbff0c6985447009" + ], + [ + "libassuan", + "yl5rfsfuxd6if36h7rap7zbbpbfztkpw", + "4343dabbeed0851885992acd7b63fd74cb9d1acc06501a8af934e7e103801a15" + ], + [ + "libgcrypt", + "ka3t3dq73bkz4bs5ilyz6kymkypgbzxl", + "ec1bcc324e9f9d660395e2c586094431361a02196da43fce91be41cca5da9636" + ], + [ + "libiconv", + "5tog27ephuzc4j6kdxavhjsjm2kd5nu6", + 
"928fab3c32a1ae09651bb8491ee3855ccaf3c57a146ee72a289a073accd3fc8f" + ], + [ + "libksba", + "4ezfhjkmfc4fr34ozzl5q6b4x6jqqmsw", + "3045841c50c19a41beb0f32b4e8a960901397b95e82af3a73817babf35d4cfca" + ], + [ + "npth", + "bn4zrugdajgpk5dssoeccbl7o2gfgmcp", + "ef90ef85a818456afbff709b4a0757a077d69fd3c07d1b7612e1d461d837c46f" + ], + [ + "pinentry", + "cdwqocmusjomjjavnz6nn764oo54j5xj", + "b251047c1cb4be1bb884a7843d4419fae40fdbe5e1d36904e35f5e3fef5e4ced" + ], + [ + "zlib-ng", + "ozawh46coczjwtlul27msr3swe6pl6l5", + "0a397b53d64ac8191a36de8b32c5ced28a4c7a6dbafe9396dd897c55bcf7a168" + ], + [ + "gnupg", + "jra2dbsvpr5c5gj3ittejusa2mjh2sf5", + "054fac6eaad7c862ea4661461d847fb069876eb114209416b015748266f7d166" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/metadata.yaml b/share/spack/bootstrap/github-actions-v0.5/metadata.yaml similarity index 83% rename from share/spack/bootstrap/github-actions-v0.3/metadata.yaml rename to share/spack/bootstrap/github-actions-v0.5/metadata.yaml index d27e261721ca07..0fd413a618a5fd 100644 --- a/share/spack/bootstrap/github-actions-v0.3/metadata.yaml +++ b/share/spack/bootstrap/github-actions-v0.5/metadata.yaml @@ -3,6 +3,6 @@ description: | Buildcache generated from a public workflow using Github Actions. The sha256 checksum of binaries is checked before installation. info: - url: https://mirror.spack.io/bootstrap/github-actions/v0.3 + url: https://mirror.spack.io/bootstrap/github-actions/v0.5 homepage: https://github.com/spack/spack-bootstrap-mirrors releases: https://github.com/spack/spack-bootstrap-mirrors/releases diff --git a/share/spack/bootstrap/github-actions-v0.3/patchelf.json b/share/spack/bootstrap/github-actions-v0.5/patchelf.json similarity index 56% rename from share/spack/bootstrap/github-actions-v0.3/patchelf.json rename to share/spack/bootstrap/github-actions-v0.5/patchelf.json index 699c51c8abff53..f26fd9ce86f088 100644 --- a/share/spack/bootstrap/github-actions-v0.3/patchelf.json +++ b/share/spack/bootstrap/github-actions-v0.5/patchelf.json @@ -4,8 +4,8 @@ "binaries": [ [ "patchelf", - "cn4gsqzdnnffk7ynvbcai6wrt5ehqqrl", - "8c6a28cbe8133d719be27ded11159f0aa2c97ed1d0881119ae0ebd71f8ccc755" + "4txke6ixd2zg2yzg33l3fqnjyassono7", + "102800775f789cc293e244899f39a22f0b7a19373305ef0497ca3189223123f3" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=aarch64" @@ -14,8 +14,8 @@ "binaries": [ [ "patchelf", - "mgq6n2heyvcx2ebdpchkbknwwn3u63s6", - "1d4ea9167fb8345a178c1352e0377cc37ef2b421935cf2b48fb6fa03a94fca3d" + "tnbgxc22uebqsiwrhchf3nieatuqlsrr", + "91cf0a9d4750c04575c5ed3bcdefc4754e1cf9d1cd1bf197eb1fe20ccaa869f1" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=ppc64le" @@ -24,8 +24,8 @@ "binaries": [ [ "patchelf", - "htk62k7efo2z22kh6kmhaselru7bfkuc", - "833df21b20eaa7999ac4c5779ae26aa90397d9027aebaa686a428589befda693" + "afv7arjarb7nzmlh7c5slkfxykybuqce", + "73f4bde46b843c96521e3f5c31ab94756491404c1ad6429c9f61dbafbbfa6470" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=x86_64" From 1c8cd1ae8760fb2fb0c86ecbeecb8fe82c32867c Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:04:26 -0700 Subject: [PATCH 119/408] restic: add v0.16.0 (#40439) --- var/spack/repos/builtin/packages/restic/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py index c7eb94de479df9..493e0c098331f2 100644 --- 
a/var/spack/repos/builtin/packages/restic/package.py +++ b/var/spack/repos/builtin/packages/restic/package.py @@ -14,6 +14,7 @@ class Restic(Package): maintainers("alecbcs") + version("0.16.0", sha256="b91f5ef6203a5c50a72943c21aaef336e1344f19a3afd35406c00f065db8a8b9") version("0.15.2", sha256="52aca841486eaf4fe6422b059aa05bbf20db94b957de1d3fca019ed2af8192b7") version("0.15.1", sha256="fce382fdcdac0158a35daa640766d5e8a6e7b342ae2b0b84f2aacdff13990c52") version("0.15.0", sha256="85a6408cfb0798dab52335bcb00ac32066376c32daaa75461d43081499bc7de8") From 87bc827b0a4bdb42853c4b618561dda8949226a5 Mon Sep 17 00:00:00 2001 From: Laura Weber Date: Wed, 11 Oct 2023 10:05:37 -0700 Subject: [PATCH 120/408] tecplot: Add version 2023r1 (#40425) --- var/spack/repos/builtin/packages/tecplot/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/tecplot/package.py b/var/spack/repos/builtin/packages/tecplot/package.py index bc4a96392def20..a877c9da76d95f 100644 --- a/var/spack/repos/builtin/packages/tecplot/package.py +++ b/var/spack/repos/builtin/packages/tecplot/package.py @@ -19,6 +19,11 @@ class Tecplot(Package): maintainers("LRWeber") + version( + "2023r1", + sha256="58e7f4de875e65047f4edd684013d0ff538df6246f00c059458989f281be4c93", + expand=False, + ) version( "2022r2", sha256="e30cb7bf894e7cd568a2b24beb4bf667f1781ae27b59bb73410fafe12ddfdcdf", From f609ad15203f2dd609343f4a0e9a8f86696e7ca9 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Wed, 11 Oct 2023 19:19:58 +0200 Subject: [PATCH 121/408] c-blosc2: add 2.10.5 (#40428) --- var/spack/repos/builtin/packages/c-blosc2/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/c-blosc2/package.py b/var/spack/repos/builtin/packages/c-blosc2/package.py index f2f73245aa7267..8eceeca8952917 100644 --- a/var/spack/repos/builtin/packages/c-blosc2/package.py +++ b/var/spack/repos/builtin/packages/c-blosc2/package.py @@ -17,9 +17,10 @@ class CBlosc2(CMakePackage): maintainers("ax3l", "robert-mijakovic") version("develop", branch="master") - # 2.10.1+ adds Blosc2 CMake CONFIG files + version("2.10.5", sha256="a88f94bf839c1371aab8207a6a43698ceb92c72f65d0d7fe5b6e59f24c138b4d") # 2.10.2+ fixes regressions with external dependencies version("2.10.2", sha256="069785bc14c006c7dab40ea0c620bdf3eb8752663fd55c706d145bceabc2a31d") + # 2.10.1+ adds Blosc2 CMake CONFIG files version("2.10.1", sha256="1dd65be2d76eee205c06e8812cc1360448620eee5e368b25ade4ea310654cd01") version("2.10.0", sha256="cb7f7c0c62af78982140ecff21a2f3ca9ce6a0a1c02e314fcdce1a98da0fe231") version("2.9.3", sha256="1f36b7d79d973505582b9a804803b640dcc0425af3d5e676070847ac4eb38176") @@ -38,7 +39,7 @@ class CBlosc2(CMakePackage): variant("zlib", default=True, description="support for ZLIB") variant("zstd", default=True, description="support for ZSTD") - depends_on("cmake@2.8.10:", type="build") + depends_on("cmake@3.16.3:", type="build") depends_on("lizard", when="+lizard") depends_on("lz4", when="+lz4") depends_on("snappy", when="+snappy") From 36c0bbae3c16e392026ff586316afaa898578979 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 11 Oct 2023 19:20:49 +0200 Subject: [PATCH 122/408] cmake: add v3.27.7 (#40441) --- var/spack/repos/builtin/packages/cmake/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 796eb4c397045f..44f6b596aa1dd5 100644 
--- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -27,6 +27,7 @@ class Cmake(Package): executables = ["^cmake[0-9]*$"] version("master", branch="master") + version("3.27.7", sha256="08f71a106036bf051f692760ef9558c0577c42ac39e96ba097e7662bd4158d8e") version("3.27.6", sha256="ef3056df528569e0e8956f6cf38806879347ac6de6a4ff7e4105dc4578732cfb") version("3.27.4", sha256="0a905ca8635ca81aa152e123bdde7e54cbe764fdd9a70d62af44cad8b92967af") version("3.27.3", sha256="66afdc0f181461b70b6fedcde9ecc4226c5cd184e7203617c83b7d8e47f49521") From 31a63865c7caf035fea1b02cb33833f8159377db Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:37:48 -0700 Subject: [PATCH 123/408] krb5: Fix spack install krb5 ^openssl~shared (#40306) --- var/spack/repos/builtin/packages/krb5/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/krb5/package.py b/var/spack/repos/builtin/packages/krb5/package.py index db9218cdff6871..84dc616066e01d 100644 --- a/var/spack/repos/builtin/packages/krb5/package.py +++ b/var/spack/repos/builtin/packages/krb5/package.py @@ -35,6 +35,7 @@ class Krb5(AutotoolsPackage): depends_on("openssl") depends_on("gettext") depends_on("findutils", type="build") + depends_on("pkgconfig", type="build", when="^openssl~shared") variant( "shared", default=True, description="install shared libraries if True, static if false" @@ -80,6 +81,11 @@ def configure_args(self): if "%gcc@10:" in self.spec: args.append("CFLAGS=-fcommon") + if self.spec["openssl"].satisfies("~shared"): + pkgconf = which("pkg-config") + ssllibs = pkgconf("--static", "--libs", "openssl", output=str) + args.append(f"LDFLAGS={ssllibs}") + return args def flag_handler(self, name, flags): From e04e0f05f38556bd5f5944ad9a34448bb53d07a3 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:43:31 -0700 Subject: [PATCH 124/408] adol-c: use f-strings (#40438) --- var/spack/repos/builtin/packages/adol-c/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py index c493a53cbaae48..475edbd088e715 100644 --- a/var/spack/repos/builtin/packages/adol-c/package.py +++ b/var/spack/repos/builtin/packages/adol-c/package.py @@ -83,12 +83,12 @@ def configure_args(self): configure_args = [] if "+boost" in spec: - configure_args.append("--with-boost={0}".format(spec["boost"].prefix)) + configure_args.append(f"--with-boost={spec['boost'].prefix}") else: configure_args.append("--with-boost=no") if "+openmp" in spec: - configure_args.append("--with-openmp-flag={0}".format(self.compiler.openmp_flag)) + configure_args.append(f"--with-openmp-flag={self.compiler.openmp_flag}") configure_args.extend( self.enable_or_disable("advanced-branching", variant="advanced_branching") From 7d6cb256a728a223d5ce6b9cb06abf544ed6214a Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:44:57 -0700 Subject: [PATCH 125/408] 3proxy: use f-strings (#40429) --- var/spack/repos/builtin/packages/3proxy/package.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/3proxy/package.py b/var/spack/repos/builtin/packages/3proxy/package.py index 8cae9f900528cc..e9a408698b87ac 100644 --- a/var/spack/repos/builtin/packages/3proxy/package.py +++ b/var/spack/repos/builtin/packages/3proxy/package.py @@ -24,9 +24,7 
@@ class _3proxy(MakefilePackage): depends_on("m4", type="build") def build(self, spec, prefix): - make("-f", "Makefile.{0}".format(platform.system())) + make("-f", f"Makefile.{platform.system()}") def install(self, spec, prefix): - make( - "-f", "Makefile.{0}".format(platform.system()), "prefix={0}".format(prefix), "install" - ) + make("-f", f"Makefile.{platform.system()}", f"prefix={prefix}", "install") From 1b68e89721b295ecaf7356f33bf19b281b4535a4 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:46:23 -0700 Subject: [PATCH 126/408] 7zip: use f-strings (#40430) --- var/spack/repos/builtin/packages/7zip/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/7zip/package.py b/var/spack/repos/builtin/packages/7zip/package.py index 2e9d09bbb9c63b..123e233ab188d2 100644 --- a/var/spack/repos/builtin/packages/7zip/package.py +++ b/var/spack/repos/builtin/packages/7zip/package.py @@ -75,8 +75,8 @@ def is_64bit(self): def build(self, spec, prefix): link_type = "1" if "static" in spec.variants["link_type"].value else "0" nmake_args = [ - "PLATFORM=%s" % self.plat_arch, - "MY_STATIC_LINK=%s" % link_type, + f"PLATFORM={self.plat_arch}", + f"MY_STATIC_LINK={link_type}", "NEW_COMPILER=1", ] # 7zips makefile is configured in such as way that if this value is set From 3f5d5d165296e7136830c5e2be3f6b5230888851 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:47:42 -0700 Subject: [PATCH 127/408] adios: use f-strings (#40436) --- var/spack/repos/builtin/packages/adios/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/adios/package.py b/var/spack/repos/builtin/packages/adios/package.py index 043bf586b2a183..86bc26e89e9046 100644 --- a/var/spack/repos/builtin/packages/adios/package.py +++ b/var/spack/repos/builtin/packages/adios/package.py @@ -119,7 +119,7 @@ def validate(self, spec): def with_or_without_hdf5(self, activated): if activated: - return "--with-phdf5={0}".format(self.spec["hdf5"].prefix) + return f"--with-phdf5={self.spec['hdf5'].prefix}" return "--without-phdf5" @@ -134,7 +134,7 @@ def configure_args(self): extra_args = [ # required, otherwise building its python bindings will fail - "CFLAGS={0}".format(self.compiler.cc_pic_flag) + f"CFLAGS={self.compiler.cc_pic_flag}" ] extra_args += self.enable_or_disable("shared") @@ -148,7 +148,7 @@ def configure_args(self): extra_args += self.with_or_without("infiniband") if "+zlib" in spec: - extra_args.append("--with-zlib={0}".format(spec["zlib-api"].prefix)) + extra_args.append(f"--with-zlib={spec['zlib-api'].prefix}") else: extra_args.append("--without-zlib") From 47cf25473a141f710f1d32c68b4b2ea6708afbed Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:48:32 -0700 Subject: [PATCH 128/408] abacus: use f-string (#40431) --- var/spack/repos/builtin/packages/abacus/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/abacus/package.py b/var/spack/repos/builtin/packages/abacus/package.py index 68ee2f4894cc26..ccf89f82b8db66 100644 --- a/var/spack/repos/builtin/packages/abacus/package.py +++ b/var/spack/repos/builtin/packages/abacus/package.py @@ -65,7 +65,7 @@ def edit(self, spec, prefix): spec["fftw"].prefix, spec["elpa"].prefix, inc_var, - "{0}".format(spec["elpa"].version), + f"{spec['elpa'].version}", spec["cereal"].prefix, ) ) From 28fd091ff39e3b237161ea94060096fe51ff8d45 Mon Sep 17 00:00:00 2001 From: Alec Scott 
Date: Wed, 11 Oct 2023 10:48:39 -0700 Subject: [PATCH 129/408] adiak: use f-strings (#40435) --- var/spack/repos/builtin/packages/adiak/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py index 5bc8804dea9bac..05f936e3f92c5a 100644 --- a/var/spack/repos/builtin/packages/adiak/package.py +++ b/var/spack/repos/builtin/packages/adiak/package.py @@ -36,8 +36,8 @@ class Adiak(CMakePackage): def cmake_args(self): args = [] if self.spec.satisfies("+mpi"): - args.append("-DMPI_CXX_COMPILER=%s" % self.spec["mpi"].mpicxx) - args.append("-DMPI_C_COMPILER=%s" % self.spec["mpi"].mpicc) + args.append(f"-DMPI_CXX_COMPILER={self.spec['mpi'].mpicxx}") + args.append(f"-DMPI_C_COMPILER={self.spec['mpi'].mpicc}") args.append("-DENABLE_MPI=ON") else: args.append("-DENABLE_MPI=OFF") From 1c9898953631147069f8a344db864657772ba401 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 10:51:26 -0700 Subject: [PATCH 130/408] abduco: use f-string (#40432) --- var/spack/repos/builtin/packages/abduco/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/abduco/package.py b/var/spack/repos/builtin/packages/abduco/package.py index 8287ecc7d94a35..9c3ea0c4487961 100644 --- a/var/spack/repos/builtin/packages/abduco/package.py +++ b/var/spack/repos/builtin/packages/abduco/package.py @@ -21,4 +21,4 @@ class Abduco(MakefilePackage): version("0.4", sha256="bda3729df116ce41f9a087188d71d934da2693ffb1ebcf33b803055eb478bcbb") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install") + make(f"PREFIX={prefix}", "install") From 73c06861b5ada0a4d90e9687f1e65197f0d9c300 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 11:03:30 -0700 Subject: [PATCH 131/408] acts: use f-strings (#40434) --- var/spack/repos/builtin/packages/acts/package.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index b9f3168e4aee8c..9b06fd3d444360 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -336,15 +336,15 @@ def cmake_args(self): def cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies("+" + spack_variant) - return "-DACTS_BUILD_{0}={1}".format(cmake_label, enabled) + return f"-DACTS_BUILD_{cmake_label}={enabled}" def enable_cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies(spack_variant) - return "-DACTS_ENABLE_{0}={1}".format(cmake_label, enabled) + return f"-DACTS_ENABLE_{cmake_label}={enabled}" def example_cmake_variant(cmake_label, spack_variant, type="BUILD"): enabled = spec.satisfies("+examples +" + spack_variant) - return "-DACTS_{0}_EXAMPLES_{1}={2}".format(type, cmake_label, enabled) + return f"-DACTS_{type}_EXAMPLES_{cmake_label}={enabled}" def plugin_label(plugin_name): if spec.satisfies("@0.33:"): @@ -400,7 +400,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): ] log_failure_threshold = spec.variants["log_failure_threshold"].value - args.append("-DACTS_LOG_FAILURE_THRESHOLD={0}".format(log_failure_threshold)) + args.append(f"-DACTS_LOG_FAILURE_THRESHOLD={log_failure_threshold}") if spec.satisfies("@19.4.0:"): args.append("-DACTS_ENABLE_LOG_FAILURE_THRESHOLD=ON") @@ -431,11 +431,11 @@ def plugin_cmake_variant(plugin_name, spack_variant): if "+cuda" in spec: cuda_arch = 
spec.variants["cuda_arch"].value if cuda_arch != "none": - args.append("-DCUDA_FLAGS=-arch=sm_{0}".format(cuda_arch[0])) + args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch[0]}") if "+python" in spec: python = spec["python"].command.path - args.append("-DPython_EXECUTABLE={0}".format(python)) + args.append(f"-DPython_EXECUTABLE={python}") args.append(self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")) From 4b997a80f9ac52eac1f34c901b949926277c425d Mon Sep 17 00:00:00 2001 From: Miroslav Stoyanov <30537612+mkstoyanov@users.noreply.github.com> Date: Wed, 11 Oct 2023 14:33:31 -0400 Subject: [PATCH 132/408] update for the tasmanain versions (#40453) --- .../builtin/packages/tasmanian/addons70.patch | 25 ----- .../builtin/packages/tasmanian/package.py | 106 ++++-------------- .../packages/tasmanian/packageconf70.patch | 15 --- 3 files changed, 24 insertions(+), 122 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/tasmanian/addons70.patch delete mode 100644 var/spack/repos/builtin/packages/tasmanian/packageconf70.patch diff --git a/var/spack/repos/builtin/packages/tasmanian/addons70.patch b/var/spack/repos/builtin/packages/tasmanian/addons70.patch deleted file mode 100644 index 8d983c6308b730..00000000000000 --- a/var/spack/repos/builtin/packages/tasmanian/addons70.patch +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/Addons/CMakeLists.txt b/Addons/CMakeLists.txt -index 1279ada..0b6d9be 100644 ---- a/Addons/CMakeLists.txt -+++ b/Addons/CMakeLists.txt -@@ -49,19 +49,7 @@ endif() - - # The Tasmanian MPI capabilities are templated into the Addons - if (Tasmanian_ENABLE_MPI) -- target_link_libraries(Tasmanian_addons INTERFACE ${MPI_CXX_LIBRARIES}) -- -- if (DEFINED MPI_CXX_INCLUDE_PATH) -- target_include_directories(Tasmanian_addons INTERFACE "${MPI_CXX_INCLUDE_PATH}") -- endif() -- -- if(DEFINED MPI_CXX_COMPILE_FLAGS) -- target_compile_options(Tasmanian_addons INTERFACE "${MPI_CXX_COMPILE_FLAGS}") -- endif() -- -- if(DEFINED MPI_CXX_LINK_FLAGS) -- set_target_properties(Tasmanian_addons PROPERTIES INTERFACE_LINK_OPTIONS "${MPI_CXX_LINK_FLAGS}") -- endif() -+ target_link_libraries(Tasmanian_addons INTERFACE MPI::MPI_CXX) - - add_executable(Tasmanian_mpitester testMPI.cpp testMPI.hpp testMPIDream.hpp) - set_target_properties(Tasmanian_mpitester PROPERTIES OUTPUT_NAME "mpitester") diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index 2a3db2ade34ba3..92aa21833973b0 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -12,7 +12,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): interpolation as well as parameter calibration.""" homepage = "https://ornl.github.io/TASMANIAN/stable/" - url = "https://github.com/ORNL/TASMANIAN/archive/v7.9.tar.gz" + url = "https://github.com/ORNL/TASMANIAN/archive/v8.0.tar.gz" git = "https://github.com/ORNL/TASMANIAN.git" tags = ["e4s"] @@ -22,38 +22,15 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") + version("8.0", sha256="248c941346150bf6cfb386ba86b69bd4697f4fc93bff0e8d5f57e555614fd534") version("7.9", sha256="decba62e6bbccf1bc26c6e773a8d4fd51d7f3e3e534ddd386ec41300694ce5cc") version("7.7", sha256="85fb3a7b302ea21a3b700712767a59a623d9ab93da03308fa47d4413654c3878") version("7.5", sha256="d621bd36dced4db86ef638693ba89b336762e7a3d7fedb3b5bcefb03390712b3") - version("7.3", sha256="5bd1dd89cc5c84506f6900b6569b17e50becd73eb31ec85cfa11d6f1f912c4fa") - # API 
is very stable since 7.0, but the refactoring made 7.0 and 7.1 rocky + # Tasmanian is backwards compatible, no need to use 7.3 from back in 2020 version( - "7.1", - sha256="9c24a591506a478745b802f1fa5c557da7bc80b12d8070855de6bc7aaca7547a", - deprecated=True, - ) - version( - "7.0", - sha256="4094ba4ee2f1831c575d00368c8471d3038f813398be2e500739cef5c7c4a47b", - deprecated=True, - ) # use for xsdk-0.5.0 - # 5.0, 5.1 and 6.0 use older API from 2018, all users have moved up by now - version( - "6.0", - sha256="ceab842e9fbce2f2de971ba6226967caaf1627b3e5d10799c3bd2e7c3285ba8b", - deprecated=True, - ) # use for xsdk-0.4.0 - version( - "5.1", - sha256="b0c1be505ce5f8041984c63edca9100d81df655733681858f5cc10e8c0c72711", - deprecated=True, - ) - - version( - "5.0", - sha256="2540bb63dea987ab205f7b375aff41f320b1de9bd7f1d1064ef96b22eeda1251", - url="https://tasmanian.ornl.gov/documents/Tasmanian_v5.0.zip", + "7.3", + sha256="5bd1dd89cc5c84506f6900b6569b17e50becd73eb31ec85cfa11d6f1f912c4fa", deprecated=True, ) @@ -73,7 +50,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): variant("python", default=False, description="add Python binding for Tasmanian") - variant("fortran", default=False, description="add Fortran 90/95 interface to Tasmanian") + variant("fortran", default=False, description="add Fortran 2003 interface to Tasmanian") variant( "build_type", @@ -82,12 +59,10 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): values=("Debug", "Release"), ) - depends_on("cmake@2.8:", type="build") - depends_on("cmake@3.5:", type="build", when="@6.0:") depends_on("cmake@3.10:", type=("build", "run"), when="@7.0:") - depends_on("cmake@3.22:", type=("build", "run"), when="@develop") + depends_on("cmake@3.22:", type=("build", "run"), when="@8.0:") - depends_on("python@2.7:", when="+python", type=("build", "run")) + depends_on("python@3.0:", when="+python", type=("build", "run")) depends_on("py-numpy", when="+python", type=("build", "run")) extends("python", when="+python", type=("build", "run")) @@ -97,15 +72,14 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): depends_on("blas", when="+blas", type=("build", "run")) # openblas 0.2.18 or newer depends_on("lapack", when="+blas @7.1:", type=("build", "run")) # lapack used since 7.1 - depends_on("cuda@8.0.61:", when="+cuda", type=("build", "run")) - depends_on("cuda@8.0.61:", when="+magma", type=("build", "run")) + depends_on("cuda@10.0:", when="+cuda", type=("build", "run")) + depends_on("cuda@10.0:", when="+magma", type=("build", "run")) - depends_on("hip@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocblas@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocsparse@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocsolver@3.8:", when="+rocm", type=("build", "run")) + depends_on("hip@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocblas@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocsparse@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocsolver@5.0:", when="+rocm", type=("build", "run")) - depends_on("magma@2.4.0:", when="+magma @6.0:", type=("build", "run")) depends_on("magma@2.5.0:", when="+magma @7.0:", type=("build", "run")) # https://github.com/spack/spack/issues/39536#issuecomment-1685161942 @@ -114,16 +88,6 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): conflicts("+magma", when="~cuda~rocm") # currently MAGMA only works with CUDA conflicts("+cuda", when="+rocm") # can pick CUDA or ROCm, not both - # old versions - conflicts("+rocm", when="@:7.3") # 
ROCm was added in 7.3, tested in 7.5 - conflicts("+magma", when="@:5.1") # magma does not work prior to 6.0 - conflicts("+mpi", when="@:5.1") # MPI is broken prior to 6.0 - conflicts("+xsdkflags", when="@:5.1") # 6.0 is the first version included in xSDK - - # patching some bugs - patch("addons70.patch", when="@7.0") - patch("packageconf70.patch", when="@7.0") - def setup_build_environment(self, env): # needed for the hipcc compiler if "+rocm" in self.spec: @@ -132,29 +96,16 @@ def setup_build_environment(self, env): def cmake_args(self): spec = self.spec - # 7.1 is the last version to use xSDK legacy build options - if "+xsdkflags" in spec and spec.satisfies("@:7.1"): - args = [ - "-DUSE_XSDK_DEFAULTS:BOOL=ON", - self.define_from_variant("XSDK_ENABLE_PYTHON", "python"), - self.define_from_variant("TPL_ENABLE_MPI", "mpi"), - self.define_from_variant("XSDK_ENABLE_OPENMP", "openmp"), - self.define_from_variant("TPL_ENABLE_BLAS", "blas"), - self.define_from_variant("XSDK_ENABLE_CUDA", "cuda"), - self.define_from_variant("TPL_ENABLE_MAGMA", "magma"), - self.define_from_variant("XSDK_ENABLE_FORTRAN", "fortran"), - ] - else: - args = [ - self.define_from_variant("Tasmanian_ENABLE_OPENMP", "openmp"), - self.define_from_variant("Tasmanian_ENABLE_BLAS", "blas"), - self.define_from_variant("Tasmanian_ENABLE_PYTHON", "python"), - self.define_from_variant("Tasmanian_ENABLE_MPI", "mpi"), - self.define_from_variant("Tasmanian_ENABLE_CUDA", "cuda"), - self.define_from_variant("Tasmanian_ENABLE_HIP", "rocm"), - self.define_from_variant("Tasmanian_ENABLE_MAGMA", "magma"), - self.define_from_variant("Tasmanian_ENABLE_FORTRAN", "fortran"), - ] + args = [ + self.define_from_variant("Tasmanian_ENABLE_OPENMP", "openmp"), + self.define_from_variant("Tasmanian_ENABLE_BLAS", "blas"), + self.define_from_variant("Tasmanian_ENABLE_PYTHON", "python"), + self.define_from_variant("Tasmanian_ENABLE_MPI", "mpi"), + self.define_from_variant("Tasmanian_ENABLE_CUDA", "cuda"), + self.define_from_variant("Tasmanian_ENABLE_HIP", "rocm"), + self.define_from_variant("Tasmanian_ENABLE_MAGMA", "magma"), + self.define_from_variant("Tasmanian_ENABLE_FORTRAN", "fortran"), + ] if spec.satisfies("+blas"): args.append("-DBLAS_LIBRARIES={0}".format(spec["blas"].libs.joined(";"))) @@ -165,15 +116,6 @@ def cmake_args(self): "-DPYTHON_EXECUTABLE:FILEPATH={0}".format(self.spec["python"].command.path) ) - # See https://github.com/ROCmSoftwarePlatform/rocFFT/issues/322 - if self.spec.satisfies("+rocm") and self.spec.satisfies("^cmake@3.21:"): - args.append(self.define("__skip_rocmclang", "ON")) - - # _CUBLAS and _CUDA were separate options prior to 6.0 - # skipping _CUBLAS leads to peformance regression - if spec.satisfies("@:5.1"): - args.append(self.define_from_variant("Tasmanian_ENABLE_CUBLAS", "cuda")) - return args @run_after("install") diff --git a/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch b/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch deleted file mode 100644 index c53255687f08b6..00000000000000 --- a/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/Config/TasmanianConfig.in.cmake b/Config/TasmanianConfig.in.cmake -index 8912e4c..df54aaf 100644 ---- a/Config/TasmanianConfig.in.cmake -+++ b/Config/TasmanianConfig.in.cmake -@@ -7,6 +7,10 @@ cmake_minimum_required(VERSION 3.10) - # but this doesn't seem to work, not sure if this is a "relocatable package" (low concern) - 
include("@CMAKE_INSTALL_PREFIX@/lib/@CMAKE_PROJECT_NAME@/@CMAKE_PROJECT_NAME@.cmake") - -+if (@Tasmanian_ENABLE_MPI@) -+ find_package(MPI REQUIRED) -+endif() -+ - add_executable(Tasmanian::tasgrid IMPORTED) - set_property(TARGET Tasmanian::tasgrid PROPERTY IMPORTED_LOCATION "@CMAKE_INSTALL_PREFIX@/bin/tasgrid${CMAKE_EXECUTABLE_SUFFIX_CXX}") - From 17ecf92e597893b8fb64a1bbe199e4ee0e0db1d8 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 11 Oct 2023 21:04:09 +0200 Subject: [PATCH 133/408] curl: add v8.4.0, allow r@8.3: to use it (#40442) * curl: 8.4.0 * fix r curl upperbound range --- .../repos/builtin/packages/curl/package.py | 20 ++++++++++++++++--- var/spack/repos/builtin/packages/r/package.py | 6 +++--- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index f67a2a55ace16e..46894046df60a4 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -26,11 +26,25 @@ class Curl(NMakePackage, AutotoolsPackage): maintainers("alecbcs") - version("8.1.2", sha256="b54974d32fd610acace92e3df1f643144015ac65847f0a041fdc17db6f43f243") - version("8.0.1", sha256="9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf") - version("7.88.1", sha256="8224b45cce12abde039c12dc0711b7ea85b104b9ad534d6e4c5b4e188a61c907") + version("8.4.0", sha256="e5250581a9c032b1b6ed3cf2f9c114c811fc41881069e9892d115cc73f9e88c6") # Deprecated versions due to CVEs + # CVE-2023-38545 + version( + "8.1.2", + sha256="b54974d32fd610acace92e3df1f643144015ac65847f0a041fdc17db6f43f243", + deprecated=True, + ) + version( + "8.0.1", + sha256="9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf", + deprecated=True, + ) + version( + "7.88.1", + sha256="8224b45cce12abde039c12dc0711b7ea85b104b9ad534d6e4c5b4e188a61c907", + deprecated=True, + ) # https://nvd.nist.gov/vuln/detail/CVE-2022-43551 version( "7.87.0", diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index f72c192071bd45..dfe397ca2c0d7c 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -70,9 +70,9 @@ class R(AutotoolsPackage): depends_on("blas", when="+external-lapack") depends_on("lapack", when="+external-lapack") depends_on("bzip2") - # R didn't anticipate the celebratory - # non-breaking major version bump of curl 8. - depends_on("curl+libidn2@:7") + depends_on("curl+libidn2") + # R didn't anticipate the celebratory non-breaking major version bump of curl 8. + depends_on("curl@:7", when="@:4.2") depends_on("icu4c") depends_on("java") depends_on("ncurses") From 727e9f0583639b5b00e8cb8bc95a9913087fb0a7 Mon Sep 17 00:00:00 2001 From: Victor Brunini Date: Wed, 11 Oct 2023 14:30:44 -0700 Subject: [PATCH 134/408] cmake: drop CMAKE_STATIC_LINKER_FLAGS (#40423) Because those end up being passed to ar which does not understand linker arguments. This was making ldflags largely unusuable for statically linked cmake packages. 
--- lib/spack/spack/build_systems/cmake.py | 6 +++--- lib/spack/spack/test/flag_handlers.py | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py index e51dbf922013ca..1859e40fe51d20 100644 --- a/lib/spack/spack/build_systems/cmake.py +++ b/lib/spack/spack/build_systems/cmake.py @@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags): # We specify for each of them. if flags["ldflags"]: ldflags = " ".join(flags["ldflags"]) - ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}" # cmake has separate linker arguments for types of builds. - for type in ["EXE", "MODULE", "SHARED", "STATIC"]: - self.cmake_flag_args.append(ld_string.format(type, ldflags)) + self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}") + self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}") + self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}") # CMake has libs options separated by language. Apply ours to each. if flags["ldlibs"]: diff --git a/lib/spack/spack/test/flag_handlers.py b/lib/spack/spack/test/flag_handlers.py index ae83b05885fa39..3e680c1ab7d037 100644 --- a/lib/spack/spack/test/flag_handlers.py +++ b/lib/spack/spack/test/flag_handlers.py @@ -121,7 +121,6 @@ def test_ld_flags_cmake(self, temp_env): "-DCMAKE_EXE_LINKER_FLAGS=-mthreads", "-DCMAKE_MODULE_LINKER_FLAGS=-mthreads", "-DCMAKE_SHARED_LINKER_FLAGS=-mthreads", - "-DCMAKE_STATIC_LINKER_FLAGS=-mthreads", } def test_ld_libs_cmake(self, temp_env): From 87b8bf3c34423db8486d2603dd0c1f37de68459b Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Wed, 11 Oct 2023 14:38:28 -0700 Subject: [PATCH 135/408] apr-util: Fix spack install apr-util +crypto ^openssl~shared (#40301) --- var/spack/repos/builtin/packages/apr-util/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 5ac152ba479d6d..df1379a0fa0ac1 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -32,6 +32,7 @@ class AprUtil(AutotoolsPackage): depends_on("postgresql", when="+pgsql") depends_on("sqlite", when="+sqlite") depends_on("unixodbc", when="+odbc") + depends_on("pkg-config", type="build", when="+crypto ^openssl~shared") @property def libs(self): @@ -85,6 +86,13 @@ def configure_args(self): else: args.append("--without-odbc") + if spec.satisfies("+crypto ^openssl~shared"): + # Need pkg-config to get zlib and -ldl flags + # (see https://dev.apr.apache.narkive.com/pNnO9F1S/configure-bug-openssl) + pkgconf = which("pkg-config") + ssl_libs = pkgconf("--libs", "--static", "openssl", output=str) + args.append(f"LIBS={ssl_libs}") + return args def check(self): From dcbb40d15c42edda2b69595ae49ce35b9a93cd48 Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Wed, 11 Oct 2023 17:43:21 -0400 Subject: [PATCH 136/408] fix ck build for 5.6.1 (#40304) * initial commit to fix ck build for 5.6.1 * disable mlir for miopen-hip * use satisfies for checking specs and add nlohmann-json dependency for 5.4 onwards --- .../repos/builtin/packages/composable-kernel/package.py | 7 ++++++- var/spack/repos/builtin/packages/miopen-hip/package.py | 2 ++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/composable-kernel/package.py 
b/var/spack/repos/builtin/packages/composable-kernel/package.py index efa05197f9338c..85b383896a8af9 100644 --- a/var/spack/repos/builtin/packages/composable-kernel/package.py +++ b/var/spack/repos/builtin/packages/composable-kernel/package.py @@ -64,9 +64,14 @@ def cmake_args(self): ] if "auto" not in self.spec.variants["amdgpu_target"]: args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target")) + if self.spec.satisfies("@5.6.1:"): + args.append(self.define("INSTANCES_ONLY", "ON")) return args def build(self, spec, prefix): with working_dir(self.build_directory): # only instances is necessary to build and install - make("instances") + if self.spec.satisfies("@5.6.1:"): + make() + else: + make("instances") diff --git a/var/spack/repos/builtin/packages/miopen-hip/package.py b/var/spack/repos/builtin/packages/miopen-hip/package.py index 4843ae1173440a..6ab2967ac24b23 100644 --- a/var/spack/repos/builtin/packages/miopen-hip/package.py +++ b/var/spack/repos/builtin/packages/miopen-hip/package.py @@ -160,6 +160,7 @@ class MiopenHip(CMakePackage): depends_on("nlohmann-json", type="link") depends_on("composable-kernel@" + ver, when="@" + ver) for ver in ["5.4.0", "5.4.3", "5.5.0"]: + depends_on("nlohmann-json", type="link") depends_on("rocmlir@" + ver, when="@" + ver) def setup_build_environment(self, env): @@ -216,6 +217,7 @@ def cmake_args(self): if self.spec.satisfies("@5.5.1:"): args.append(self.define("MIOPEN_USE_COMPOSABLEKERNEL", "ON")) args.append(self.define("MIOPEN_ENABLE_AI_KERNEL_TUNING", "OFF")) + args.append(self.define("MIOPEN_USE_MLIR", "OFF")) args.append( "-DNLOHMANN_JSON_INCLUDE={0}".format(self.spec["nlohmann-json"].prefix.include) ) From eb66587da97faab77a1eba2d88da8171c201e20d Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Wed, 11 Oct 2023 18:13:57 -0600 Subject: [PATCH 137/408] buildcache: Tell servers not to cache index or hash (#40339) --- lib/spack/spack/binary_distribution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index a9b1d6280b7f27..5559e898203c08 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -913,7 +913,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di index_json_path, url_util.join(cache_prefix, "index.json"), keep_original=False, - extra_args={"ContentType": "application/json"}, + extra_args={"ContentType": "application/json", "CacheControl": "no-cache"}, ) # Push the hash @@ -921,7 +921,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di index_hash_path, url_util.join(cache_prefix, "index.json.hash"), keep_original=False, - extra_args={"ContentType": "text/plain"}, + extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"}, ) From 4dc8cb3e5369cd489ef49e3cb97c84958ce3c124 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 11 Oct 2023 20:08:00 -0700 Subject: [PATCH 138/408] adios2: use f-strings (#40437) --- var/spack/repos/builtin/packages/adios2/package.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index 47affe6f8a2cac..bfb08227e6ff9d 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -109,19 +109,19 @@ class Adios2(CMakePackage, CudaPackage): depends_on("cmake@3.12.0:", type="build") for 
_platform in ["linux", "darwin", "cray"]: - depends_on("pkgconfig", type="build", when="platform=%s" % _platform) + depends_on("pkgconfig", type="build", when=f"platform={_platform}") variant( "pic", default=False, description="Build pic-enabled static libraries", - when="platform=%s" % _platform, + when=f"platform={_platform}", ) # libffi and libfabric and not currently supported on Windows # see Paraview's superbuild handling of libfabric at # https://gitlab.kitware.com/paraview/paraview-superbuild/-/blob/master/projects/adios2.cmake#L3 - depends_on("libffi", when="+sst platform=%s" % _platform) # optional in DILL + depends_on("libffi", when=f"+sst platform={_platform}") # optional in DILL depends_on( - "libfabric@1.6.0:", when="+sst platform=%s" % _platform + "libfabric@1.6.0:", when=f"+sst platform={_platform}" ) # optional in EVPath and SST # depends_on('bison', when='+sst') # optional in FFS, broken package # depends_on('flex', when='+sst') # optional in FFS, depends on BISON @@ -241,8 +241,8 @@ def cmake_args(self): args.extend(["-DCMAKE_Fortran_SUBMODULE_EXT=.smod", "-DCMAKE_Fortran_SUBMODULE_SEP=."]) if "+python" in spec or self.run_tests: - args.append("-DPYTHON_EXECUTABLE:FILEPATH=%s" % spec["python"].command.path) - args.append("-DPython_EXECUTABLE:FILEPATH=%s" % spec["python"].command.path) + args.append(f"-DPYTHON_EXECUTABLE:FILEPATH={spec['python'].command.path}") + args.append(f"-DPython_EXECUTABLE:FILEPATH={spec['python'].command.path}") return args From 76de2bbd442b200ebe5f6b3fedba2a544a27e70b Mon Sep 17 00:00:00 2001 From: Leonhard Reichenbach Date: Thu, 12 Oct 2023 07:29:00 +0200 Subject: [PATCH 139/408] opendatadetector: add version v3.0.0 (#39693) --- var/spack/repos/builtin/packages/opendatadetector/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/opendatadetector/package.py b/var/spack/repos/builtin/packages/opendatadetector/package.py index db8c56dbbd6f7b..d54d3013a905f2 100644 --- a/var/spack/repos/builtin/packages/opendatadetector/package.py +++ b/var/spack/repos/builtin/packages/opendatadetector/package.py @@ -18,6 +18,7 @@ class Opendatadetector(CMakePackage): tags = ["hep"] version("main", branch="main") + version("v3.0.0", tag="v3.0.0", commit="e3b1eceae96fd5dddf10223753964c570ee868c9") version("v2", tag="v2", commit="7041ae086dff4ee4a8d5b65f5d9559acc6dbec47") version("v1", tag="v1", commit="81c43c6511723c13c15327479082d3dcfa1947c7") From b22c5cec9d37984b2c4ba032e8785a151b3e8b43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Thu, 12 Oct 2023 07:31:46 +0200 Subject: [PATCH 140/408] [add] py-cylc-rose: new recipe (#39980) * [add] py-cylc-rose: new recipe * py-cylc-rose: update recipe --------- Co-authored-by: LydDeb --- .../builtin/packages/py-cylc-rose/package.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-cylc-rose/package.py diff --git a/var/spack/repos/builtin/packages/py-cylc-rose/package.py b/var/spack/repos/builtin/packages/py-cylc-rose/package.py new file mode 100644 index 00000000000000..37805c66d0fe8a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-rose/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcRose(PythonPackage): + """A Cylc plugin providing support for the Rose rose-suite.conf file.""" + + homepage = "https://cylc.github.io/cylc-doc/latest/html/plugins/cylc-rose.html" + pypi = "cylc-rose/cylc-rose-1.3.0.tar.gz" + + maintainers("LydDeb") + + version("1.3.0", sha256="017072b69d7a50fa6d309a911d2428743b07c095f308529b36b1b787ebe7ab88") + + depends_on("py-setuptools", type="build") + depends_on("py-metomi-rose@2.1", type=("build", "run")) + depends_on("py-cylc-flow@8.2", type=("build", "run")) + depends_on("py-metomi-isodatetime", type=("build", "run")) + depends_on("py-jinja2", type=("build", "run")) From b31f0592aa125e260c259c23b32e6715ea912bba Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Thu, 12 Oct 2023 00:34:22 -0500 Subject: [PATCH 141/408] must: add versions 1.8.0 and 1.9.0 (#40141) --- var/spack/repos/builtin/packages/must/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/must/package.py b/var/spack/repos/builtin/packages/must/package.py index 6cca6c9cf278b0..1b3b0b152dc52b 100644 --- a/var/spack/repos/builtin/packages/must/package.py +++ b/var/spack/repos/builtin/packages/must/package.py @@ -19,6 +19,8 @@ class Must(CMakePackage): maintainers("jgalarowicz", "dmont") + version("1.9.0", sha256="24998f4ca6bce718d69347de90798600f2385c21266c2d1dd39a87dd8bd1fba4") + version("1.8.0", sha256="9754fefd2e4c8cba812f8b56a5dd929bc84aa599b2509305e1eb8518be0a8a39") version("1.8.0-rc1", sha256="49fd2487fbd1aa41f4252c7e37efebd3f6ff48218c88e82f34b88d59348fe406") version( "1.8-preview", sha256="67b4b061db7a893e22a6610e2085072716d11738bc6cc3cb3ffd60d6833e8bad" From ce76d31f21f2f415495a4728a7d422014daeb482 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 12 Oct 2023 02:30:22 -0700 Subject: [PATCH 142/408] acfl: use f-strings (#40433) --- var/spack/repos/builtin/packages/acfl/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/acfl/package.py b/var/spack/repos/builtin/packages/acfl/package.py index 4d546e5780200d..2e7790a422a9f4 100644 --- a/var/spack/repos/builtin/packages/acfl/package.py +++ b/var/spack/repos/builtin/packages/acfl/package.py @@ -185,8 +185,7 @@ def get_acfl_prefix(spec): ) else: return join_path( - spec.prefix, - "arm-linux-compiler-{0}_{1}".format(spec.version, get_os(spec.version.string)), + spec.prefix, f"arm-linux-compiler-{spec.version}_{get_os(spec.version.string)}" ) @@ -238,7 +237,7 @@ class Acfl(Package): # Run the installer with the desired install directory def install(self, spec, prefix): exe = Executable( - "./arm-compiler-for-linux_{0}_{1}.sh".format(spec.version, get_os(spec.version.string)) + f"./arm-compiler-for-linux_{spec.version}_{get_os(spec.version.string)}.sh" ) exe("--accept", "--force", "--install-to", prefix) From 91c91cc1543a8be5ddecd767490c2b1dab67bd13 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 12 Oct 2023 12:11:22 +0200 Subject: [PATCH 143/408] clingo: fix build with Python 3.12 (#40154) --- var/spack/repos/builtin/packages/clingo/package.py | 7 +++++++ .../repos/builtin/packages/clingo/setuptools.patch | 14 ++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/clingo/setuptools.patch diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 73797762b5cc81..ab5fe9a0430da1 100644 --- 
a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -42,6 +42,7 @@ class Clingo(CMakePackage): # See https://github.com/potassco/clingo/blob/v5.5.2/INSTALL.md depends_on("cmake@3.1:", type="build") depends_on("cmake@3.18:", type="build", when="@5.5:") + depends_on("py-setuptools", when="@5.6.2:", type="build") depends_on("doxygen", type="build", when="+docs") @@ -68,6 +69,12 @@ class Clingo(CMakePackage): patch("size-t.patch", when="%msvc") patch("vs2022.patch", when="%msvc@19.30:") + # TODO: Simplify this after Spack 0.21 release. The old concretizer has problems with + # py-setuptools ^python@3.6, so we only apply the distutils -> setuptools patch for Python 3.12 + with when("@:5.6.1 ^python@3.12:"): + patch("setuptools.patch") + depends_on("py-setuptools", type="build") + def patch(self): # Doxygen is optional but can't be disabled with a -D, so patch # it out if it's really supposed to be disabled diff --git a/var/spack/repos/builtin/packages/clingo/setuptools.patch b/var/spack/repos/builtin/packages/clingo/setuptools.patch new file mode 100644 index 00000000000000..4a38a7e6d9ad9e --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo/setuptools.patch @@ -0,0 +1,14 @@ +diff --git a/cmake/python-site.py b/cmake/python-site.py +index 1e7fc8ce..95ef827f 100644 +--- a/cmake/python-site.py ++++ b/cmake/python-site.py +@@ -1,4 +1,7 @@ +-from distutils.sysconfig import get_python_lib, get_config_vars ++try: ++ from setuptools.sysconfig import get_python_lib, get_config_vars ++except ImportError: ++ from distutils.sysconfig import get_python_lib, get_config_vars + import sys + if sys.argv[1] == "prefix": + print(get_python_lib(True, False, sys.argv[2] if len(sys.argv) > 2 else None)) + From 9bacd3206edf77275be2ed43285c266dd9820938 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 12 Oct 2023 12:12:15 +0200 Subject: [PATCH 144/408] Remove deprecated "extra_instructions" option for containers (#40365) --- lib/spack/docs/containers.rst | 8 +------- lib/spack/spack/container/writers/__init__.py | 7 ------- lib/spack/spack/schema/container.py | 15 --------------- lib/spack/spack/test/container/docker.py | 17 ----------------- share/spack/templates/container/Dockerfile | 7 ------- share/spack/templates/container/singularity.def | 6 ------ 6 files changed, 1 insertion(+), 59 deletions(-) diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index ec9c02635ceb37..17609d740e9345 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -212,18 +212,12 @@ under the ``container`` attribute of environments: final: - libgomp - # Extra instructions - extra_instructions: - final: | - RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc - # Labels for the image labels: app: "gromacs" mpi: "mpich" -A detailed description of the options available can be found in the -:ref:`container_config_options` section. +A detailed description of the options available can be found in the :ref:`container_config_options` section. 
------------------- Setting Base Images diff --git a/lib/spack/spack/container/writers/__init__.py b/lib/spack/spack/container/writers/__init__.py index 4e15ae6f58d815..dfed52e47e48a1 100644 --- a/lib/spack/spack/container/writers/__init__.py +++ b/lib/spack/spack/container/writers/__init__.py @@ -272,13 +272,6 @@ def _os_pkg_manager(self): raise spack.error.SpackError(msg) return os_pkg_manager - @tengine.context_property - def extra_instructions(self): - Extras = namedtuple("Extra", ["build", "final"]) - extras = self.container_config.get("extra_instructions", {}) - build, final = extras.get("build", None), extras.get("final", None) - return Extras(build=build, final=final) - @tengine.context_property def labels(self): return self.container_config.get("labels", {}) diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py index 030b23829092bf..df386c3de4aeb8 100644 --- a/lib/spack/spack/schema/container.py +++ b/lib/spack/spack/schema/container.py @@ -68,12 +68,6 @@ "labels": {"type": "object"}, # Use a custom template to render the recipe "template": {"type": "string", "default": None}, - # Add a custom extra section at the bottom of a stage - "extra_instructions": { - "type": "object", - "additionalProperties": False, - "properties": {"build": {"type": "string"}, "final": {"type": "string"}}, - }, # Reserved for properties that are specific to each format "singularity": { "type": "object", @@ -89,15 +83,6 @@ "docker": {"type": "object", "additionalProperties": False, "default": {}}, "depfile": {"type": "boolean", "default": False}, }, - "deprecatedProperties": { - "properties": ["extra_instructions"], - "message": ( - "container:extra_instructions has been deprecated and will be removed " - "in Spack v0.21. Set container:template appropriately to use custom Jinja2 " - "templates instead." - ), - "error": False, - }, } properties = {"container": container_schema} diff --git a/lib/spack/spack/test/container/docker.py b/lib/spack/spack/test/container/docker.py index d6b6f4488bd687..5e0b8c3d4ea1d2 100644 --- a/lib/spack/spack/test/container/docker.py +++ b/lib/spack/spack/test/container/docker.py @@ -82,23 +82,6 @@ def test_strip_is_set_from_config(minimal_configuration): assert writer.strip is False -def test_extra_instructions_is_set_from_config(minimal_configuration): - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (None, None) - - test_line = "RUN echo Hello world!" - e = minimal_configuration["spack"]["container"] - e["extra_instructions"] = {} - e["extra_instructions"]["build"] = test_line - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (test_line, None) - - e["extra_instructions"]["final"] = test_line - del e["extra_instructions"]["build"] - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (None, test_line) - - def test_custom_base_images(minimal_configuration): """Test setting custom base images from configuration file""" minimal_configuration["spack"]["container"]["images"] = { diff --git a/share/spack/templates/container/Dockerfile b/share/spack/templates/container/Dockerfile index 27c2dbf5cfd4f5..2fad37affb37ed 100644 --- a/share/spack/templates/container/Dockerfile +++ b/share/spack/templates/container/Dockerfile @@ -39,9 +39,6 @@ RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \ RUN cd {{ paths.environment }} && \ spack env activate --sh -d . 
> activate.sh -{% if extra_instructions.build %} -{{ extra_instructions.build }} -{% endif %} {% endblock build_stage %} {% endif %} @@ -70,10 +67,6 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \ && {% endif %}{{ os_packages_final.install }} {{ os_packages_final.list | join | replace('\n', ' ') }} \ && {{ os_packages_final.clean }} {% endif %} -{% if extra_instructions.final %} - -{{ extra_instructions.final }} -{% endif %} {% endblock final_stage %} {% for label, value in labels.items() %} LABEL "{{ label }}"="{{ value }}" diff --git a/share/spack/templates/container/singularity.def b/share/spack/templates/container/singularity.def index 4184db92b622e2..3b8f57dfb1b86e 100644 --- a/share/spack/templates/container/singularity.def +++ b/share/spack/templates/container/singularity.def @@ -39,9 +39,6 @@ EOF grep 'x-executable\|x-archive\|x-sharedlib' | \ awk -F: '{print $1}' | xargs strip {% endif %} -{% if extra_instructions.build %} -{{ extra_instructions.build }} -{% endif %} {% endblock build_stage %} {% if apps %} {% for application, help_text in apps.items() %} @@ -80,9 +77,6 @@ Stage: final {% endif %} # Modify the environment without relying on sourcing shell specific files at startup cat {{ paths.environment }}/environment_modifications.sh >> $SINGULARITY_ENVIRONMENT -{% if extra_instructions.final %} -{{ extra_instructions.final }} -{% endif %} {% endblock final_stage %} {% if runscript %} From 07a66c656c06c6505e2d6e6e7e75df32b1edf36e Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 12 Oct 2023 12:40:38 +0200 Subject: [PATCH 145/408] gettext: Add 0.22.3 and fix keyerror: "shared" (#39423) After the merge of #37957 (Add static and pic variants), if a gettext install from a build before that merge is present, building any package using gettext fails with keyerror: "shared" because the use of self.spec.variants["shared"] does not check for the presence of the new variant in the old installation but expects that the new key variants["shared"] exists always. 
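A minimal stand-alone sketch of the failure mode (the plain dict stands in
for the variant mapping stored with an old installed spec; names are
illustrative):

    variants = {}                          # old gettext record: no "shared" variant stored
    try:
        shared = variants["shared"].value  # what the libs property used to evaluate
    except KeyError:
        shared = True                      # the patch instead uses variants.get("shared")
                                           # and falls back to the default
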
Fix it with a fallback to the default of True and update gettext to v22.3 Co-authored-by: Bernharad Kaindl <43588962+bernhardkaindl@users.noreply.github.com> --- var/spack/repos/builtin/packages/gettext/package.py | 5 ++++- var/spack/repos/builtin/packages/procps/package.py | 8 ++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index d7cccfb3391106..ee502c07853974 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -19,6 +19,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): executables = [r"^gettext$"] + version("0.22.3", sha256="b838228b3f8823a6c1eddf07297197c4db13f7e1b173b9ef93f3f945a63080b6") version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6") version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192") version("0.20.2", sha256="b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba") @@ -127,10 +128,12 @@ def configure_args(self): @property def libs(self): + # Do not fail if the installed gettext did not yet have the shared variant: + shared_variant = self.spec.variants.get("shared") libs = find_libraries( ["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"], root=self.prefix, recursive=True, - shared=self.spec.variants["shared"].value, + shared=True if not shared_variant else shared_variant.value, ) return libs diff --git a/var/spack/repos/builtin/packages/procps/package.py b/var/spack/repos/builtin/packages/procps/package.py index 238116aeadd004..791625102bc809 100644 --- a/var/spack/repos/builtin/packages/procps/package.py +++ b/var/spack/repos/builtin/packages/procps/package.py @@ -17,6 +17,7 @@ class Procps(AutotoolsPackage): url = "https://gitlab.com/procps-ng/procps/-/archive/v4.0.3/procps-v4.0.3.tar.gz" version("master", branch="master") + version("4.0.4", sha256="3214fab0f817d169f2c117842ba635bafb1cd6090273e311a8b5c6fc393ddb9d") version("4.0.3", sha256="14cc21219c45d196772274ea3f194f6d668b6cc667fbde9ee6d8039121b73fa6") version("4.0.2", sha256="b03e4b55eaa5661e726acb714e689356d80bc056b09965c2284d039ba8dc21e8") version("4.0.1", sha256="1eaff353306aba12816d14881f2b88c7c9d06023825f7224700f0c01f66c65cd") @@ -35,8 +36,11 @@ class Procps(AutotoolsPackage): depends_on("pkgconfig@0.9.0:", type="build") depends_on("dejagnu", type="test") depends_on("iconv") - depends_on("gettext", type="build") - depends_on("gettext", when="+nls") + depends_on("gettext", type="build") # required by autogen.sh + with when("+nls"): + depends_on("gettext") + # msgfmt 0.22 gives parsing errors + depends_on("gettext@:0.21", when="@:4.0.3") depends_on("ncurses") conflicts("platform=darwin", msg="procps is linux-only") From bc2dcd48271f4a279105c34180fb815fe24b3f4b Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 12 Oct 2023 08:27:03 -0500 Subject: [PATCH 146/408] PyTorch: fix build with Xcode 15 (#40460) --- var/spack/repos/builtin/packages/py-torch/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 30666314eea1f8..b876bf06362b98 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -634,6 +634,10 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # https://github.com/pytorch/pytorch/issues/60332 # env.set("USE_SYSTEM_XNNPACK", "ON") + # https://github.com/pytorch/pytorch/issues/111086 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + @run_before("install") def build_amd(self): if "+rocm" in self.spec: From dec13a6507d8e0bf084691bfac1de3ebdcb08e21 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 12 Oct 2023 06:40:04 -0700 Subject: [PATCH 147/408] go: add v1.21.3 and deprecate previous versions due to CVE-2023-39533 (#40454) --- var/spack/repos/builtin/packages/go/package.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index a3c0de73911b92..439b7ca90c5197 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -39,10 +39,15 @@ class Go(Package): maintainers("alecbcs") - version("1.20.6", sha256="62ee5bc6fb55b8bae8f705e0cb8df86d6453626b4ecf93279e2867092e0b7f70") - version("1.19.11", sha256="e25c9ab72d811142b7f41ff6da5165fec2d1be5feec3ef2c66bc0bdecb431489") + version("1.21.3", sha256="186f2b6f8c8b704e696821b09ab2041a5c1ee13dcbc3156a13adcf75931ee488") # Deprecated Versions + # https://nvd.nist.gov/vuln/detail/CVE-2023-39533 + version( + "1.20.6", + sha256="62ee5bc6fb55b8bae8f705e0cb8df86d6453626b4ecf93279e2867092e0b7f70", + deprecated=True, + ) # https://nvd.nist.gov/vuln/detail/CVE-2023-29405 version( "1.20.4", @@ -54,6 +59,11 @@ class Go(Package): sha256="e447b498cde50215c4f7619e5124b0fc4e25fb5d16ea47271c47f278e7aa763a", deprecated=True, ) + version( + "1.19.11", + sha256="e25c9ab72d811142b7f41ff6da5165fec2d1be5feec3ef2c66bc0bdecb431489", + deprecated=True, + ) version( "1.19.9", sha256="131190a4697a70c5b1d232df5d3f55a3f9ec0e78e40516196ffb3f09ae6a5744", @@ -64,7 +74,6 @@ class Go(Package): sha256="1d7a67929dccafeaf8a29e55985bc2b789e0499cb1a17100039f084e3238da2f", deprecated=True, ) - # https://nvd.nist.gov/vuln/detail/CVE-2023-24538 version( "1.20.2", @@ -106,7 +115,7 @@ def build(self, spec, prefix): bash = which("bash") with working_dir("src"): - bash("{0}.bash".format("all" if self.run_tests else "make")) + bash(f"{'all' if self.run_tests else 'make'}.bash") def install(self, spec, prefix): install_tree(".", prefix) From ad089d03f213806406ed8f701a1712e153b71bdb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 14:38:33 +0000 Subject: [PATCH 148/408] build(deps): bump python-levenshtein in /lib/spack/docs (#40461) Bumps [python-levenshtein](https://github.com/maxbachmann/python-Levenshtein) from 0.22.0 to 0.23.0. 
- [Release notes](https://github.com/maxbachmann/python-Levenshtein/releases) - [Changelog](https://github.com/maxbachmann/python-Levenshtein/blob/main/HISTORY.md) - [Commits](https://github.com/maxbachmann/python-Levenshtein/compare/v0.22.0...v0.23.0) --- updated-dependencies: - dependency-name: python-levenshtein dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 6d95f1d40d1ff2..8b6c32750401e2 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -2,7 +2,7 @@ sphinx==7.2.6 sphinxcontrib-programoutput==0.17 sphinx_design==0.5.0 sphinx-rtd-theme==1.3.0 -python-levenshtein==0.22.0 +python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 urllib3==2.0.6 From ab15436cfb7b0b849ec1c942d5aeee3bb7f6987f Mon Sep 17 00:00:00 2001 From: Dennis Klein Date: Thu, 12 Oct 2023 17:15:00 +0200 Subject: [PATCH 149/408] libzmq: Revert "libzmq: make location of libsodium explicit (#34553)" (#40477) and make variants independent of upstream defaults --- var/spack/repos/builtin/packages/libzmq/package.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/libzmq/package.py b/var/spack/repos/builtin/packages/libzmq/package.py index 086b0a6e4afcdc..f8adbfc37b15db 100644 --- a/var/spack/repos/builtin/packages/libzmq/package.py +++ b/var/spack/repos/builtin/packages/libzmq/package.py @@ -105,19 +105,16 @@ def autoreconf(self, spec, prefix): def configure_args(self): config_args = [] + config_args.extend(self.with_or_without("docs")) config_args.extend(self.enable_or_disable("drafts")) config_args.extend(self.enable_or_disable("libbsd")) + config_args.extend(self.with_or_without("libsodium")) config_args.extend(self.enable_or_disable("libunwind")) # the package won't compile with newer compilers because warnings # are converted to errors. Hence, disable such conversion. # this option was only added in version 4.2.3. if self.spec.version >= Version("4.2.3"): config_args.append("--disable-Werror") - - if "+libsodium" in self.spec: - config_args.append("--with-libsodium=" + self.spec["libsodium"].prefix) - if "~docs" in self.spec: - config_args.append("--without-docs") if "clang" in self.compiler.cc: config_args.append("CFLAGS=-Wno-gnu") config_args.append("CXXFLAGS=-Wno-gnu") From e2f1d87606f288881a11c608c59de0c17c3c7748 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 12 Oct 2023 18:28:16 +0200 Subject: [PATCH 150/408] modules:prefix_inspections: allow empty dict (#40485) Currently ``` modules: prefix_inspections:: {} ``` gives you the builtin defaults instead of no mapping. --- lib/spack/spack/user_environment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 7ad1d408c79d36..0be11c046cdf07 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -26,8 +26,8 @@ def prefix_inspections(platform): A dictionary mapping subdirectory names to lists of environment variables to modify with that directory if it exists. 
""" - inspections = spack.config.get("modules:prefix_inspections", {}) - if inspections: + inspections = spack.config.get("modules:prefix_inspections") + if isinstance(inspections, dict): return inspections inspections = { From d3e827e9e7fc0a698dfd761b9406e3cdbd81d854 Mon Sep 17 00:00:00 2001 From: Julius Plehn Date: Thu, 12 Oct 2023 19:27:06 +0200 Subject: [PATCH 151/408] Updates Variorum to 0.7.0 (#40488) --- var/spack/repos/builtin/packages/variorum/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/variorum/package.py b/var/spack/repos/builtin/packages/variorum/package.py index 513e68a7868afd..9d4a385d0ed118 100644 --- a/var/spack/repos/builtin/packages/variorum/package.py +++ b/var/spack/repos/builtin/packages/variorum/package.py @@ -17,6 +17,7 @@ class Variorum(CMakePackage): maintainers("slabasan", "rountree") + version("0.7.0", sha256="36ec0219379ea2b7c8f9770b3271335c776ff5a3de71585714c33356345b2f0c") version("0.6.0", sha256="c0928a0e6901808ee50142d1034de15edc2c90d7d1b9fbce43757226e7c04306") version("0.5.0", sha256="de331762e7945ee882d08454ff9c66436e2b6f87f761d2b31c6ab3028723bfed") version("0.4.1", sha256="be7407b856bc2239ecaa27d3df80aee2f541bb721fbfa183612bd9c0ce061f28") From 08ac36bdff4cb64a0ef9128178dde74931c4350e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Thu, 12 Oct 2023 23:13:15 +0200 Subject: [PATCH 152/408] ospray: new versions 2.11.0 and 2.12.0 (#40394) * openimagedenoise: checksum 2.0.1 * ospray: new versions 2.11.0 and 2.12.0 - both depend on embree@4 - also update dependency versions for rkcommon, openvkl, openimagedenois and ispc - expose that dependency on openvkl is optional since @2.11 with variant "volumes" * ospray: limit embree to @3 for ospray @:2.10 --- .../packages/openimagedenoise/package.py | 1 + .../repos/builtin/packages/ospray/package.py | 30 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/openimagedenoise/package.py b/var/spack/repos/builtin/packages/openimagedenoise/package.py index 90eaa559ca00fc..9ccce30a86c266 100644 --- a/var/spack/repos/builtin/packages/openimagedenoise/package.py +++ b/var/spack/repos/builtin/packages/openimagedenoise/package.py @@ -17,6 +17,7 @@ class Openimagedenoise(CMakePackage): # maintainers("github_user1", "github_user2") + version("2.0.1", sha256="328eeb9809d18e835dca7203224af3748578794784c026940c02eea09c695b90") version("1.4.3", sha256="3276e252297ebad67a999298d8f0c30cfb221e166b166ae5c955d88b94ad062a") version("1.4.2", sha256="e70d27ce24b41364782376c1b3b4f074f77310ccfe5f8ffec4a13a347e48a0ea") version("1.4.1", sha256="9088966685a78adf24b8de075d66e4c0019bd7b2b9d29c6e45aaf35d294e3f6f") diff --git a/var/spack/repos/builtin/packages/ospray/package.py b/var/spack/repos/builtin/packages/ospray/package.py index fcac3239a4d99c..85a79894bbf246 100644 --- a/var/spack/repos/builtin/packages/ospray/package.py +++ b/var/spack/repos/builtin/packages/ospray/package.py @@ -16,6 +16,8 @@ class Ospray(CMakePackage): # maintainers("aumuell") + version("2.12.0", sha256="268b16952b2dd44da2a1e40d2065c960bc2442dd09b63ace8b65d3408f596301") + version("2.11.0", sha256="55974e650d9b78989ee55adb81cffd8c6e39ce5d3cf0a3b3198c522bf36f6e81") version("2.10.0", sha256="bd478284f48d2cb775fc41a2855a9d9f5ea16c861abda0f8dc94e02ea7189cb8") version("2.9.0", sha256="0145e09c3618fb8152a32d5f5cff819eb065d90975ee4e35400d2db9eb9f6398") version("2.8.0", sha256="2dabc75446a0e2e970952d325f930853a51a9b4d1868c8135f05552a4ae04d39") @@ 
-27,21 +29,35 @@ class Ospray(CMakePackage): variant("denoiser", default=True, description="Enable denoiser image operation") variant("glm", default=False, description="Build ospray_cpp GLM tests/tutorial") variant("mpi", default=True, description="Enable MPI support") + variant("volumes", default=True, description="Enable volumetric rendering with Open VKL") + + conflicts("~volumes", when="@:2.10") depends_on("rkcommon@1.5:") depends_on("rkcommon@1.7:1.9", when="@2.7.0:2.8") depends_on("rkcommon@1.9", when="@2.9.0") depends_on("rkcommon@1.10:", when="@2.10.0:") + depends_on("rkcommon@1.11:", when="@2.11:") depends_on("embree@3.12: +ispc") depends_on("embree@3.13.1:", when="@2.7.0:") - depends_on("openvkl@0.13.0:") - depends_on("openvkl@1.0.1:", when="@2.7.0:") - depends_on("openvkl@1.2.0:", when="@2.9.0:") - depends_on("openvkl@1.3.0:", when="@2.10.0:") - depends_on("openimagedenoise@1.2.3:", when="+denoiser") + depends_on("embree@:3", when="@:2.10") + depends_on("embree@4:", when="@2.11:") + with when("+volumes"): + depends_on("openvkl@0.13.0:") + depends_on("openvkl@1.0.1:", when="@2.7.0:") + depends_on("openvkl@1.2.0:", when="@2.9.0:") + depends_on("openvkl@1.3.0:", when="@2.10.0:") + depends_on("openvkl@1.3.2:", when="@2.11:") + with when("+denoiser"): + depends_on("openimagedenoise@1.2.3:") + depends_on("openimagedenoise@1.3:", when="@2.5:") + depends_on("openimagedenoise@:1", when="@:2.11") + depends_on("openimagedenoise@2:", when="@2.12:") depends_on("ispc@1.14.1:", type=("build")) depends_on("ispc@1.16.0:", when="@2.7.0:", type=("build")) depends_on("ispc@1.18.0:", when="@2.10.0:", type=("build")) + depends_on("ispc@1.19.0:", when="@2.11.0:", type=("build")) + depends_on("ispc@1.20.0:", when="@2.12.0:", type=("build")) depends_on("tbb") depends_on("mpi", when="+mpi") @@ -58,6 +74,10 @@ def cmake_args(self): self.define_from_variant("OSPRAY_APPS_ENABLE_GLM", "glm"), ] + # support for volumetric data + if self.spec.satisfies("@2.11:"): + args.append(self.define_from_variant("OSPRAY_ENABLE_VOLUMES", "volumes")) + # Apps enable_apps_arg = "" if self.spec.satisfies("@2.9:") else "ENABLE_" args.extend( From 53f77f66a656d71685e15f22c169563bfa7d374d Mon Sep 17 00:00:00 2001 From: Nils Vu Date: Thu, 12 Oct 2023 19:13:01 -0700 Subject: [PATCH 153/408] catch2: add +pic and +shared options (#40337) Also add latest version --- var/spack/repos/builtin/packages/catch2/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index bb731a43f1a223..82eb629bb82661 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -19,6 +19,7 @@ class Catch2(CMakePackage): version("develop", branch="devel") # Releases + version("3.4.0", sha256="122928b814b75717316c71af69bd2b43387643ba076a6ec16e7882bfb2dfacbb") version("3.3.2", sha256="8361907f4d9bff3ae7c1edb027f813659f793053c99b67837a0c0375f065bae2") version("3.3.1", sha256="d90351cdc55421f640c553cfc0875a8c834428679444e8062e9187d05b18aace") version("3.3.0", sha256="fe2f29a54ca775c2dd04bb97ffb79d398e6210e3caa174348b5cd3b7e4ca887d") @@ -104,6 +105,11 @@ class Catch2(CMakePackage): version("1.3.5", sha256="f15730d81b4173fb860ce3561768de7d41bbefb67dc031d7d1f5ae2c07f0a472") version("1.3.0", sha256="245f6ee73e2fea66311afa1da59e5087ddab8b37ce64994ad88506e8af28c6ac") + variant( + "pic", when="@3: ~shared", default=True, description="Build with position-independent code" + ) + 
variant("shared", when="@3:", default=False, description="Build shared library") + def cmake_args(self): spec = self.spec args = [] @@ -112,6 +118,10 @@ def cmake_args(self): args.append("-DNO_SELFTEST={0}".format("OFF" if self.run_tests else "ON")) elif spec.satisfies("@2.1.1:"): args.append(self.define("BUILD_TESTING", self.run_tests)) + if spec.satisfies("@3:"): + args.append(self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic")) + args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) + return args @when("@:1.6") From 1a34d2b271f3625f9ac9d2717c06a661612c3540 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 12 Oct 2023 21:32:48 -0500 Subject: [PATCH 154/408] py-cmocean: add v3.0.3 (#40482) --- var/spack/repos/builtin/packages/py-cmocean/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-cmocean/package.py b/var/spack/repos/builtin/packages/py-cmocean/package.py index 032827edba473b..89e9dc22dded35 100644 --- a/var/spack/repos/builtin/packages/py-cmocean/package.py +++ b/var/spack/repos/builtin/packages/py-cmocean/package.py @@ -13,8 +13,11 @@ class PyCmocean(PythonPackage): homepage = "https://matplotlib.org/cmocean/" pypi = "cmocean/cmocean-2.0.tar.gz" + version("3.0.3", sha256="abaf99383c1a60f52970c86052ae6c14eafa84fc16984488040283c02db77c0b") version("2.0", sha256="13eea3c8994d8e303e32a2db0b3e686f6edfb41cb21e7b0e663c2b17eea9b03a") + depends_on("python@3.8:", when="@3:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-matplotlib", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) + depends_on("py-packaging", when="@3:", type=("build", "run")) From 0c34f03d19266e4f155acede22452745abf30c38 Mon Sep 17 00:00:00 2001 From: Matthew Chan Date: Thu, 12 Oct 2023 23:59:44 -0700 Subject: [PATCH 155/408] containerize: update docs to activate env before using container templates (#40493) --- lib/spack/docs/containers.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index 17609d740e9345..64ca1df926bbec 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -519,6 +519,13 @@ the example below: COPY data /share/myapp/data {% endblock %} +The Dockerfile is generated by running: + +.. code-block:: console + + $ spack -e /opt/environment containerize + +Note that the environment must be active for spack to read the template. The recipe that gets generated contains the two extra instruction that we added in our template extension: .. code-block:: Dockerfile From 19f25ccf55f161b9d9060e5d2c32046e0a4aee55 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 13 Oct 2023 11:18:55 +0200 Subject: [PATCH 156/408] Better error message when wrong platform is used (#40492) fixes #40299 --- lib/spack/spack/solver/concretize.lp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index fcd546cbeef0d9..622547800f44e5 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -927,7 +927,8 @@ pkg_fact(Package, variant_single_value("dev_path")) %----------------------------------------------------------------------------- % if no platform is set, fall back to the default -:- attr("node_platform", _, Platform), not allowed_platform(Platform). 
+error(100, "platform '{0}' is not allowed on the current host", Platform) + :- attr("node_platform", _, Platform), not allowed_platform(Platform). attr("node_platform", PackageNode, Platform) :- attr("node", PackageNode), From b0fe088d95c8a935eaa3308cae54c183fcf41e72 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 13 Oct 2023 14:59:44 +0200 Subject: [PATCH 157/408] Expand multiple build systems section (#39589) Co-authored-by: Massimiliano Culpo --- lib/spack/docs/packaging_guide.rst | 153 +++++++++++++++++++---------- 1 file changed, 102 insertions(+), 51 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index d25009532ad7a1..acc79ea3424090 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -3635,7 +3635,8 @@ regardless of the build system. The arguments for the phase are: The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always correspond to ``self.spec`` and ``self.spec.prefix`` respectively. -If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly: +If the ``package.py`` has build instructions in a separate +:ref:`builder class `, the signature for a phase changes slightly: .. code-block:: python @@ -3645,56 +3646,6 @@ If the ``package.py`` encodes builders explicitly, the signature for a phase cha In this case the package is passed as the second argument, and ``self`` is the builder instance. -.. _multiple_build_systems: - -^^^^^^^^^^^^^^^^^^^^^^ -Multiple build systems -^^^^^^^^^^^^^^^^^^^^^^ - -There are cases where a software actively supports two build systems, or changes build systems -as it evolves, or needs different build systems on different platforms. Spack allows dealing with -these cases natively, if a recipe is written using builders explicitly. - -For instance, software that supports two build systems unconditionally should derive from -both ``*Package`` base classes, and declare the possible use of multiple build systems using -a directive: - -.. code-block:: python - - class ArpackNg(CMakePackage, AutotoolsPackage): - - build_system("cmake", "autotools", default="cmake") - -In this case the software can be built with both ``autotools`` and ``cmake``. Since the package -supports multiple build systems, it is necessary to declare which one is the default. The ``package.py`` -will likely contain some overriding of default builder methods: - -.. code-block:: python - - class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): - def cmake_args(self): - pass - - class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): - def configure_args(self): - pass - -In more complex cases it might happen that the build system changes according to certain conditions, -for instance across versions. That can be expressed with conditional variant values: - -.. code-block:: python - - class ArpackNg(CMakePackage, AutotoolsPackage): - - build_system( - conditional("cmake", when="@0.64:"), - conditional("autotools", when="@:0.63"), - default="cmake", - ) - -In the example the directive impose a change from ``Autotools`` to ``CMake`` going -from ``v0.63`` to ``v0.64``. - ^^^^^^^^^^^^^^^^^^ Mixin base classes ^^^^^^^^^^^^^^^^^^ @@ -3741,6 +3692,106 @@ for instance: In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines. +.. 
_multiple_build_systems: + +---------------------- +Multiple build systems +---------------------- + +There are cases where a package actively supports two build systems, or changes build systems +as it evolves, or needs different build systems on different platforms. Spack allows dealing with +these cases by splitting the build instructions into separate builder classes. + +For instance, software that supports two build systems unconditionally should derive from +both ``*Package`` base classes, and declare the possible use of multiple build systems using +a directive: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + variant("my_feature", default=True) + + build_system("cmake", "autotools", default="cmake") + +In this case the software can be built with both ``autotools`` and ``cmake``. Since the package +supports multiple build systems, it is necessary to declare which one is the default. + +Additional build instructions are split into separate builder classes: + +.. code-block:: python + + class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + return [ + self.define_from_variant("MY_FEATURE", "my_feature") + ] + + class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + def configure_args(self): + return self.with_or_without("my-feature", variant="my_feature") + +In this example, ``spack install example +feature build_sytem=cmake`` will +pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``. + +Similarly, ``spack install example +feature build_system=autotools`` will pick +the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``. + +Dependencies are always specified in the package class. When some dependencies +depend on the choice of the build system, it is possible to use when conditions as +usual: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + build_system("cmake", "autotools", default="cmake") + + # Runtime dependencies + depends_on("ncurses") + depends_on("libxml2") + + # Lowerbounds for cmake only apply when using cmake as the build system + with when("build_system=cmake"): + depends_on("cmake@3.18:", when="@2.0:", type="build") + depends_on("cmake@3:", type="build") + + # Specify extra build dependencies used only in the configure script + with when("build_system=autotools"): + depends_on("perl", type="build") + depends_on("pkgconfig", type="build") + +Very often projects switch from one build system to another, or add support +for a new build system from a certain version, which means that the choice +of the build system typically depends on a version range. Those situations can +be handled by using conditional values in the ``build_system`` directive: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + build_system( + conditional("cmake", when="@0.64:"), + conditional("autotools", when="@:0.63"), + default="cmake", + ) + +In the example the directive impose a change from ``Autotools`` to ``CMake`` going +from ``v0.63`` to ``v0.64``. + +The ``build_system`` can be used as an ordinary variant, which also means that it can +be used in ``depends_on`` statements. This can be useful when a package *requires* that +its dependency has a CMake config file, meaning that the dependent can only build when the +dependency is built with CMake, and not Autotools. In that case, you can force the choice +of the build system in the dependent: + +.. 
code-block:: python + + class Dependent(CMakePackage): + + depends_on("example build_system=cmake") + + .. _install-environment: ----------------------- From ff514445aebc069d234e4a1bc99dad85bb549e62 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Fri, 13 Oct 2023 11:48:23 -0400 Subject: [PATCH 158/408] Add gsi-ncdiag v1.1.2 (#40508) --- var/spack/repos/builtin/packages/gsi-ncdiag/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gsi-ncdiag/package.py b/var/spack/repos/builtin/packages/gsi-ncdiag/package.py index add2e4f40a99f8..58dc16499c9a52 100644 --- a/var/spack/repos/builtin/packages/gsi-ncdiag/package.py +++ b/var/spack/repos/builtin/packages/gsi-ncdiag/package.py @@ -14,6 +14,7 @@ class GsiNcdiag(CMakePackage): maintainers("ulmononian") + version("1.1.2", sha256="085884106be1f8fd94a70292102e9351c0efdf1e619a233831fafcd9ed32cd99") version("1.1.1", sha256="26fc10cf448dd62daa1385e38921d338778416342956c478337e6c6d1b20bf8c") version("1.1.0", sha256="9195801301209d6f93890944d58ffee4e24a4e35502ab27560a8c440ee53df4c") version("1.0.0", sha256="7251d6139c2bc1580db5f7f019e10a4c73d188ddd52ccf21ecc9e39d50a6af51") From d298af0b3126482c256e54c8b2234b85c6d9915e Mon Sep 17 00:00:00 2001 From: Dom Heinzeller Date: Fri, 13 Oct 2023 11:39:08 -0600 Subject: [PATCH 159/408] texinfo package: fix external detection (#40470) A complete texinfo install includes both `info` and `makeinfo`. Some system installations of texinfo may exclude one or the other. This updates the external finding logic to require both. --- var/spack/repos/builtin/packages/texinfo/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index a21cf9ec2758ed..052dee4b4db795 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import re from spack.package import * @@ -69,6 +70,13 @@ def setup_build_environment(self, env): @classmethod def determine_version(cls, exe): + # On CentOS and Ubuntu, the OS package info installs "info", + # which satisfies spack external find, but "makeinfo" comes + # from texinfo and may not be installed (and vice versa). 
+ (texinfo_path, info_exe) = os.path.split(exe) + makeinfo_exe = os.path.join(texinfo_path, "makeinfo") + if not os.path.exists(makeinfo_exe): + return None output = Executable(exe)("--version", output=str, error=str) match = re.search(r"info \(GNU texinfo\)\s+(\S+)", output) return match.group(1) if match else None From a4e5bd158c9053b102e17ed95ad2ddb24bfea4fe Mon Sep 17 00:00:00 2001 From: Gabriel Cretin Date: Fri, 13 Oct 2023 20:30:20 +0200 Subject: [PATCH 160/408] Fpocket: fix installation (#40499) * Fpocket: fix edit() positional args + add install() * Remove comments * Fix line too long * Fix line too long * Remove extension specification in version Co-authored-by: Alec Scott * Use f-strings Co-authored-by: Alec Scott * Fix styling * Use the default MakefilePackage install stage --------- Co-authored-by: Alec Scott --- .../repos/builtin/packages/fpocket/package.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/fpocket/package.py b/var/spack/repos/builtin/packages/fpocket/package.py index 831283a4ef66dd..bf8d64aa9e8e12 100644 --- a/var/spack/repos/builtin/packages/fpocket/package.py +++ b/var/spack/repos/builtin/packages/fpocket/package.py @@ -3,23 +3,27 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) + from spack.package import * class Fpocket(MakefilePackage): - """fpocket is a very fast open source protein pocket detection algorithm - based on Voronoi tessellation.""" + """The fpocket suite of programs is a very fast open source + protein pocket detection algorithm based on Voronoi tessellation.""" homepage = "https://github.com/Discngine/fpocket" - version("master", branch="master", git="https://github.com/Discngine/fpocket.git") + url = "https://github.com/Discngine/fpocket/archive/refs/tags/4.1.tar.gz" + + version("4.1", "1a2af2d3f2df42de67301996db3b93c7eaff0375f866443c0468dcf4b1750688") depends_on("netcdf-c") + depends_on("netcdf-cxx") def setup_build_environment(self, env): if self.compiler.name == "gcc": env.set("CXX", "g++") - def edit(self): + def edit(self, spec, prefix): makefile = FileFilter("makefile") - makefile.filter("BINDIR .*", "BINDIR = %s/bin" % self.prefix) - makefile.filter("MANDIR .*", "MANDIR = %s/man/man8" % self.prefix) + makefile.filter("BINDIR .*", f"BINDIR = {prefix}/bin") + makefile.filter("MANDIR .*", f"MANDIR = {prefix}/man/man8") From 6c4c1aba080cc5bee274f847218b145e869a6a8d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 13 Oct 2023 21:43:22 +0200 Subject: [PATCH 161/408] spack checksum: improve interactive filtering (#40403) * spack checksum: improve interactive filtering * fix signature of executable * Fix restart when using editor * Don't show [x version(s) are new] when no known versions (e.g. 
in spack create ) * Test ^D in test_checksum_interactive_quit_from_ask_each * formatting * colorize / skip header on invalid command * show original total, not modified total * use colify for command list * Warn about possible URL changes * show possible URL change as comment * make mypy happy * drop numbers * [o]pen editor -> [e]dit --- lib/spack/spack/cmd/checksum.py | 38 ++++- lib/spack/spack/cmd/create.py | 12 +- lib/spack/spack/stage.py | 213 +++++++++++++++++++++++---- lib/spack/spack/test/cmd/checksum.py | 140 ++++++++++++++++-- lib/spack/spack/url.py | 2 +- lib/spack/spack/util/editor.py | 2 +- 6 files changed, 352 insertions(+), 55 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index a0d6611d944542..fa969c097995b1 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -7,6 +7,7 @@ import re import sys +import llnl.string import llnl.util.lang from llnl.util import tty @@ -15,6 +16,7 @@ import spack.spec import spack.stage import spack.util.crypto +import spack.util.web as web_util from spack.cmd.common import arguments from spack.package_base import PackageBase, deprecated_version, preferred_version from spack.util.editor import editor @@ -128,18 +130,38 @@ def checksum(parser, args): remote_versions = pkg.fetch_remote_versions(args.jobs) url_dict = remote_versions + # A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs. + # For example, GitHub release pages sometimes have multiple tarballs with different shasum: + # - releases/download/1.0/-1.0.tar.gz (uploaded tarball) + # - archive/refs/tags/1.0.tar.gz (generated tarball) + # We wanna ensure that `spack checksum` and `spack install` ultimately use the same URL, so + # here we check whether the crawled and computed URLs disagree, and if so, prioritize the + # former if that URL exists (just sending a HEAD request that is). 
+ url_changed_for_version = set() + for version, url in url_dict.items(): + possible_urls = pkg.all_urls_for_version(version) + if url not in possible_urls: + for possible_url in possible_urls: + if web_util.url_exists(possible_url): + url_dict[version] = possible_url + break + else: + url_changed_for_version.add(version) + if not url_dict: tty.die(f"Could not find any remote versions for {pkg.name}") - - # print an empty line to create a new output section block - print() + elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): + filtered_url_dict = spack.stage.interactive_version_filter( + url_dict, pkg.versions, url_changes=url_changed_for_version + ) + if filtered_url_dict is None: + exit(0) + url_dict = filtered_url_dict + else: + tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}") version_hashes = spack.stage.get_checksums_for_versions( - url_dict, - pkg.name, - keep_stage=args.keep_stage, - batch=(args.batch or len(versions) > 0 or len(url_dict) == 1), - fetch_options=pkg.fetch_options, + url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options ) if args.verify: diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index e3569d998f8560..474e271d1791f4 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -5,6 +5,7 @@ import os import re +import sys import urllib.parse import llnl.util.tty as tty @@ -823,6 +824,11 @@ def get_versions(args, name): # Find available versions try: url_dict = spack.url.find_versions_of_archive(args.url) + if len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): + url_dict_filtered = spack.stage.interactive_version_filter(url_dict) + if url_dict_filtered is None: + exit(0) + url_dict = url_dict_filtered except UndetectableVersionError: # Use fake versions tty.warn("Couldn't detect version in: {0}".format(args.url)) @@ -834,11 +840,7 @@ def get_versions(args, name): url_dict = {version: args.url} version_hashes = spack.stage.get_checksums_for_versions( - url_dict, - name, - first_stage_function=guesser, - keep_stage=args.keep_stage, - batch=(args.batch or len(url_dict) == 1), + url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage ) versions = get_version_lines(version_hashes, url_dict) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 73b82c1378d261..c86ed1955bd889 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -7,12 +7,13 @@ import getpass import glob import hashlib +import io import os import shutil import stat import sys import tempfile -from typing import Callable, Dict, Iterable, Optional +from typing import Callable, Dict, Iterable, Optional, Set import llnl.string import llnl.util.lang @@ -27,6 +28,8 @@ partition_path, remove_linked_tree, ) +from llnl.util.tty.colify import colify +from llnl.util.tty.color import colorize import spack.caches import spack.config @@ -35,11 +38,14 @@ import spack.mirror import spack.paths import spack.spec +import spack.stage import spack.util.lock import spack.util.path as sup import spack.util.pattern as pattern import spack.util.url as url_util from spack.util.crypto import bit_length, prefix_bits +from spack.util.editor import editor, executable +from spack.version import StandardVersion, VersionList # The well-known stage source subdirectory name. 
_source_path_subdir = "spack-src" @@ -860,11 +866,187 @@ def purge(): os.remove(stage_path) +def interactive_version_filter( + url_dict: Dict[StandardVersion, str], + known_versions: Iterable[StandardVersion] = (), + *, + url_changes: Set[StandardVersion] = set(), + input: Callable[..., str] = input, +) -> Optional[Dict[StandardVersion, str]]: + """Interactively filter the list of spidered versions. + + Args: + url_dict: Dictionary of versions to URLs + known_versions: Versions that can be skipped because they are already known + + Returns: + Filtered dictionary of versions to URLs or None if the user wants to quit + """ + # Find length of longest string in the list for padding + sorted_and_filtered = sorted(url_dict.keys(), reverse=True) + version_filter = VersionList([":"]) + max_len = max(len(str(v)) for v in sorted_and_filtered) + orig_url_dict = url_dict # only copy when using editor to modify + print_header = True + VERSION_COLOR = spack.spec.VERSION_COLOR + while True: + if print_header: + has_filter = version_filter != VersionList([":"]) + header = [] + if len(sorted_and_filtered) == len(orig_url_dict): + header.append( + f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}" + ) + else: + header.append( + f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions" + ) + if known_versions: + num_new = sum(1 for v in sorted_and_filtered if v not in known_versions) + header.append(f"{llnl.string.plural(num_new, 'new version')}") + if has_filter: + header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@.")) + + version_with_url = [ + colorize( + f"{VERSION_COLOR}{str(v):{max_len}}@. {url_dict[v]}" + f"{' @K{# NOTE: change of URL}' if v in url_changes else ''}" + ) + for v in sorted_and_filtered + ] + tty.msg(". ".join(header), *llnl.util.lang.elide_list(version_with_url)) + print() + + print_header = True + + print("commands:") + commands = ( + "@*b{[c]}hecksum", + "@*b{[e]}dit", + "@*b{[f]}ilter", + "@*b{[a]}sk each", + "@*b{[n]}ew only", + "@*b{[r]}estart", + "@*b{[q]}uit", + ) + colify(list(map(colorize, commands)), indent=2) + + try: + command = input(colorize("@*g{command>} ")).strip().lower() + except EOFError: + print() + command = "q" + + if command == "c": + break + elif command == "e": + # Create a temporary file in the stage dir with lines of the form + # + # which the user can modify. Once the editor is closed, the file is + # read back in and the versions to url dict is updated. + + # Create a temporary file by hashing its contents. 
+            buffer = io.StringIO()
+            buffer.write("# Edit this file to change the versions and urls to fetch\n")
+            for v in sorted_and_filtered:
+                buffer.write(f"{str(v):{max_len}} {url_dict[v]}\n")
+            data = buffer.getvalue().encode("utf-8")
+
+            short_hash = hashlib.sha1(data).hexdigest()[:7]
+            filename = f"{spack.stage.stage_prefix}versions-{short_hash}.txt"
+            filepath = os.path.join(spack.stage.get_stage_root(), filename)
+
+            # Write contents
+            with open(filepath, "wb") as f:
+                f.write(data)
+
+            # Open editor
+            editor(filepath, exec_fn=executable)
+
+            # Read back in
+            with open(filepath, "r") as f:
+                orig_url_dict, url_dict = url_dict, {}
+                for line in f:
+                    line = line.strip()
+                    # Skip empty lines and comments
+                    if not line or line.startswith("#"):
+                        continue
+                    try:
+                        version, url = line.split(None, 1)
+                    except ValueError:
+                        tty.warn(f"Couldn't parse: {line}")
+                        continue
+                    try:
+                        url_dict[StandardVersion.from_string(version)] = url
+                    except ValueError:
+                        tty.warn(f"Invalid version: {version}")
+                        continue
+            sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
+
+            os.unlink(filepath)
+        elif command == "f":
+            tty.msg(
+                colorize(
+                    f"Examples filters: {VERSION_COLOR}1.2@. "
+                    f"or {VERSION_COLOR}1.1:1.3@. "
+                    f"or {VERSION_COLOR}=1.2, 1.2.2:@."
+                )
+            )
+            try:
+                # Allow a leading @ version specifier
+                filter_spec = input(colorize("@*g{filter>} ")).strip().lstrip("@")
+            except EOFError:
+                print()
+                continue
+            try:
+                version_filter.intersect(VersionList([filter_spec]))
+            except ValueError:
+                tty.warn(f"Invalid version specifier: {filter_spec}")
+                continue
+            # Apply filter
+            sorted_and_filtered = [v for v in sorted_and_filtered if v.satisfies(version_filter)]
+        elif command == "a":
+            i = 0
+            while i < len(sorted_and_filtered):
+                v = sorted_and_filtered[i]
+                try:
+                    answer = input(f" {str(v):{max_len}} {url_dict[v]} [Y/n]? ").strip().lower()
+                except EOFError:
+                    # If ^D, don't fully exit, but go back to the command prompt, now with possibly
+                    # fewer versions
+                    print()
+                    break
+                if answer in ("n", "no"):
+                    del sorted_and_filtered[i]
+                elif answer in ("y", "yes", ""):
+                    i += 1
+            else:
+                # Went over each version, so go to checksumming
+                break
+        elif command == "n":
+            sorted_and_filtered = [v for v in sorted_and_filtered if v not in known_versions]
+        elif command == "r":
+            url_dict = orig_url_dict
+            sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
+            version_filter = VersionList([":"])
+        elif command == "q":
+            try:
+                if input("Really quit [y/N]? ").strip().lower() in ("y", "yes"):
").strip().lower() in ("y", "yes"): + return None + except EOFError: + print() + return None + else: + tty.warn(f"Ignoring invalid command: {command}") + print_header = False + continue + return {v: url_dict[v] for v in sorted_and_filtered} + + def get_checksums_for_versions( url_by_version: Dict[str, str], package_name: str, *, - batch: bool = False, first_stage_function: Optional[Callable[[Stage, str], None]] = None, keep_stage: bool = False, concurrency: Optional[int] = None, @@ -890,32 +1072,7 @@ def get_checksums_for_versions( Returns: A dictionary mapping each version to the corresponding checksum """ - sorted_versions = sorted(url_by_version.keys(), reverse=True) - - # Find length of longest string in the list for padding - max_len = max(len(str(v)) for v in sorted_versions) - num_ver = len(sorted_versions) - - tty.msg( - f"Found {llnl.string.plural(num_ver, 'version')} of {package_name}:", - "", - *llnl.util.lang.elide_list( - ["{0:{1}} {2}".format(str(v), max_len, url_by_version[v]) for v in sorted_versions] - ), - ) - print() - - if batch: - archives_to_fetch = len(sorted_versions) - else: - archives_to_fetch = tty.get_number( - "How many would you like to checksum?", default=1, abort="q" - ) - - if not archives_to_fetch: - tty.die("Aborted.") - - versions = sorted_versions[:archives_to_fetch] + versions = sorted(url_by_version.keys(), reverse=True) search_arguments = [(url_by_version[v], v) for v in versions] version_hashes, errors = {}, [] diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index 323ec2ec02270f..ce7784c7a1d3b1 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -7,12 +7,12 @@ import pytest -import llnl.util.tty as tty - import spack.cmd.checksum import spack.repo import spack.spec from spack.main import SpackCommand +from spack.stage import interactive_version_filter +from spack.version import Version spack_checksum = SpackCommand("checksum") @@ -56,18 +56,134 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag assert "version(" in output -@pytest.mark.not_on_windows("Not supported on Windows (yet)") -def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch): - # TODO: mock_fetch doesn't actually work with stage, working around with ignoring - # fail_on_error for now - def _get_number(*args, **kwargs): - return 1 +def input_from_commands(*commands): + """Create a function that returns the next command from a list of inputs for interactive spack + checksum. If None is encountered, this is equivalent to EOF / ^D.""" + commands = iter(commands) + + def _input(prompt): + cmd = next(commands) + if cmd is None: + raise EOFError + assert isinstance(cmd, str) + return cmd + + return _input + + +def test_checksum_interactive_filter(): + # Filter effectively by 1:1.0, then checksum. + input = input_from_commands("f", "@1:", "f", "@:1.0", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + } + + +def test_checksum_interactive_return_from_filter_prompt(): + # Enter and then exit filter subcommand. 
+    input = input_from_commands("f", None, "c")
+    assert interactive_version_filter(
+        {
+            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+            Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
+            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+        },
+        input=input,
+    ) == {
+        Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+        Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
+        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+    }
+
+
+def test_checksum_interactive_quit_returns_none():
+    # Quit after filtering something out (y to confirm quit)
+    input = input_from_commands("f", "@1:", "q", "y")
+    assert (
+        interactive_version_filter(
+            {
+                Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+                Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+                Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+            },
+            input=input,
+        )
+        is None
+    )
 
-    monkeypatch.setattr(tty, "get_number", _get_number)
 
-    output = spack_checksum("preferred-test", fail_on_error=False)
-    assert "version of preferred-test" in output
-    assert "version(" in output
+def test_checksum_interactive_reset_resets():
+    # Filter 1:, then reset, then filter :0, should just give 0.9 (it was filtered out
+    # before reset)
+    input = input_from_commands("f", "@1:", "r", "f", ":0", "c")
+    assert interactive_version_filter(
+        {
+            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+        },
+        input=input,
+    ) == {Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz"}
+
+
+def test_checksum_interactive_ask_each():
+    # Ask each should run on the filtered list. First select 1.x, then select only the second
+    # entry, which is 1.0.1.
+    input = input_from_commands("f", "@1:", "a", "n", "y", "n")
+    assert interactive_version_filter(
+        {
+            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+            Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
+            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+        },
+        input=input,
+    ) == {Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz"}
+
+
+def test_checksum_interactive_quit_from_ask_each():
+    # Enter ask each mode, select the second item, then quit from submenu, then checksum, which
+    # should still include the last item at which ask each stopped.
+    input = input_from_commands("a", "n", "y", None, "c")
+    assert interactive_version_filter(
+        {
+            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
+            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+        },
+        input=input,
+    ) == {
+        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
+        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
+    }
+
+
+def test_checksum_interactive_new_only():
+    # The 1.0 version is known already, and should be dropped on `n`.
+ input = input_from_commands("n", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + known_versions=[Version("1.0")], + input=input, + ) == { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + } def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage): diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 080c924596188f..460c42a1af2add 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -647,7 +647,7 @@ def find_versions_of_archive( list_urls |= additional_list_urls # Grab some web pages to scrape. - pages, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency) + _, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency) # Scrape them for archive URLs regexes = [] diff --git a/lib/spack/spack/util/editor.py b/lib/spack/spack/util/editor.py index 50e6b272c2a9a0..eff896f87e0d83 100644 --- a/lib/spack/spack/util/editor.py +++ b/lib/spack/spack/util/editor.py @@ -61,7 +61,7 @@ def executable(exe: str, args: List[str]) -> int: return cmd.returncode -def editor(*args: List[str], exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool: +def editor(*args: str, exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool: """Invoke the user's editor. This will try to execute the following, in order: From 4594feed65698b13691b6032864a087ed4ef57dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Fri, 13 Oct 2023 21:57:13 +0200 Subject: [PATCH 162/408] wayland: dot is a build dependency (#39854) * wayland: dot is a build dependency otherwise this build failure happens: ../spack-src/doc/meson.build:5:6: ERROR: Program 'dot' not found or not executable * wayland: make building of documentation optional renders several dependencies optional --- .../repos/builtin/packages/wayland/package.py | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/wayland/package.py b/var/spack/repos/builtin/packages/wayland/package.py index 03c276a98090c5..baee2fcc817433 100644 --- a/var/spack/repos/builtin/packages/wayland/package.py +++ b/var/spack/repos/builtin/packages/wayland/package.py @@ -27,6 +27,8 @@ class Wayland(MesonPackage, AutotoolsPackage): default="meson", ) + variant("doc", default=False, description="Build documentation") + version("1.22.0", sha256="bbca9c906a8fb8992409ebf51812f19e2a784b2c169d4b784cdd753b4bb448ef") version("1.21.0", sha256="53b7fa67142e653820030ec049971bcb5e84ac99e05cba5bcb9cb55f43fae4b3") version("1.20.0", sha256="20523cd6f2c18c3c86725467157c6221e19de76fbfad944042a2d494af3c7a92") @@ -45,11 +47,28 @@ class Wayland(MesonPackage, AutotoolsPackage): depends_on("meson@0.56.0:", type="build") depends_on("pkgconfig", type="build") - depends_on("doxygen", type="build") - depends_on("xmlto", type="build") - depends_on("libxslt", type="build") - depends_on("docbook-xsl", type="build") depends_on("libxml2") depends_on("chrpath") depends_on("expat") depends_on("libffi") + + with when("+doc"): + depends_on("docbook-xsl", type="build") + depends_on("doxygen", type="build") + depends_on("xmlto", type="build") + depends_on("libxslt", type="build") + depends_on("graphviz+libgd", type="build") + + @when("build_system=autotools") + def configure_args(self): 
+ args = [] + args.extend(self.enable_or_disable("documentation", variant="doc")) + return args + + @when("build_system=meson") + def meson_args(self): + spec = self.spec + opt_bool = lambda c, o: "-D%s=%s" % (o, str(c).lower()) + args = [] + args.append(opt_bool("+doc" in spec, "documentation")) + return args From e05d48e108c57a570f8836fe6e8200fc6be741a6 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Fri, 13 Oct 2023 13:21:43 -0700 Subject: [PATCH 163/408] cairo: add shared and pic variants (#40302) --- var/spack/repos/builtin/packages/cairo/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/cairo/package.py b/var/spack/repos/builtin/packages/cairo/package.py index 4035728e2e2d40..530b4d49042d2c 100644 --- a/var/spack/repos/builtin/packages/cairo/package.py +++ b/var/spack/repos/builtin/packages/cairo/package.py @@ -39,6 +39,8 @@ class Cairo(AutotoolsPackage): variant("fc", default=False, description="Enable cairo's Fontconfig font backend feature") variant("png", default=False, description="Enable cairo's PNG functions feature") variant("svg", default=False, description="Enable cairo's SVN functions feature") + variant("shared", default=True, description="Build shared libraries") + variant("pic", default=True, description="Enable position-independent code (PIC)") depends_on("libx11", when="+X") depends_on("libxext", when="+X") @@ -61,6 +63,7 @@ class Cairo(AutotoolsPackage): conflicts("+png", when="platform=darwin") conflicts("+svg", when="platform=darwin") + conflicts("+shared~pic") # patch from https://gitlab.freedesktop.org/cairo/cairo/issues/346 patch("fontconfig.patch", when="@1.16.0:1.17.2") @@ -84,6 +87,15 @@ def configure_args(self): args.extend(self.enable_or_disable("gobject")) args.extend(self.enable_or_disable("ft")) args.extend(self.enable_or_disable("fc")) + args.extend(self.enable_or_disable("shared")) + args.extend(self.with_or_without("pic")) + + if self.spec.satisfies("+ft ^freetype~shared"): + pkgconf = which("pkg-config") + ldflags = pkgconf("--libs-only-L", "--static", "freetype2", output=str) + libs = pkgconf("--libs-only-l", "--static", "freetype2", output=str) + args.append(f"LDFLAGS={ldflags}") + args.append(f"LIBS={libs}") return args From c194c42fb98429cddf48a6037aae17756302a5d2 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:32:25 +0200 Subject: [PATCH 164/408] sqlite: add 3.43.2 (#40520) --- var/spack/repos/builtin/packages/sqlite/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index 0f331f964096eb..19890314707597 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -17,6 +17,7 @@ class Sqlite(AutotoolsPackage): homepage = "https://www.sqlite.org" + version("3.43.2", sha256="6d422b6f62c4de2ca80d61860e3a3fb693554d2f75bb1aaca743ccc4d6f609f0") version("3.42.0", sha256="7abcfd161c6e2742ca5c6c0895d1f853c940f203304a0b49da4e1eca5d088ca6") version("3.40.1", sha256="2c5dea207fa508d765af1ef620b637dcb06572afa6f01f0815bd5bbf864b33d9") version("3.40.0", sha256="0333552076d2700c75352256e91c78bf5cd62491589ba0c69aed0a81868980e7") From 8a3e2098d34216cbeb861519fff4662392057fc4 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:33:16 +0200 Subject: [PATCH 165/408] glib: add 2.78.0, 2.76.6 (#40517) --- 
var/spack/repos/builtin/packages/glib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 4f8d1415f1304b..7ccdf2fd2ad0c7 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -26,6 +26,8 @@ class Glib(MesonPackage, AutotoolsPackage): maintainers("michaelkuhn") + version("2.78.0", sha256="44eaab8b720877ce303c5540b657b126f12dc94972d9880b52959f43fb537b30") + version("2.76.6", sha256="1136ae6987dcbb64e0be3197a80190520f7acab81e2bfb937dc85c11c8aa9f04") version("2.76.4", sha256="5a5a191c96836e166a7771f7ea6ca2b0069c603c7da3cba1cd38d1694a395dda") version("2.76.3", sha256="c0be444e403d7c3184d1f394f89f0b644710b5e9331b54fa4e8b5037813ad32a") version("2.76.2", sha256="24f3847857b1d8674cdb0389a36edec0f13c666cd3ce727ecd340eb9da8aca9e") From cf32b478c4f8032ba7d57d318f78f899bec63237 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:33:36 +0200 Subject: [PATCH 166/408] Fix pkgconfig dependencies (#40524) `pkgconfig` is the correct virtual dependency. --- var/spack/repos/builtin/packages/apr-util/package.py | 2 +- var/spack/repos/builtin/packages/libpostal/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index df1379a0fa0ac1..2351c48619f49c 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -32,7 +32,7 @@ class AprUtil(AutotoolsPackage): depends_on("postgresql", when="+pgsql") depends_on("sqlite", when="+sqlite") depends_on("unixodbc", when="+odbc") - depends_on("pkg-config", type="build", when="+crypto ^openssl~shared") + depends_on("pkgconfig", type="build", when="+crypto ^openssl~shared") @property def libs(self): diff --git a/var/spack/repos/builtin/packages/libpostal/package.py b/var/spack/repos/builtin/packages/libpostal/package.py index 449ab126c3d5d6..d04b67e51f3b43 100644 --- a/var/spack/repos/builtin/packages/libpostal/package.py +++ b/var/spack/repos/builtin/packages/libpostal/package.py @@ -28,7 +28,7 @@ class Libpostal(AutotoolsPackage): depends_on("libtool", type="build") depends_on("m4", type="build") depends_on("curl", type="build") - depends_on("pkg-config", type="build") + depends_on("pkgconfig", type="build") def autoreconf(self, spec, prefix): which("sh")("bootstrap.sh") From e40b9cd89db5822e323c40accde4052182f2b360 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:34:32 +0200 Subject: [PATCH 167/408] rocksdb: add 8.6.7 (#40525) --- var/spack/repos/builtin/packages/rocksdb/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/rocksdb/package.py b/var/spack/repos/builtin/packages/rocksdb/package.py index fa177313026c83..80f4b88b822ea9 100644 --- a/var/spack/repos/builtin/packages/rocksdb/package.py +++ b/var/spack/repos/builtin/packages/rocksdb/package.py @@ -14,6 +14,7 @@ class Rocksdb(MakefilePackage): git = "https://github.com/facebook/rocksdb.git" version("master", git=git, branch="master", submodules=True) + version("8.6.7", sha256="cdb2fc3c6a556f20591f564cb8e023e56828469aa3f76e1d9535c443ba1f0c1a") version("8.1.1", sha256="9102704e169cfb53e7724a30750eeeb3e71307663852f01fa08d5a320e6155a8") version("7.7.3", sha256="b8ac9784a342b2e314c821f6d701148912215666ac5e9bdbccd93cf3767cb611") version("7.2.2", 
sha256="c4ea6bd2e3ffe3f0f8921c699234d59108c9122d61b0ba2aa78358642a7b614e") From 81cf9235e470e19ac1f24bf640a993628ace7f94 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:35:46 +0200 Subject: [PATCH 168/408] mariadb-c-client: add 3.3.7 (#40521) --- var/spack/repos/builtin/packages/mariadb-c-client/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mariadb-c-client/package.py b/var/spack/repos/builtin/packages/mariadb-c-client/package.py index 047c30b5aa5320..148fc1e81199fa 100644 --- a/var/spack/repos/builtin/packages/mariadb-c-client/package.py +++ b/var/spack/repos/builtin/packages/mariadb-c-client/package.py @@ -22,6 +22,7 @@ class MariadbCClient(CMakePackage): list_url = "https://downloads.mariadb.com/Connectors/c/" list_depth = 1 + version("3.3.7", sha256="975a9a862fed80f84e0206373f7ef05537aada5b65d99b71b36ab892b44240bf") version("3.3.5", sha256="ca72eb26f6db2befa77e48ff966f71bcd3cb44b33bd8bbb810b65e6d011c1e5c") version("3.3.4", sha256="486e5fdf976a8e7fadf583ae912128655e013ac575fa79b2d1af0fb8827a78ed") version("3.3.2", sha256="7e0722e07d30bb906fac9fe10fb582cde1e148e05a83d9ca7b6fcc884b68fbce") From 76d510b53ff7d611131bdbecf1efe172826f664c Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:37:07 +0200 Subject: [PATCH 169/408] libfuse: add 3.16.2 (#40519) --- var/spack/repos/builtin/packages/libfuse/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libfuse/package.py b/var/spack/repos/builtin/packages/libfuse/package.py index ed09c794a57bea..7180fa8181a9f1 100644 --- a/var/spack/repos/builtin/packages/libfuse/package.py +++ b/var/spack/repos/builtin/packages/libfuse/package.py @@ -18,6 +18,7 @@ class Libfuse(MesonPackage): keep_werror = "all" + version("3.16.2", sha256="1bc306be1a1f4f6c8965fbdd79c9ccca021fdc4b277d501483a711cbd7dbcd6c") version("3.11.0", sha256="25a00226d2d449c15b2f08467d6d5ebbb2a428260c4ab773721c32adbc6da072") version("3.10.5", sha256="e73f75e58da59a0e333d337c105093c496c0fd7356ef3a5a540f560697c9c4e6") version("3.10.4", sha256="bfcb2520fd83db29e9fefd57d3abd5285f38ad484739aeee8e03fbec9b2d984a") @@ -67,7 +68,7 @@ def url_for_version(self, version): conflicts("platform=darwin", msg="libfuse does not support OS-X, use macfuse instead") # Drops the install script which does system configuration - patch("0001-Do-not-run-install-script.patch", when="@3: ~system_install") + patch("0001-Do-not-run-install-script.patch", when="@3:3.11 ~system_install") patch( "https://src.fedoraproject.org/rpms/fuse3/raw/0519b7bf17c4dd1b31ee704d49f8ed94aa5ba6ab/f/fuse3-gcc11.patch", sha256="3ad6719d2393b46615b5787e71778917a7a6aaa189ba3c3e0fc16d110a8414ec", @@ -117,6 +118,9 @@ def meson_args(self): if "~system_install" in self.spec: # Fix meson's setup if meson does not have the host system's udev package: args.append("-Dudevrulesdir={0}".format(self.prefix.etc.rules.d)) + + if self.spec.satisfies("@3.12:"): + args.append("-Dinitscriptdir=") else: # Likewise, but with +system_install, it may install to /lib/udev/rules.d: args.append("-Dudevrulesdir={0}".format("/lib/udev/rules.d")) From 5f3708c83d4168117583005e9b2932d609648e3a Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 14 Oct 2023 11:38:00 +0200 Subject: [PATCH 170/408] libbson, mongo-c-driver: add 1.24.4 (#40518) --- .../repos/builtin/packages/libbson/package.py | 1 + .../packages/mongo-c-driver/package.py | 25 ++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git 
a/var/spack/repos/builtin/packages/libbson/package.py b/var/spack/repos/builtin/packages/libbson/package.py index f3382fe7128bc1..082c50a6acd258 100644 --- a/var/spack/repos/builtin/packages/libbson/package.py +++ b/var/spack/repos/builtin/packages/libbson/package.py @@ -15,6 +15,7 @@ class Libbson(Package): maintainers("michaelkuhn") + version("1.24.4", sha256="2f4a3e8943bfe3b8672c2053f88cf74acc8494dc98a45445f727901eee141544") version("1.23.4", sha256="209406c91fcf7c63aa633179a0a6b1b36ba237fb77e0470fd81f7299a408e334") version("1.23.3", sha256="c8f951d4f965d455f37ae2e10b72914736fc0f25c4ffc14afc3cbadd1a574ef6") version("1.23.2", sha256="123c358827eea07cd76a31c40281bb1c81b6744f6587c96d0cf217be8b1234e3") diff --git a/var/spack/repos/builtin/packages/mongo-c-driver/package.py b/var/spack/repos/builtin/packages/mongo-c-driver/package.py index d36854c931b551..94c92d9c023a99 100644 --- a/var/spack/repos/builtin/packages/mongo-c-driver/package.py +++ b/var/spack/repos/builtin/packages/mongo-c-driver/package.py @@ -14,6 +14,7 @@ class MongoCDriver(Package): maintainers("michaelkuhn") + version("1.24.4", sha256="2f4a3e8943bfe3b8672c2053f88cf74acc8494dc98a45445f727901eee141544") version("1.23.3", sha256="c8f951d4f965d455f37ae2e10b72914736fc0f25c4ffc14afc3cbadd1a574ef6") version("1.21.0", sha256="840ff79480070f98870743fbb332e2c10dd021b6b9c952d08010efdda4d70ee4") version("1.17.6", sha256="8644deec7ae585e8d12566978f2017181e883f303a028b5b3ccb83c91248b150") @@ -52,14 +53,15 @@ class MongoCDriver(Package): depends_on("pkgconfig", type="build") # When updating mongo-c-driver, libbson has to be kept in sync. - depends_on("libbson@1.23.0:1.23", when="@1.23") - depends_on("libbson@1.21.0:1.21", when="@1.21") - depends_on("libbson@1.17.0:1.17", when="@1.17") - depends_on("libbson@1.16.0:1.16", when="@1.16") - depends_on("libbson@1.9.0:1.9", when="@1.9") - depends_on("libbson@1.8.0:1.8", when="@1.8") - depends_on("libbson@1.7.0:1.7", when="@1.7") - depends_on("libbson@1.6.0:1.6", when="@1.6") + depends_on("libbson@1.24", when="@1.24") + depends_on("libbson@1.23", when="@1.23") + depends_on("libbson@1.21", when="@1.21") + depends_on("libbson@1.17", when="@1.17") + depends_on("libbson@1.16", when="@1.16") + depends_on("libbson@1.9", when="@1.9") + depends_on("libbson@1.8", when="@1.8") + depends_on("libbson@1.7", when="@1.7") + depends_on("libbson@1.6", when="@1.6") depends_on("openssl", when="+ssl") depends_on("snappy", when="+snappy") @@ -69,7 +71,12 @@ class MongoCDriver(Package): def cmake_args(self): spec = self.spec - args = ["-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF", "-DENABLE_BSON=SYSTEM"] + args = ["-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF"] + + if spec.satisfies("@1.24:"): + args.append("-DUSE_SYSTEM_LIBBSON=ON") + else: + args.append("-DENABLE_BSON=SYSTEM") if "+ssl" in spec: args.append("-DENABLE_SSL=OPENSSL") From a0f0f034ddbb2bd610d382f91dccb2fe80057958 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 14 Oct 2023 17:28:52 +0200 Subject: [PATCH 171/408] git: add 2.42 (#40528) --- var/spack/repos/builtin/packages/git/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index b63335eabce4bc..23bcb6d4f23675 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -27,6 +27,7 @@ class Git(AutotoolsPackage): # Every new git release comes with a corresponding manpage resource: # 
https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc + version("2.42.0", sha256="34aedd54210d7216a55d642bbb4cfb22695b7610719a106bf0ddef4c82a8beed") version("2.41.0", sha256="c4a6a3dd1827895a80cbd824e14d94811796ae54037549e0da93f7b84cb45b9f") version("2.40.1", sha256="55511f10f3b1cdf5db4e0e3dea61819dfb67661b0507a5a2b061c70e4f87e14c") version("2.39.3", sha256="2f9aa93c548941cc5aff641cedc24add15b912ad8c9b36ff5a41b1a9dcad783e") @@ -143,6 +144,7 @@ class Git(AutotoolsPackage): ) for _version, _sha256_manpage in { + "2.42.0": "51643c53d70ce15dde83b6da2bad76ba0c7bbcd4f944d7c378f03a15b9f2e1de", "2.41.0": "7b77c646b36d33c5c0f62677a147142011093270d6fd628ca38c42d5301f3888", "2.40.1": "6bbde434121bd0bf8aa574c60fd9a162388383679bd5ddd99921505149ffd4c2", "2.40.0": "fda16047e9c1dd07d9585cc26bbf4002ebf8462ada54cb72b97a0e48135fd435", From 8600fdf54ba757e5de2cbb50b94a39b1b5181fb9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 14 Oct 2023 17:29:55 +0200 Subject: [PATCH 172/408] screen: add v4.9.1 (#40529) --- var/spack/repos/builtin/packages/screen/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/screen/package.py b/var/spack/repos/builtin/packages/screen/package.py index 60a1f11da17ae4..0f9002df7d5f45 100644 --- a/var/spack/repos/builtin/packages/screen/package.py +++ b/var/spack/repos/builtin/packages/screen/package.py @@ -14,6 +14,7 @@ class Screen(AutotoolsPackage, GNUMirrorPackage): homepage = "https://www.gnu.org/software/screen/" gnu_mirror_path = "screen/screen-4.3.1.tar.gz" + version("4.9.1", sha256="26cef3e3c42571c0d484ad6faf110c5c15091fbf872b06fa7aa4766c7405ac69") version("4.9.0", sha256="f9335281bb4d1538ed078df78a20c2f39d3af9a4e91c57d084271e0289c730f4") version("4.8.0", sha256="6e11b13d8489925fde25dfb0935bf6ed71f9eb47eff233a181e078fde5655aa1") version("4.6.2", sha256="1b6922520e6a0ce5e28768d620b0f640a6631397f95ccb043b70b91bb503fa3a") From fdd01cbf03913403fd17d593f4ae5c31fa394a43 Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Sat, 14 Oct 2023 11:31:53 -0400 Subject: [PATCH 173/408] cpr: new package (#40509) * New package: cpr * Support libcpr version 1.9 * Fix build phase for git * Update var/spack/repos/builtin/packages/cpr/package.py Co-authored-by: Alec Scott * [@spackbot] updating style on behalf of sethrj --------- Co-authored-by: Alec Scott --- .../repos/builtin/packages/cpr/package.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cpr/package.py diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py new file mode 100644 index 00000000000000..71e32d9960d536 --- /dev/null +++ b/var/spack/repos/builtin/packages/cpr/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Cpr(CMakePackage): + """C++ Requests: Curl for People, a spiritual port of Python Requests.""" + + homepage = "https://docs.libcpr.org/" + url = "https://github.com/libcpr/cpr/archive/refs/tags/1.10.4.tar.gz" + + maintainers("sethrj") + + version("1.10.4", sha256="88462d059cd3df22c4d39ae04483ed50dfd2c808b3effddb65ac3b9aa60b542d") + version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31") + + depends_on("curl") + depends_on("git", when="build") + + def cmake_args(self): + _force = "_FORCE" if self.spec.satisfies("@:1.9") else "" + + return [ + self.define("CPR_USE_SYSTEM_GTEST", True), + self.define(f"CPR{_force}_USE_SYSTEM_CURL", True), + self.define("CPR_ENABLE_SSL", True), + ] From d346a69ef25da0460e2d9f6d9241c959e37d2ca0 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:23:54 +0200 Subject: [PATCH 174/408] py-urllib3: add 2.0.6 (#40207) * py-urllib3: add 2.0.5 * Add py-brotli package * Group brotli dependencies and make limits more specific * Add minimum version limits to variants * Remove python upper limit for py-brotli * Fix restrictions for py-brotli dependency * Fix py-brotli dependency * py-urllib3: add 2.0.6 --- .../builtin/packages/py-brotli/package.py | 17 +++++++++ .../builtin/packages/py-urllib3/package.py | 36 ++++++++++++------- 2 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-brotli/package.py diff --git a/var/spack/repos/builtin/packages/py-brotli/package.py b/var/spack/repos/builtin/packages/py-brotli/package.py new file mode 100644 index 00000000000000..44fcbda75a23b3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-brotli/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBrotli(PythonPackage): + """Python bindings for the Brotli compression library.""" + + homepage = "https://github.com/google/brotli" + pypi = "Brotli/Brotli-1.1.0.tar.gz" + + version("1.1.0", sha256="81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-urllib3/package.py b/var/spack/repos/builtin/packages/py-urllib3/package.py index 236ae0f14d9036..d4b061fa0be8fa 100644 --- a/var/spack/repos/builtin/packages/py-urllib3/package.py +++ b/var/spack/repos/builtin/packages/py-urllib3/package.py @@ -12,7 +12,10 @@ class PyUrllib3(PythonPackage): homepage = "https://urllib3.readthedocs.io/" pypi = "urllib3/urllib3-1.25.6.tar.gz" + git = "https://github.com/urllib3/urllib3.git" + version("2.0.6", sha256="b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564") + version("2.0.5", sha256="13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594") version("1.26.12", sha256="3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e") version("1.26.6", sha256="f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f") version("1.25.9", sha256="3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527") @@ -23,22 +26,29 @@ class PyUrllib3(PythonPackage): version("1.20", sha256="97ef2b6e2878d84c0126b9f4e608e37a951ca7848e4855a7f7f4437d5c34a72f") version("1.14", sha256="dd4fb13a4ce50b18338c7e4d665b21fd38632c5d4b1d9f1a1379276bd3c08d37") - variant("socks", default=False, description="SOCKS and HTTP proxy support") + variant("brotli", default=False, when="@1.25:", description="Add Brotli support") variant("secure", default=False, description="Add SSL/TLS support") - variant("brotli", default=False, description="Add Brotli support") + variant("socks", default=False, when="@1.15:", description="SOCKS and HTTP proxy support") - depends_on("python@2.7:2.8,3.4:", when="@:1.25", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", when="@1.26.6", type=("build", "run")) - depends_on("python@2.7:2.8,3.6:3", when="@1.26.12:", type=("build", "run")) + depends_on("py-hatchling@1.6:1", when="@2:", type="build") - depends_on("py-setuptools", type="build") + with when("+brotli"): + depends_on("py-brotli@1.0.9:", when="@1.26.9:", type=("build", "run")) - depends_on("py-pyopenssl@0.14:", when="+secure") - depends_on("py-cryptography@1.3.4:", when="+secure") - depends_on("py-idna@2:", when="+secure") - depends_on("py-certifi", when="+secure") - depends_on("py-urllib3-secure-extra", when="+secure @1.26.12:") + # Historical dependencies + depends_on("py-brotlipy@0.6:", when="@:1.26.8", type=("build", "run")) - depends_on("py-pysocks@1.5.6,1.5.8:1", when="+socks") + with when("+secure"): + depends_on("py-pyopenssl@17.1:", when="@2:", type=("build", "run")) + depends_on("py-pyopenssl@0.14:", when="@1", type=("build", "run")) + depends_on("py-cryptography@1.9:", when="@2:", type=("build", "run")) + depends_on("py-cryptography@1.3.4:", when="@1", type=("build", "run")) + depends_on("py-idna@2:", type=("build", "run")) + depends_on("py-certifi", type=("build", "run")) + depends_on("py-urllib3-secure-extra", when="@1.26.12:", type=("build", "run")) - depends_on("py-brotlipy@0.6:", when="+brotli") + depends_on("py-pysocks@1.5.6,1.5.8:1", when="+socks", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools", when="@1", type="build") + depends_on("python@3.6:3", 
when="@1.26.12:1", type=("build", "run")) From 5534c1334e1d3e16d487c001a7e056c8fea6c47e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 14 Oct 2023 19:25:02 +0200 Subject: [PATCH 175/408] [add] py-graphene-tornado: new recipe, required by py-cylc-uiserver (#39985) * [add] py-graphene-tornado: new recipe, required by py-cylc-uiserver * py-graphene-tornado: Taking reviewing into account * py-graphene-tornado: add type run in dependences py-jinja, py-tornado and py-werkzeug Co-authored-by: Adam J. Stewart --------- Co-authored-by: LydDeb Co-authored-by: Adam J. Stewart --- .../packages/py-graphene-tornado/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-graphene-tornado/package.py diff --git a/var/spack/repos/builtin/packages/py-graphene-tornado/package.py b/var/spack/repos/builtin/packages/py-graphene-tornado/package.py new file mode 100644 index 00000000000000..ba44045b4c6d79 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-graphene-tornado/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGrapheneTornado(PythonPackage): + """Graphene Tornado integration.""" + + homepage = "https://github.com/graphql-python/graphene-tornado" + pypi = "graphene-tornado/graphene-tornado-2.6.1.tar.gz" + + maintainers("LydDeb") + + version("2.6.1", sha256="953bf812267177224ce1ac2a93c669069d85a8fa187a9fac681b76b63dffebc2") + + depends_on("py-setuptools", type="build") + depends_on("py-six@1.10.0:", type=("build", "run")) + depends_on("py-graphene@2.1:2", type=("build", "run")) + depends_on("py-jinja2@2.10.1:", type=("build", "run")) + depends_on("py-tornado@5.1.0:", type=("build", "run")) + depends_on("py-werkzeug@0.12.2", type=("build", "run")) + depends_on("py-pytest", type=("build")) From d7e1e63a5b12e42de954f857cf7cccfc16424162 Mon Sep 17 00:00:00 2001 From: Sam Gillingham Date: Sun, 15 Oct 2023 03:28:51 +1000 Subject: [PATCH 176/408] py-python-fmask: update to latest versions (#40378) * tidy and add new version * add comment about dependencies * whitespace --- .../repos/builtin/packages/py-python-fmask/package.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-python-fmask/package.py b/var/spack/repos/builtin/packages/py-python-fmask/package.py index 2cb0ce8f1a6465..08a32be346a532 100644 --- a/var/spack/repos/builtin/packages/py-python-fmask/package.py +++ b/var/spack/repos/builtin/packages/py-python-fmask/package.py @@ -11,11 +11,16 @@ class PyPythonFmask(PythonPackage): the FMASK algorithm for Landsat and Sentinel-2""" homepage = "https://www.pythonfmask.org/en/latest/" - url = "https://github.com/ubarsc/python-fmask/archive/pythonfmask-0.5.4.tar.gz" + url = "https://github.com/ubarsc/python-fmask/releases/download/pythonfmask-0.5.8/python-fmask-0.5.8.tar.gz" - version("0.5.4", sha256="a216aa3108de837fec182602b2b4708442746be31fc1585906802437784a63fe") + version("0.5.8", sha256="d55f54d3fecde818374017fdbe0ad173c893ef74c79ba2a7bc1890b7ec416c2f") + version("0.5.7", sha256="da9dad1b977a50599d068dedaed007100b20322a79ca5d78f702712647c2c3f3") + version("0.5.6", sha256="a63abd12d36fb4ec010e618bcabd5e2f782a0479ebcbf40aec1bcef943c00c5c") + version("0.5.5", 
sha256="8257227d2527ea5fbd229f726d06d05986914beafd090acef05772a27dbbf062") + version("0.5.4", sha256="ed20776f6b63615f664da89a9e3951c79437b66c2bf88fe19a93c2cc7dc40c82") + + # Note: Dependencies are listed here: https://github.com/ubarsc/python-fmask/blob/master/doc/source/index.rst#introduction - depends_on("python@2.7:2.8,3.4:", type=("build", "run")) # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-rios", type=("build", "run")) From fc2d1dd9c6606eadd47213a977fee147675e836e Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:34:45 +0200 Subject: [PATCH 177/408] py-pyqt6: add 6.5.2 (#40413) --- var/spack/repos/builtin/packages/py-pyqt6/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-pyqt6/package.py b/var/spack/repos/builtin/packages/py-pyqt6/package.py index cdef12633236b4..6791b50ff0e1ea 100644 --- a/var/spack/repos/builtin/packages/py-pyqt6/package.py +++ b/var/spack/repos/builtin/packages/py-pyqt6/package.py @@ -13,6 +13,7 @@ class PyPyqt6(SIPPackage): url = "https://files.pythonhosted.org/packages/source/P/PyQt6/PyQt6-6.5.1.tar.gz" list_url = "https://pypi.org/simple/PyQt6/" + version("6.5.2", sha256="1487ee7350f9ffb66d60ab4176519252c2b371762cbe8f8340fd951f63801280") version("6.5.1", sha256="e166a0568c27bcc8db00271a5043936226690b6a4a74ce0a5caeb408040a97c3") # pyproject.toml From 425767e0cfe9a1e31337a7f86fc4212228d4d53e Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:40:40 +0200 Subject: [PATCH 178/408] py-bidskit: add 2023.9.7 (#40444) --- var/spack/repos/builtin/packages/py-bidskit/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-bidskit/package.py b/var/spack/repos/builtin/packages/py-bidskit/package.py index d54a2a778eecbd..25e38c5127ef56 100644 --- a/var/spack/repos/builtin/packages/py-bidskit/package.py +++ b/var/spack/repos/builtin/packages/py-bidskit/package.py @@ -12,6 +12,7 @@ class PyBidskit(PythonPackage): homepage = "https://github.com/jmtyszka/bidskit" pypi = "bidskit/bidskit-2022.10.13.tar.gz" + version("2023.9.7", sha256="029d9aecbbcb2df733858ceb3e6d5dd5013c36e431e40fb522a580adc7b667a5") version("2023.2.16", sha256="b2e4e3246d43a6f00af6c0391ec8fecc59405241de1ea9ca68eb4d8128d62c7b") version( "2022.10.13", sha256="576b92cef187032c73f64e2e6a5b0be0c06771442048a33c55e224b3df0aae3a" From 50e158ee7d9e161c97f938948c8ea2aacd7e4dc8 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:41:34 +0200 Subject: [PATCH 179/408] py-certifi: add 2023.7.22 (#40445) --- var/spack/repos/builtin/packages/py-certifi/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-certifi/package.py b/var/spack/repos/builtin/packages/py-certifi/package.py index bb26b43fbc6b5e..bcf14be2f068f0 100644 --- a/var/spack/repos/builtin/packages/py-certifi/package.py +++ b/var/spack/repos/builtin/packages/py-certifi/package.py @@ -14,6 +14,7 @@ class PyCertifi(PythonPackage): homepage = "https://github.com/certifi/python-certifi" pypi = "certifi/certifi-2020.6.20.tar.gz" + version("2023.7.22", sha256="539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082") version("2023.5.7", sha256="0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7") version("2022.12.7", 
sha256="35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3") version("2022.9.14", sha256="36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5") From b799ca37df56d69173077d0eb860182a874e681d Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:47:01 +0200 Subject: [PATCH 180/408] py-chardet: add 5.2.0 (#40468) --- var/spack/repos/builtin/packages/py-chardet/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-chardet/package.py b/var/spack/repos/builtin/packages/py-chardet/package.py index 246f00c207f57c..f7a3f5cc621ff0 100644 --- a/var/spack/repos/builtin/packages/py-chardet/package.py +++ b/var/spack/repos/builtin/packages/py-chardet/package.py @@ -12,6 +12,7 @@ class PyChardet(PythonPackage): homepage = "https://github.com/chardet/chardet" pypi = "chardet/chardet-3.0.4.tar.gz" + version("5.2.0", sha256="1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7") version("5.1.0", sha256="0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5") version("5.0.0", sha256="0368df2bfd78b5fc20572bb4e9bb7fb53e2c094f60ae9993339e8671d0afb8aa") version("4.0.0", sha256="0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa") @@ -20,4 +21,6 @@ class PyChardet(PythonPackage): version("2.3.0", sha256="e53e38b3a4afe6d1132de62b7400a4ac363452dc5dfcf8d88e8e0cce663c68aa") depends_on("py-setuptools", type="build") + + # Historical dependencies depends_on("py-pytest-runner", when="@3", type="build") From d286284f83091bd6a645c6f632caa13ef6ee3d94 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:56:14 +0200 Subject: [PATCH 181/408] py-click: add 8.1.7 (#40473) --- var/spack/repos/builtin/packages/py-click/package.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-click/package.py b/var/spack/repos/builtin/packages/py-click/package.py index d5830049ea13fd..e32256b016f780 100644 --- a/var/spack/repos/builtin/packages/py-click/package.py +++ b/var/spack/repos/builtin/packages/py-click/package.py @@ -11,8 +11,9 @@ class PyClick(PythonPackage): homepage = "https://click.palletsprojects.com" pypi = "click/click-7.1.2.tar.gz" - git = "https://github.com/pallets/click/" + git = "https://github.com/pallets/click.git" + version("8.1.7", sha256="ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de") version("8.1.3", sha256="7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e") version("8.0.3", sha256="410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b") version("8.0.1", sha256="8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a") @@ -24,9 +25,6 @@ class PyClick(PythonPackage): ) version("6.6", sha256="cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9") - depends_on("python@3.7:", when="@8.1:", type=("build", "run")) - depends_on("python@3.6:", when="@8:", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-importlib-metadata", when="@8: ^python@:3.7", type=("build", "run")) From 203469ba5c94c00da212e9f276f79b27155e288f Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 19:56:59 +0200 Subject: [PATCH 182/408] py-charset-normalizer: add 3.3.0 (#40471) --- .../repos/builtin/packages/py-charset-normalizer/package.py | 1 + 
1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-charset-normalizer/package.py b/var/spack/repos/builtin/packages/py-charset-normalizer/package.py index eab217431524cd..706c49c9884606 100644 --- a/var/spack/repos/builtin/packages/py-charset-normalizer/package.py +++ b/var/spack/repos/builtin/packages/py-charset-normalizer/package.py @@ -13,6 +13,7 @@ class PyCharsetNormalizer(PythonPackage): homepage = "https://github.com/ousret/charset_normalizer" pypi = "charset-normalizer/charset-normalizer-2.0.7.tar.gz" + version("3.3.0", sha256="63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6") version("3.1.0", sha256="34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5") version("2.1.1", sha256="5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845") version("2.0.12", sha256="2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597") From f5ca272b95af5c490267bedcd5c66496fbd00167 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 20:03:43 +0200 Subject: [PATCH 183/408] py-cfgv: add 3.4.0 (#40465) --- var/spack/repos/builtin/packages/py-cfgv/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-cfgv/package.py b/var/spack/repos/builtin/packages/py-cfgv/package.py index df26dcae22e23e..9158f42762f5b6 100644 --- a/var/spack/repos/builtin/packages/py-cfgv/package.py +++ b/var/spack/repos/builtin/packages/py-cfgv/package.py @@ -12,10 +12,13 @@ class PyCfgv(PythonPackage): homepage = "https://github.com/asottile/cfgv/" pypi = "cfgv/cfgv-2.0.1.tar.gz" + version("3.4.0", sha256="e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560") version("3.3.1", sha256="f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736") version("2.0.1", sha256="edb387943b665bf9c434f717bf630fa78aecd53d5900d2e05da6ad6048553144") + depends_on("python@3.8:", when="@3.4:", type=("build", "run")) depends_on("python@3.6.1:", when="@3.1:", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:", type=("build", "run")) depends_on("py-setuptools", type="build") + + # Historical dependencies depends_on("py-six", when="@:2", type=("build", "run")) From 3725c1957e4e43d8403d78c5ce7e6edafdcee637 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 20:05:36 +0200 Subject: [PATCH 184/408] py-pybids: add 0.16.3 (#40486) --- var/spack/repos/builtin/packages/py-pybids/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-pybids/package.py b/var/spack/repos/builtin/packages/py-pybids/package.py index 7155b92c5248e4..8a77813ad6a05d 100644 --- a/var/spack/repos/builtin/packages/py-pybids/package.py +++ b/var/spack/repos/builtin/packages/py-pybids/package.py @@ -12,6 +12,7 @@ class PyPybids(PythonPackage): homepage = "https://github.com/bids-standard/pybids" pypi = "pybids/pybids-0.13.1.tar.gz" + version("0.16.3", sha256="10e279350c8d14ca602c0d4469a5e4bf7ff393e8643c831a546ae735b6b82cc3") version("0.16.1", sha256="1a6ab06d375f3b783e738826e6d220b2f4145419b4b02f4edbcc8cb7c9b2208a") version("0.15.3", sha256="4d99c979bc4bc209cff70a02d1da309c9bf8c6b0338e2a0b66ebea77c7f3c461") version("0.15.1", sha256="0253507a04dbfea43eb1f75a1f71aab04be21076bfe96c004888000b802e38f2") @@ -22,7 +23,6 @@ class PyPybids(PythonPackage): version("0.8.0", sha256="fe60fa7d1e171e75a38a04220ed992f1b062531a7452fcb7ce5ba81bb6abfdbc") 
depends_on("python@3.8:", when="@0.16:", type=("build", "run")) - depends_on("python@3.7:", when="@0.15:", type=("build", "run")) depends_on("py-setuptools", when="@0.15.6:", type="build") depends_on("py-setuptools@30.3:60,61.0.1:", when="@:0.15.5", type="build") depends_on("py-versioneer+toml", when="@0.15.6:", type="build") From 4ed3b6ed1390392c3f9e9e7e429c1424ea6e57a1 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 14 Oct 2023 20:06:51 +0200 Subject: [PATCH 185/408] py-pydicom: add 2.4.3 (#40487) --- var/spack/repos/builtin/packages/py-pydicom/package.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pydicom/package.py b/var/spack/repos/builtin/packages/py-pydicom/package.py index 1001e3ddd47ec8..e2d1fadcecb614 100644 --- a/var/spack/repos/builtin/packages/py-pydicom/package.py +++ b/var/spack/repos/builtin/packages/py-pydicom/package.py @@ -15,15 +15,15 @@ class PyPydicom(PythonPackage): homepage = "https://github.com/pydicom/pydicom" pypi = "pydicom/pydicom-2.1.2.tar.gz" + version("2.4.3", sha256="51906e0b9fb6e184a0f56298cb43ed716b7cf7edc00f6b71d5c769bc1f982402") version("2.4.1", sha256="6cb210dbe5586841036e8eeb2d4feb4df22a48f39161ba7ee0bf3c89faaba946") version("2.3.0", sha256="dbfa081c9ad9ac8ff8a8efbd71784104db9eecf02fd775f7d7773f2183f89386") version("2.1.2", sha256="65f36820c5fec24b4e7ca45b7dae93e054ed269d55f92681863d39d30459e2fd") variant("numpy", default=False, description="Use NumPy for Pixel data") - depends_on("python@3.7:", when="@2.4:", type=("build", "run")) - depends_on("python@3.6.1:", type=("build", "run")) - depends_on("py-flit-core@3.2:3", when="@2.4:", type=("build", "run")) + depends_on("py-flit-core@3.2:3", when="@2.4:", type="build") + depends_on("py-numpy", when="+numpy", type="run") # Historical dependencies From 2e34bcd2bef2d45d0f6d233005d7883dc197290b Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Sat, 14 Oct 2023 11:09:24 -0700 Subject: [PATCH 186/408] Add static support for proj (#40322) * Add static-only option for proj * Update proj * update proj * Update package.py * [@spackbot] updating style on behalf of AlexanderRichert-NOAA * Update package.py * proj: Add pic and static variant support for cmake --- var/spack/repos/builtin/packages/proj/package.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py index 065703c64fdded..a49adf19b8cb62 100644 --- a/var/spack/repos/builtin/packages/proj/package.py +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -56,6 +56,8 @@ class Proj(CMakePackage, AutotoolsPackage): variant("tiff", default=True, description="Enable TIFF support") variant("curl", default=True, description="Enable curl support") + variant("shared", default=True, description="Enable shared libraries") + variant("pic", default=False, description="Enable position-independent code (PIC)") # https://github.com/OSGeo/PROJ#distribution-files-and-format # https://github.com/OSGeo/PROJ-data @@ -130,6 +132,8 @@ def cmake_args(self): args = [ self.define_from_variant("ENABLE_TIFF", "tiff"), self.define_from_variant("ENABLE_CURL", "curl"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), ] if self.spec.satisfies("@6:") and self.pkg.run_tests: 
args.append(self.define("USE_EXTERNAL_GTEST", True)) @@ -144,14 +148,18 @@ def configure_args(self): args.append("--with-external-gtest") if self.spec.satisfies("@7:"): - if "+tiff" in self.spec: - args.append("--enable-tiff") - else: - args.append("--disable-tiff") + args.extend(self.enable_or_disable("tiff")) if "+curl" in self.spec: args.append("--with-curl=" + self.spec["curl"].prefix.bin.join("curl-config")) else: args.append("--without-curl") + args.extend(self.enable_or_disable("shared")) + args.extend(self.with_or_without("pic")) + + if self.spec.satisfies("^libtiff+jpeg~shared"): + args.append("LDFLAGS=%s" % self.spec["jpeg"].libs.ld_flags) + args.append("LIBS=%s" % self.spec["jpeg"].libs.link_flags) + return args From 0e4b901c2f3346d4928779eafc6cfc268252ab6b Mon Sep 17 00:00:00 2001 From: Dom Heinzeller Date: Sat, 14 Oct 2023 15:02:41 -0600 Subject: [PATCH 187/408] Bug fixes in py-awscrt to fix build errors reported in #40386 (#40469) * Bug fix in var/spack/repos/builtin/packages/py-awscrt/package.py: on Linux, tell aws-crt-python to use libcrypto from spack (openssl) * Bug fix in var/spack/repos/builtin/packages/py-awscrt/package.py: add missing build dependencies cmake (for all), openssl (for linux) * Update var/spack/repos/builtin/packages/py-awscrt/package.py Co-authored-by: Adam J. Stewart --------- Co-authored-by: Adam J. Stewart --- var/spack/repos/builtin/packages/py-awscrt/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-awscrt/package.py b/var/spack/repos/builtin/packages/py-awscrt/package.py index 652e7164a6f445..81428d4d4f63c0 100644 --- a/var/spack/repos/builtin/packages/py-awscrt/package.py +++ b/var/spack/repos/builtin/packages/py-awscrt/package.py @@ -16,4 +16,11 @@ class PyAwscrt(PythonPackage): version("0.16.16", sha256="13075df2c1d7942fe22327b6483274517ee0f6ae765c4e6b6ae9ef5b4c43a827") + depends_on("cmake@3.1:", type=("build")) + depends_on("openssl", type=("build"), when="platform=linux") depends_on("py-setuptools", type=("build")) + + # On Linux, tell aws-crt-python to use libcrypto from spack (openssl) + def setup_build_environment(self, env): + with when("platform=linux"): + env.set("AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO", 1) From 27a233c1dddb48e1d19c75dbab23353576fff491 Mon Sep 17 00:00:00 2001 From: Miroslav Stoyanov <30537612+mkstoyanov@users.noreply.github.com> Date: Sun, 15 Oct 2023 00:55:01 -0400 Subject: [PATCH 188/408] heffte: fix rocm deps (#40514) --- var/spack/repos/builtin/packages/heffte/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index a9908bb2de6af3..32d7e18cc6f848 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -84,6 +84,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocfft@5.2.3:", when="@develop+rocm", type=("build", "run")) depends_on("magma@2.5.3:", when="+cuda+magma", type=("build", "run")) depends_on("magma+rocm@2.6.1:", when="+magma+rocm @2.1:", type=("build", "run")) + depends_on("rocblas@3.8:", when="+magma+rocm", type=("build", "run")) + depends_on("rocsparse@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipsparse@3.8:", when="+magma+rocm", type=("build", "run")) From 00d3bc9924d05dbf0a6749d798762606cb3d77e5 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Sat, 14 Oct 2023 
22:01:53 -0700 Subject: [PATCH 189/408] mfem: add a patch for v4.6 for gcc 13, see mfem PR 3903 (#40495) --- var/spack/repos/builtin/packages/mfem/mfem-4.6.patch | 12 ++++++++++++ var/spack/repos/builtin/packages/mfem/package.py | 1 + 2 files changed, 13 insertions(+) create mode 100644 var/spack/repos/builtin/packages/mfem/mfem-4.6.patch diff --git a/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch b/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch new file mode 100644 index 00000000000000..94f1f863f8d267 --- /dev/null +++ b/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch @@ -0,0 +1,12 @@ +diff --git a/general/kdtree.hpp b/general/kdtree.hpp +index eebbdaa27..b35a33ea4 100644 +--- a/general/kdtree.hpp ++++ b/general/kdtree.hpp +@@ -17,6 +17,7 @@ + #include + #include + #include ++#include + #include + + namespace mfem diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index eb869fc255b9e4..b4c66447a16139 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -468,6 +468,7 @@ class Mfem(Package, CudaPackage, ROCmPackage): # upstream. patch("mfem-4.0.0-makefile-syntax-fix.patch", when="@4.0.0") patch("mfem-4.5.patch", when="@4.5.0") + patch("mfem-4.6.patch", when="@4.6.0") phases = ["configure", "build", "install"] From 3e5c3e60403cbc98df94b9cba10043eb8b8446e0 Mon Sep 17 00:00:00 2001 From: Miroslav Stoyanov <30537612+mkstoyanov@users.noreply.github.com> Date: Sun, 15 Oct 2023 01:07:15 -0400 Subject: [PATCH 190/408] tasmanian: patch for clang17 (#40515) --- .../builtin/packages/tasmanian/package.py | 3 + .../packages/tasmanian/tas80_clang17.patch | 101 ++++++++++++++++++ 2 files changed, 104 insertions(+) create mode 100644 var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index 92aa21833973b0..b4d4ead7bf3842 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -88,6 +88,9 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): conflicts("+magma", when="~cuda~rocm") # currently MAGMA only works with CUDA conflicts("+cuda", when="+rocm") # can pick CUDA or ROCm, not both + # patching a bug in the interpretation of the C++ standard + patch("tas80_clang17.patch", when="@8.0") + def setup_build_environment(self, env): # needed for the hipcc compiler if "+rocm" in self.spec: diff --git a/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch b/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch new file mode 100644 index 00000000000000..241789cddd4939 --- /dev/null +++ b/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch @@ -0,0 +1,101 @@ +diff --git a/SparseGrids/tsgGridFourier.cpp b/SparseGrids/tsgGridFourier.cpp +index 31e75a87..438b0631 100644 +--- a/SparseGrids/tsgGridFourier.cpp ++++ b/SparseGrids/tsgGridFourier.cpp +@@ -961,7 +961,7 @@ std::vector GridFourier::getCandidateConstructionPoints(std::functionaddTensor(new_tensors.getIndex(i), [&](int l)->int{ return wrapper.getNumPoints(l); }, tweights[i]); + +- return MultiIndexManipulations::indexesToNodes(dynamic_values->getNodesIndexes(), wrapper); ++ return MultiIndexManipulations::getIndexesToNodes(dynamic_values->getNodesIndexes(), wrapper); + } + std::vector GridFourier::getMultiIndex(const double x[]){ + std::vector p(num_dimensions); +diff --git 
a/SparseGrids/tsgGridGlobal.cpp b/SparseGrids/tsgGridGlobal.cpp +index dd81ace0..01aa4fa3 100644 +--- a/SparseGrids/tsgGridGlobal.cpp ++++ b/SparseGrids/tsgGridGlobal.cpp +@@ -473,7 +473,7 @@ std::vector GridGlobal::getCandidateConstructionPoints(std::functionaddTensor(new_tensors.getIndex(i), [&](int l)->int{ return wrapper.getNumPoints(l); }, tweights[i]); + +- return MultiIndexManipulations::indexesToNodes(dynamic_values->getNodesIndexes(), wrapper); ++ return MultiIndexManipulations::getIndexesToNodes(dynamic_values->getNodesIndexes(), wrapper); + } + std::vector GridGlobal::getMultiIndex(const double x[]){ + std::vector p(num_dimensions); +diff --git a/SparseGrids/tsgGridLocalPolynomial.cpp b/SparseGrids/tsgGridLocalPolynomial.cpp +index f2cf6809..176736c3 100644 +--- a/SparseGrids/tsgGridLocalPolynomial.cpp ++++ b/SparseGrids/tsgGridLocalPolynomial.cpp +@@ -576,7 +576,7 @@ void GridLocalPolynomial::expandGrid(const std::vector &point, const std::v + surpluses = Data2D(num_outputs, 1, std::vector(value)); // one value is its own surplus + }else{ // merge with existing points + // compute the surplus for the point +- std::vector xnode = MultiIndexManipulations::indexesToNodes(point, *rule); ++ std::vector xnode = MultiIndexManipulations::getIndexesToNodes(point, *rule); + std::vector approximation(num_outputs), surp(num_outputs); + evaluate(xnode.data(), approximation.data()); + std::transform(approximation.begin(), approximation.end(), value.begin(), surp.begin(), [&](double e, double v)->double{ return v - e; }); +@@ -755,7 +755,7 @@ void GridLocalPolynomial::updateSurpluses(MultiIndexSet const &work, int max_lev + for(int s=0; s x = MultiIndexManipulations::indexesToNodes(work.getIndex(i), num_dimensions, *rule); ++ std::vector x = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), num_dimensions, *rule); + double *surpi = surpluses.getStrip(i); + + std::vector monkey_count(max_level + 1); +@@ -818,7 +818,7 @@ void GridLocalPolynomial::applyTransformationTransposed(double weights[], const + for(int l=active_top_level; l>0; l--){ + for(size_t i=0; i node = MultiIndexManipulations::indexesToNodes(work.getIndex(active_points[i]), num_dimensions, *rule); ++ std::vector node = MultiIndexManipulations::getIndexesToNodes(work.getIndex(active_points[i]), num_dimensions, *rule); + + std::fill(used.begin(), used.end(), false); + +@@ -1071,7 +1071,7 @@ void GridLocalPolynomial::getQuadratureWeights(double *weights) const{ + for(int l=top_level; l>0; l--){ + for(int i=0; i node = MultiIndexManipulations::indexesToNodes(work.getIndex(i), num_dimensions, *rule); ++ std::vector node = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), num_dimensions, *rule); + + std::vector used(work.getNumIndexes(), false); + +diff --git a/SparseGrids/tsgGridWavelet.cpp b/SparseGrids/tsgGridWavelet.cpp +index b043d077..d2f8115c 100644 +--- a/SparseGrids/tsgGridWavelet.cpp ++++ b/SparseGrids/tsgGridWavelet.cpp +@@ -415,7 +415,7 @@ void GridWavelet::buildInterpolationMatrix() const{ + for(int b=0; b xi = MultiIndexManipulations::indexesToNodes(work.getIndex(i), (size_t) num_dimensions, rule1D); ++ std::vector xi = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), (size_t) num_dimensions, rule1D); + + // loop over the basis functions to see if supported + int numpntr = 0; +diff --git a/SparseGrids/tsgIndexManipulator.hpp b/SparseGrids/tsgIndexManipulator.hpp +index 16a1321f..0c27a4cd 100644 +--- a/SparseGrids/tsgIndexManipulator.hpp ++++ b/SparseGrids/tsgIndexManipulator.hpp +@@ -562,7 
+562,7 @@ OutputIteratorLike indexesToNodes(IteratorLike ibegin, size_t num_entries, RuleL + * \brief Overload that returns the result in a vector. + */ + template +-std::vector indexesToNodes(IndexList const &list, RuleLike const &rule){ ++std::vector getIndexesToNodes(IndexList const &list, RuleLike const &rule){ + std::vector result(std::distance(list.begin(), list.end())); + indexesToNodes(list, rule, result.begin()); + return result; +@@ -573,7 +573,7 @@ std::vector indexesToNodes(IndexList const &list, RuleLike const &rule){ + * \brief Overload that returns the result in a vector. + */ + template +-std::vector indexesToNodes(IteratorLike ibegin, size_t num_entries, RuleLike const &rule){ ++std::vector getIndexesToNodes(IteratorLike ibegin, size_t num_entries, RuleLike const &rule){ + std::vector result(num_entries); + indexesToNodes(ibegin, num_entries, rule, result.begin()); + return result; From c84577976f9d76cc8413b6310f1286ff77c40678 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Sun, 15 Oct 2023 02:28:08 -0700 Subject: [PATCH 191/408] Add matrix space link and badge to README (#40532) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index cf4b413af8b9e0..ecbaf1ae71bb42 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ [![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Slack](https://slack.spack.io/badge.svg)](https://slack.spack.io) +[![Matrix](https://img.shields.io/matrix/spack-space%3Amatrix.org?label=Matrix)](https://matrix.to/#/#spack-space:matrix.org) Spack is a multi-platform package manager that builds and installs multiple versions and configurations of software. It works on Linux, @@ -62,6 +63,7 @@ Resources: * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com). To get an invitation, visit [slack.spack.io](https://slack.spack.io). +* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org). * [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A. * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack) * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to From 8692ebffd4f243a17ce10883ae0263a64047fc16 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sun, 15 Oct 2023 12:16:51 -0500 Subject: [PATCH 192/408] py-click: fix Python 3.6 support (#40535) --- var/spack/repos/builtin/packages/py-click/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-click/package.py b/var/spack/repos/builtin/packages/py-click/package.py index e32256b016f780..16585406035fb4 100644 --- a/var/spack/repos/builtin/packages/py-click/package.py +++ b/var/spack/repos/builtin/packages/py-click/package.py @@ -25,6 +25,8 @@ class PyClick(PythonPackage): ) version("6.6", sha256="cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9") + # Needed to ensure that Spack can bootstrap black with Python 3.6 + depends_on("python@3.7:", when="@8.1:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-importlib-metadata", when="@8: ^python@:3.7", type=("build", "run")) From f2c494b8ac7497ffd9276cf96b599abafa5918a6 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sun, 15 Oct 2023 12:18:04 -0500 Subject: [PATCH 193/408] Python: allow OneAPI 2024 when it's released (#40536) --- var/spack/repos/builtin/packages/python/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index b6c78adbe319c4..59306d8f2377ca 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -318,7 +318,7 @@ class Python(Package): # See https://github.com/python/cpython/issues/106424 # datetime.now(timezone.utc) segfaults - conflicts("@3.9:", when="%oneapi@2022.2.1:") + conflicts("@3.9:", when="%oneapi@2022.2.1:2023") # Used to cache various attributes that are expensive to compute _config_vars: Dict[str, Dict[str, str]] = {} From b766df4ab77b06ab7d7dfe1ad84c8f598bea187c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sun, 15 Oct 2023 15:22:49 -0500 Subject: [PATCH 194/408] py-torchdata: version rename (#40522) --- var/spack/repos/builtin/packages/py-torchdata/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index 51b95c1cca7758..fd9367f31d0179 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,7 +16,7 @@ class PyTorchdata(PythonPackage): maintainers("adamjstewart") version("main", branch="main") - version("0.7.0-rc1", sha256="9d48e1a5bc0fa4e50976b89aea38d028dcb7e580dba594755d266e19c369b747") + version("0.7.0", sha256="0b444719c3abc67201ed0fea92ea9c4100e7f36551ba0d19a09446cc11154eb3") version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257") version("0.6.0", sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b") version("0.5.1", sha256="69d80bd33ce8f08e7cfeeb71cefddfc29cede25a85881e33dbae47576b96ed29") From c22089dcbd6fe0869a5fbb2f500be506fe52263d Mon Sep 17 00:00:00 2001 From: "Garth N. 
Wells" Date: Sun, 15 Oct 2023 22:53:03 +0100 Subject: [PATCH 195/408] Version update and simplify dependencies (#40543) --- var/spack/repos/builtin/packages/py-nanobind/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py index 493d6394affa36..19c3d915f98843 100644 --- a/var/spack/repos/builtin/packages/py-nanobind/package.py +++ b/var/spack/repos/builtin/packages/py-nanobind/package.py @@ -23,6 +23,9 @@ class PyNanobind(PythonPackage): maintainers("chrisrichardson", "garth-wells", "ma595") version("master", branch="master", submodules=True) + version( + "1.6.2", tag="v1.6.2", commit="cc5ac7e61def198db2a8b65c6d630343987a9f1d", submodules=True + ) version( "1.5.2", tag="v1.5.2", commit="b0e24d5b0ab0d518317d6b263a257ae72d4d29a2", submodules=True ) @@ -43,8 +46,8 @@ class PyNanobind(PythonPackage): depends_on("py-setuptools@42:", type="build") depends_on("py-scikit-build", type="build") - depends_on("py-cmake@3.17:", type="build") - depends_on("py-ninja", type="build") + depends_on("cmake@3.17:", type="build") + depends_on("ninja", type="build") @property def cmake_prefix_paths(self): From 9f151874c5d865001223c1a0dc1eaba309d8a8ae Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 16 Oct 2023 00:08:11 +0200 Subject: [PATCH 196/408] spack checksum: handle all versions dropped better (#40530) * spack checksum: fix error when all versions are dropped * add test --- lib/spack/spack/cmd/checksum.py | 2 +- lib/spack/spack/stage.py | 6 ++++-- lib/spack/spack/test/cmd/checksum.py | 16 ++++++++++++++++ 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index fa969c097995b1..efa4a268c16b5b 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -154,7 +154,7 @@ def checksum(parser, args): filtered_url_dict = spack.stage.interactive_version_filter( url_dict, pkg.versions, url_changes=url_changed_for_version ) - if filtered_url_dict is None: + if not filtered_url_dict: exit(0) url_dict = filtered_url_dict else: diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index c86ed1955bd889..90fb193a63573c 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -893,7 +893,9 @@ def interactive_version_filter( if print_header: has_filter = version_filter != VersionList([":"]) header = [] - if len(sorted_and_filtered) == len(orig_url_dict): + if not sorted_and_filtered: + header.append("No versions selected") + elif len(sorted_and_filtered) == len(orig_url_dict): header.append( f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}" ) @@ -901,7 +903,7 @@ def interactive_version_filter( header.append( f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions" ) - if known_versions: + if sorted_and_filtered and known_versions: num_new = sum(1 for v in sorted_and_filtered if v not in known_versions) header.append(f"{llnl.string.plural(num_new, 'new version')}") if has_filter: diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index ce7784c7a1d3b1..8001334e3e2142 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -169,6 +169,22 @@ def test_checksum_interactive_quit_from_ask_each(): } +def test_checksum_interactive_nothing_left(): + """If nothing is left after interactive filtering, return an empty dict.""" + input = 
input_from_commands("f", "@2", "c") + assert ( + interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) + == {} + ) + + def test_checksum_interactive_new_only(): # The 1.0 version is known already, and should be dropped on `n`. input = input_from_commands("n", "c") From 065569e3f6095283397372c30a9c0db26600b300 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 15 Oct 2023 15:48:05 -0700 Subject: [PATCH 197/408] README.md: tweak matrix description to indicate bridging (#40540) This tweaks the matrix description to indicate that it's bridged with Slack. So people don't think they're missing out (even though the icon says there are only 3 users on Matrix). --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ecbaf1ae71bb42..c4c784cd1a6aba 100644 --- a/README.md +++ b/README.md @@ -63,8 +63,10 @@ Resources: * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com). To get an invitation, visit [slack.spack.io](https://slack.spack.io). -* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org). -* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A. +* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org): + [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack. +* [**Github Discussions**](https://github.com/spack/spack/discussions): + not just for discussions, also Q&A. * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack) * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to `@mention` us! 
From 5842a8df9ad7be7efb7c2665101635ec455181fd Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Sun, 15 Oct 2023 17:43:10 -0600 Subject: [PATCH 198/408] fix: flux-core needs libarchive with +iconv (#40541) Signed-off-by: vsoch Co-authored-by: vsoch --- var/spack/repos/builtin/packages/flux-core/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 70d8cc9873c24c..09d5b2e4999da3 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -125,7 +125,7 @@ class FluxCore(AutotoolsPackage): conflicts("platform=darwin", msg="flux-core does not support MacOS based platforms.") conflicts("platform=windows", msg="flux-core does not support Windows based platforms.") - depends_on("libarchive", when="@0.38.0:") + depends_on("libarchive+iconv", when="@0.38.0:") depends_on("ncurses@6.2:", when="@0.32.0:") depends_on("libzmq@4.0.4:") depends_on("czmq@3.0.1:") From 3ac3a445a8a68b66e7e321f93f96b6c6c07a9102 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Mon, 16 Oct 2023 13:51:11 +0200 Subject: [PATCH 199/408] [add] py-cylc-uiserver: new recipe (#39983) * [add] py-cylc-uiserver: new recipe * py-cylc-uiserver: remove version constraint on the dependence python * [fix] py-cylc-uiserver: add forgotten dependence py-graphql-core --------- Co-authored-by: LydDeb --- .../packages/py-cylc-uiserver/package.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-cylc-uiserver/package.py diff --git a/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py b/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py new file mode 100644 index 00000000000000..57345f8a11257f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcUiserver(PythonPackage): + """Cylc UI Server.""" + + homepage = "https://github.com/cylc/cylc-uiserver/" + pypi = "cylc-uiserver/cylc-uiserver-1.3.0.tar.gz" + + maintainers("LydDeb") + + version("1.3.0", sha256="f3526e470c7ac2b61bf69e9b8d17fc7a513392219d28baed9b1166dcc7033d7a") + + depends_on("py-wheel", type="build") + depends_on("py-setuptools@40.9.0:", type="build") + depends_on("py-cylc-flow@8.2", type=("build", "run")) + depends_on("py-ansimarkup@1.0.0:", type=("build", "run")) + depends_on("py-graphene", type=("build", "run")) + depends_on("py-graphene-tornado@2.6", type=("build", "run")) + depends_on("py-graphql-ws@0.4.4", type=("build", "run")) + depends_on("py-jupyter-server@1.10.2:1", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-tornado@6.1.0:", type=("build", "run")) + depends_on("py-traitlets@5.2.1:", type=("build", "run")) + depends_on("py-pyzmq", type=("build", "run")) + depends_on("py-graphql-core", type=("build", "run")) + depends_on("py-rx@:1", type=("build", "run")) From 2e8c1a56dd063bda2e3a06658a1e6c495c4a70bf Mon Sep 17 00:00:00 2001 From: "Garth N. 
Wells" Date: Mon, 16 Oct 2023 12:52:59 +0100 Subject: [PATCH 200/408] py-fenics-ufl: update version and add test (#40534) * Update py-ufl vesion * Syntax fix * Syntax fix * Add test * Updates following comments --- .../builtin/packages/py-fenics-ufl/package.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-fenics-ufl/package.py b/var/spack/repos/builtin/packages/py-fenics-ufl/package.py index 7439e2c76ba7fb..890b1934649b07 100644 --- a/var/spack/repos/builtin/packages/py-fenics-ufl/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-ufl/package.py @@ -19,6 +19,7 @@ class PyFenicsUfl(PythonPackage): maintainers("chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("2023.2.0", sha256="d1d3209e8ebd4bd70513c26890f51823bac90edc956233c47bd8e686e064436e") version( "2023.1.1.post0", sha256="9e6e87f1447635029cec42604f62a76bba84899beb4b8822af10389d1f93a9b6" ) @@ -36,7 +37,17 @@ class PyFenicsUfl(PythonPackage): ) version("2016.2.0", tag="ufl-2016.2.0", commit="962d56f65821fb9c50ca4a5a858882c472243431") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools@58:", type=("build", "run"), when="@2022.1.0:") - depends_on("py-setuptools@40:", type=("build", "run")) - depends_on("py-numpy@1.21:", type=("build", "run")) + depends_on("python@3.8:", when="@2023.2.0:", type=("build", "run")) + + depends_on("py-setuptools@62:", when="@2023.2.0:", type="build") + depends_on("py-setuptools@58:", when="@2022.1.0:2023.1.1.post0", type="build") + depends_on("py-setuptools@40:", when="@2016.2.0:2021.1.0", type="build") + depends_on("py-numpy", type=("build", "run")) + + depends_on("py-pytest", type="test") + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_build(self): + with working_dir("test"): + Executable("py.test")() From 172597c279f59e8f52777dd7ce2694febf08977e Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Mon, 16 Oct 2023 16:24:21 +0200 Subject: [PATCH 201/408] intel-oneapi-compilers: ifx uses --gcc-name & --gxx-name (#40557) `ifx` uses the older syntax instead of `--gcc-toolchain`. Tested up to version 2023.2.0. --- .../repos/builtin/packages/intel-oneapi-compilers/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 72c56471c27e1b..84c560c2409825 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -276,12 +276,12 @@ def extend_config_flags(self): llvm_flags.append("-Wno-unused-command-line-argument") self.write_config_file( - common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx", "ifx"] + common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx"] ) self.write_config_file( common_flags + classic_flags, self.component_prefix.linux.bin.intel64, - ["icc", "icpc", "ifort"], + ["icc", "icpc", "ifort", "ifx"], ) def _ld_library_path(self): From fd70d81aa97366650b751b6fc32ebd95fba23b71 Mon Sep 17 00:00:00 2001 From: "Garth N. 
Wells" Date: Mon, 16 Oct 2023 15:53:02 +0100 Subject: [PATCH 202/408] fenics-basix: update for v0.7 (#40440) * Uodate for Basix 0.7 * Version fix for nanobind dependency * Simplification * Version update and simplify dependencies * Add comment on location of pyproject.toml * Update var/spack/repos/builtin/packages/py-fenics-basix/package.py Co-authored-by: Adam J. Stewart --------- Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/fenics-basix/package.py | 1 + .../builtin/packages/py-fenics-basix/package.py | 14 +++++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/fenics-basix/package.py b/var/spack/repos/builtin/packages/fenics-basix/package.py index e4a3833bc05b9a..0de31ae59ff624 100644 --- a/var/spack/repos/builtin/packages/fenics-basix/package.py +++ b/var/spack/repos/builtin/packages/fenics-basix/package.py @@ -15,6 +15,7 @@ class FenicsBasix(CMakePackage): maintainers("mscroggs", "chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="9bee81b396ee452eec8d9735f278cb44cb6994c6bc30aec8ed9bb4b12d83fa7f") version("0.6.0", sha256="687ae53153c98facac4080dcdc7081701db1dcea8c5e7ae3feb72aec17f83304") version("0.5.1", sha256="69133476ac35f0bd0deccb480676030378c341d7dfb2adaca22cd16b7e1dc1cb") version("0.4.2", sha256="a54f5e442b7cbf3dbb6319c682f9161272557bd7f42e2b8b8ccef88bc1b7a22f") diff --git a/var/spack/repos/builtin/packages/py-fenics-basix/package.py b/var/spack/repos/builtin/packages/py-fenics-basix/package.py index 9e6d5b44b7a925..6d3ffa6cae3754 100644 --- a/var/spack/repos/builtin/packages/py-fenics-basix/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-basix/package.py @@ -15,22 +15,26 @@ class PyFenicsBasix(PythonPackage): maintainers("chrisrichardson", "mscroggs", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="9bee81b396ee452eec8d9735f278cb44cb6994c6bc30aec8ed9bb4b12d83fa7f") version("0.6.0", sha256="687ae53153c98facac4080dcdc7081701db1dcea8c5e7ae3feb72aec17f83304") version("0.5.1", sha256="69133476ac35f0bd0deccb480676030378c341d7dfb2adaca22cd16b7e1dc1cb") version("0.4.2", sha256="a54f5e442b7cbf3dbb6319c682f9161272557bd7f42e2b8b8ccef88bc1b7a22f") depends_on("fenics-basix@main", type=("build", "run"), when="@main") + depends_on("fenics-basix@0.7.0", type=("build", "run"), when="@0.7.0") depends_on("fenics-basix@0.6.0", type=("build", "run"), when="@0.6.0") depends_on("fenics-basix@0.5.1", type=("build", "run"), when="@0.5.1") depends_on("fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2") + # See python/CMakeLists.txt + depends_on("cmake@3.16:", type="build") + + # See python/pyproject.toml + depends_on("python@3.8:", when="@0.7.0:", type=("build", "run")) depends_on("py-setuptools@42:", type="build") - depends_on("py-setuptools@40:", type="build") depends_on("py-numpy@1.21:", type=("build", "run")) - depends_on("cmake@3.19:", type="build") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-pybind11@2.9.1:", type="build") - depends_on("py-pybind11@2.2.4:", type="build") + depends_on("py-pybind11@2.9.1:", when="@:0.7", type="build") + depends_on("py-nanobind@1.5.1:", when="@0.8:", type="build") depends_on("xtensor@0.23.10:", type="build", when="@:0.4") From 8907efa144519c91c7451ee9d5458446f48b43ef Mon Sep 17 00:00:00 2001 From: Diego Alvarez S Date: Mon, 16 Oct 2023 18:06:36 +0200 Subject: [PATCH 203/408] Add nextflow 23.10.0 (#40547) --- var/spack/repos/builtin/packages/nextflow/package.py | 5 +++++ 1 
file changed, 5 insertions(+)

diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py
index 71f1e84323abe8..379a3c9ba3307a 100644
--- a/var/spack/repos/builtin/packages/nextflow/package.py
+++ b/var/spack/repos/builtin/packages/nextflow/package.py
@@ -14,6 +14,11 @@ class Nextflow(Package):

     maintainers("dialvarezs", "marcodelapierre")

+    version(
+        "23.10.0",
+        sha256="4b7fba61ecc6d53a6850390bb435455a54ae4d0c3108199f88b16b49e555afdd",
+        expand=False,
+    )
     version(
         "23.04.3",
         sha256="258714c0772db3cab567267e8441c5b72102381f6bd58fc6957c2972235be7e0",

From 2f4dc6f2f32754e187f0750f8dc5a77b65bb45ef Mon Sep 17 00:00:00 2001
From: Dom Heinzeller
Date: Mon, 16 Oct 2023 10:28:38 -0600
Subject: [PATCH 204/408] Fix version incompatibilities of py-pandas and py-openpyxl (#40472)

* Fix version incompatibilities of py-pandas and py-openpyxl

* Add variant excel for py-pandas

* Add package py-pyxlsb

* Add versions for py-xlsxwriter

* Define excel dependencies for py-pandas 1.4, 1.5, 2.0, 2.1

* Fix variant excel in py-pandas

* Add package py-odfpy, which is also a dependency for py-pandas@2.0:

* Rearrange excel dependencies for py-pandas

* Change url to pypi

* Add missing newline to fix style in py-odfpy
---
 .../builtin/packages/py-odfpy/package.py | 18 ++++++++++++++++++
 .../builtin/packages/py-pandas/package.py | 16 ++++++++++++++++
 .../builtin/packages/py-pyxlsb/package.py | 19 +++++++++++++++++++
 .../builtin/packages/py-xlsxwriter/package.py | 4 ++++
 4 files changed, 57 insertions(+)
 create mode 100644 var/spack/repos/builtin/packages/py-odfpy/package.py
 create mode 100644 var/spack/repos/builtin/packages/py-pyxlsb/package.py

diff --git a/var/spack/repos/builtin/packages/py-odfpy/package.py b/var/spack/repos/builtin/packages/py-odfpy/package.py
new file mode 100644
index 00000000000000..338fb3beab4bd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-odfpy/package.py
@@ -0,0 +1,18 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PyOdfpy(PythonPackage):
+    """Odfpy is a library to read and write OpenDocument v.
1.2 files.""" + + homepage = "https://github.com/eea/odfpy" + pypi = "odfpy/odfpy-1.4.1.tar.gz" + + version("1.4.1", sha256="db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec") + + depends_on("py-setuptools", type="build") + depends_on("py-defusedxml", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index e7ed9e7e60da4c..f3d531f3bc382f 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,6 +17,8 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") + variant("excel", when="@1.4:", default=False, description="Build with support for Excel") + version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c") @@ -129,6 +131,20 @@ class PyPandas(PythonPackage): # Optional dependencies # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#optional-dependencies + # Excel dependencies for 1.4+ (not coded up for earlier versions) + depends_on("py-odfpy@1.4.1:", type=("run"), when="@2.0: +excel") + depends_on("py-openpyxl@3.0.10:", type=("run"), when="@2.1: +excel") + depends_on("py-openpyxl@3.0.7:", type=("run"), when="@1.5: +excel") + depends_on("py-openpyxl@3.0.3:", type=("run"), when="@1.4: +excel") + depends_on("py-pyxlsb@1.0.9:", type=("run"), when="@2.1: +excel") + depends_on("py-pyxlsb@1.0.8:", type=("run"), when="@1.5: +excel") + depends_on("py-pyxlsb@1.0.6:", type=("run"), when="@1.4: +excel") + depends_on("py-xlrd@2.0.1:", type=("run"), when="@1.4: +excel") + depends_on("py-xlwt@1.3.0:", type=("run"), when="@1.4:1.5 +excel") + depends_on("py-xlsxwriter@3.0.3:", type=("run"), when="@2.1: +excel") + depends_on("py-xlsxwriter@1.4.3:", type=("run"), when="@1.5: +excel") + depends_on("py-xlsxwriter@1.2.2:", type=("run"), when="@1.4: +excel") + # Historical dependencies depends_on("py-setuptools@61:", when="@2.0", type="build") depends_on("py-setuptools@51:", when="@1.3.2:1", type="build") diff --git a/var/spack/repos/builtin/packages/py-pyxlsb/package.py b/var/spack/repos/builtin/packages/py-pyxlsb/package.py new file mode 100644 index 00000000000000..d873e0b68b0533 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyxlsb/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyPyxlsb(PythonPackage):
+    """Excel 2007-2010 Binary Workbook (xlsb) parser"""
+
+    pypi = "pyxlsb/pyxlsb-1.0.10.tar.gz"
+
+    version("1.0.10", sha256="8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685")
+    version("1.0.8", sha256="dcf26d6494b45d8852d68571f828c2361b74711a2e19ba03eee77f96b9210464")
+    version("1.0.6", sha256="47e8230582de15ad9824a456d1d4cb36a6535f4ad5e5eb2464d31f0445b9db46")
+
+    depends_on("py-setuptools", type="build")
diff --git a/var/spack/repos/builtin/packages/py-xlsxwriter/package.py b/var/spack/repos/builtin/packages/py-xlsxwriter/package.py
index af333f03c08615..09c2285311cd76 100644
--- a/var/spack/repos/builtin/packages/py-xlsxwriter/package.py
+++ b/var/spack/repos/builtin/packages/py-xlsxwriter/package.py
@@ -12,6 +12,10 @@ class PyXlsxwriter(PythonPackage):

     pypi = "XlsxWriter/XlsxWriter-1.0.2.tar.gz"

+    version("3.1.7", sha256="353042efb0f8551ce72baa087e98228f3394fcb380e8b96313edf1eec8d50823")
+    version("3.0.3", sha256="e89f4a1d2fa2c9ea15cde77de95cd3fd8b0345d0efb3964623f395c8c4988b7f")
+    version("1.4.3", sha256="641db6e7b4f4982fd407a3f372f45b878766098250d26963e95e50121168cbe2")
+    version("1.2.2", sha256="5a5e2195a4672d17db79839bbdf1006a521adb57eaceea1c335ae4b3d19f088f")
     version("1.0.2", sha256="a26bbbafff88abffce592ffd5dfaa4c9f08dc44ef4afbf45c70d3e270325f856")

     depends_on("py-setuptools", type="build")

From 09e3dda6e0bdaf7665e1dd823efa66429103dc89 Mon Sep 17 00:00:00 2001
From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com>
Date: Mon, 16 Oct 2023 10:25:10 -0700
Subject: [PATCH 205/408] Updating patch to enable flag mcode-object-version=none (#40367)

* Updating patch to add flag mcode-object-version=none when device libs is built as part of llvm-amdgpu

* Limiting patch to +rocm-device-libs variant and adding appropriate comment for the patch

* Updating llvmpatch as per the mainline code
  Updating hsa-rocr patch as per the latest code
  Updating the if elif condition for the hip test src path

* Updating flags for 5.5 releases and above

* Updating build flags and patches
---
 .../repos/builtin/packages/hip/package.py | 9 ++--
 .../0002-Remove-explicit-RPATH-again.patch | 16 +++---
 .../builtin/packages/hsa-rocr-dev/package.py | 2 +-
 .../builtin/packages/hsakmt-roct/package.py | 7 ++-
 .../builtin/packages/llvm-amdgpu/package.py | 11 ++++-
 .../llvm-amdgpu/patch-llvm-5.5.0.patch | 49 ++++++++++++++-----
 6 files changed, 64 insertions(+), 30 deletions(-)

diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py
index 5e1d6744cb65f4..1200cfdd2cb72d 100644
--- a/var/spack/repos/builtin/packages/hip/package.py
+++ b/var/spack/repos/builtin/packages/hip/package.py
@@ -715,11 +715,10 @@ def cache_test_sources(self):
         install test subdirectory for use during `spack test run`."""
         if self.spec.satisfies("@:5.1.0"):
             return
-        else:
-            if "@:5.5" in self.spec:
-                self.test_src_dir = "samples"
-            else:
-                self.test_src_dir = "hip-tests/samples"
+        elif self.spec.satisfies("@5.1:5.5"):
+            self.test_src_dir = "samples"
+        elif self.spec.satisfies("@5.6:"):
+            self.test_src_dir = "hip-tests/samples"
         self.cache_extra_test_sources([self.test_src_dir])

     def test_samples(self):
diff --git a/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch b/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch
index 58fd1e8cd94900..7d3150e40c3f29 100644
---
a/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch +++ b/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch @@ -1,17 +1,17 @@ -From b5a49e6de81e7a6cba86694ee5ba2486cd999976 Mon Sep 17 00:00:00 2001 -From: Harmen Stoppels -Date: Fri, 28 Aug 2020 18:26:54 +0200 -Subject: [PATCH] Remove explicit RPATH again +From fb6bc54d50ec511118557bfad7f1b892adcc1a1d Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 10 Oct 2023 01:15:08 +0000 +Subject: [PATCH] Updating patch for the latest code --- src/CMakeLists.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index 9de7842..66c6880 100644 +index 8fb02b1..b40c972 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt -@@ -134,9 +134,6 @@ target_include_directories( ${CORE_RUNTIME_TARGET} +@@ -122,9 +122,6 @@ target_include_directories( ${CORE_RUNTIME_TARGET} ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/libamdhsacode ) @@ -19,8 +19,8 @@ index 9de7842..66c6880 100644 -set_property(TARGET ${CORE_RUNTIME_TARGET} PROPERTY INSTALL_RPATH "$ORIGIN;$ORIGIN/../../lib;$ORIGIN/../../lib64;$ORIGIN/../lib64" ) - ## ------------------------- Linux Compiler and Linker options ------------------------- - set ( HSA_CXX_FLAGS ${HSA_COMMON_CXX_FLAGS} -Werror -fexceptions -fno-rtti -fvisibility=hidden -Wno-error=missing-braces -Wno-error=sign-compare -Wno-sign-compare -Wno-write-strings -Wno-conversion-null -fno-math-errno -fno-threadsafe-statics -fmerge-all-constants -fms-extensions -Wno-error=comment -Wno-comment -Wno-error=pointer-arith -Wno-pointer-arith -Wno-error=unused-variable -Wno-error=unused-function ) + set ( HSA_CXX_FLAGS ${HSA_COMMON_CXX_FLAGS} -fexceptions -fno-rtti -fvisibility=hidden -Wno-error=missing-braces -Wno-error=sign-compare -Wno-sign-compare -Wno-write-strings -Wno-conversion-null -fno-math-errno -fno-threadsafe-statics -fmerge-all-constants -fms-extensions -Wno-error=comment -Wno-comment -Wno-error=pointer-arith -Wno-pointer-arith -Wno-error=unused-variable -Wno-error=unused-function ) -- -2.25.1 +2.31.1 diff --git a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py index 8a240502260ae7..d6e42a83fcd640 100644 --- a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py +++ b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py @@ -162,7 +162,7 @@ class HsaRocrDev(CMakePackage): # Both 3.5.0 and 3.7.0 force INSTALL_RPATH in different ways patch("0001-Do-not-set-an-explicit-rpath-by-default-since-packag.patch", when="@3.5.0") - patch("0002-Remove-explicit-RPATH-again.patch", when="@3.7.0:5.5") + patch("0002-Remove-explicit-RPATH-again.patch", when="@3.7.0:5.6") root_cmakelists_dir = "src" diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/package.py b/var/spack/repos/builtin/packages/hsakmt-roct/package.py index 0bebaae6bf3df5..571cffd6577ee1 100644 --- a/var/spack/repos/builtin/packages/hsakmt-roct/package.py +++ b/var/spack/repos/builtin/packages/hsakmt-roct/package.py @@ -132,8 +132,11 @@ def install_targets(self): else: return ["install"] - def cmake_args(self): - args = [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] + args = [] + if self.spec.satisfies("@:5.4.3"): + args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) + else: + args.append(self.define("BUILD_SHARED_LIBS", False)) if self.spec.satisfies("@5.4.3:"): args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git 
a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py index d69575d9338105..905cfe1f831fff 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py @@ -162,13 +162,17 @@ class LlvmAmdgpu(CMakePackage): # as per 5.2.0 llvm code. It used to be llvm/bin/../lib/libdevice. # Below patch is to look in the old path. patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:") - patch("patch-llvm-5.5.0.patch", when="@5.5:") + + # Below patch is to set the flag -mcode-object-version=none until + # the below fix is available in device-libs release code. + # https://github.com/RadeonOpenCompute/ROCm-Device-Libs/commit/f0356159dbdc93ea9e545f9b61a7842f9c881fdf + patch("patch-llvm-5.5.0.patch", when="@5.5: +rocm-device-libs") # i1 muls can sometimes happen after SCEV. # They resulted in ISel failures because we were missing the patterns for them. # This fix is targeting 6.1 rocm release. # Need patch until https://github.com/llvm/llvm-project/pull/67291 is merged. - patch("001-Add-i1-mul-patterns.patch", when="@5.6:") + patch("001-Add-i1-mul-patterns.patch", when="@5.6") conflicts("^cmake@3.19.0") @@ -285,6 +289,9 @@ def cmake_args(self): args.append(self.define("GCC_INSTALL_PREFIX", self.compiler.prefix)) if self.spec.satisfies("@5.4.3:"): args.append("-DCMAKE_INSTALL_LIBDIR=lib") + if self.spec.satisfies("@5.5.0:"): + args.append("-DCLANG_DEFAULT_RTLIB=compiler-rt") + args.append("-DCLANG_DEFAULT_UNWINDLIB=libgcc") return args @run_after("install") diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch b/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch index 30e18a7da569b2..1495b6d3a39ac1 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch @@ -1,13 +1,38 @@ -diff --git a/clang/include/clang/Driver/Options.td b/clang/include/clang/Driver/Options.td -index bb4374b..36e7f7b 100644 ---- a/clang/include/clang/Driver/Options.td -+++ b/clang/include/clang/Driver/Options.td -@@ -3739,7 +3739,7 @@ def mcode_object_version_EQ : Joined<["-"], "mcode-object-version=">, Group, - NormalizedValuesScope<"TargetOptions">, - NormalizedValues<["COV_None", "COV_2", "COV_3", "COV_4", "COV_5"]>, -- MarshallingInfoEnum, "COV_5">; -+ MarshallingInfoEnum, "COV_4">; +From 7010d5da727825321d31863ceb9e2fe9eb22b5b9 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 10 Oct 2023 05:16:47 +0000 +Subject: [PATCH] Condition check for enabling the flag + -mcode-object-version=none is failing in spack when device-libsis built with + llvm-amdgpu. The flag is required here as well as standalon build. + +--- + rocm-device-libs/cmake/OCL.cmake | 13 ++----------- + 1 file changed, 2 insertions(+), 11 deletions(-) + +diff --git a/rocm-device-libs/cmake/OCL.cmake b/rocm-device-libs/cmake/OCL.cmake +index 773c6f62e..30f60030b 100644 +--- a/rocm-device-libs/cmake/OCL.cmake ++++ b/rocm-device-libs/cmake/OCL.cmake +@@ -30,17 +30,8 @@ if (WIN32) + set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -fshort-wchar) + endif() - defm code_object_v3_legacy : SimpleMFlag<"code-object-v3", - "Legacy option to specify code object ABI V3", +-# Disable code object version module flag if available. 
+-file(WRITE ${CMAKE_BINARY_DIR}/tmp.cl "") +-execute_process ( +- COMMAND ${LLVM_TOOLS_BINARY_DIR}/clang${EXE_SUFFIX} ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none ${CMAKE_BINARY_DIR}/tmp.cl +- RESULT_VARIABLE TEST_CODE_OBJECT_VERSION_NONE_RESULT +- ERROR_QUIET +-) +-file(REMOVE ${CMAKE_BINARY_DIR}/tmp.cl) +-if (NOT TEST_CODE_OBJECT_VERSION_NONE_RESULT) +- set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none) +-endif() ++# Disable code object version module flag. ++set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none) + + set (BC_EXT .bc) + set (LIB_SUFFIX ".lib${BC_EXT}") +-- +2.31.1 + From 1408009a7c9e45f4996ad2495e119684970f2e84 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 16 Oct 2023 14:14:46 -0500 Subject: [PATCH 206/408] py-lightning: add v2.1.0 (#40496) --- .../builtin/packages/py-lightning/package.py | 101 ++++++++++-------- 1 file changed, 55 insertions(+), 46 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index 2dc383b1b416ec..f5131ec0715265 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -7,15 +7,15 @@ class PyLightning(PythonPackage): - """Use Lightning Apps to build everything from production-ready, - multi-cloud ML systems to simple research demos. - """ + """The Deep Learning framework to train, deploy, and ship AI products Lightning fast.""" homepage = "https://github.com/Lightning-AI/lightning" pypi = "lightning/lightning-2.0.0.tar.gz" + skip_modules = ["lightning.app", "lightning.data", "lightning.store"] maintainers("adamjstewart") + version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93") version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb") version("2.0.8", sha256="db914e211b5c3b079a821be6e4344e72d0a729163676a65c4e00aae98390ae7b") version("2.0.7", sha256="f05acd4ba846505d40125b4f9f0bda0804b2b0356e2ad2fd4e4bf7d1c61c8cc6") @@ -33,55 +33,64 @@ class PyLightning(PythonPackage): depends_on("py-setuptools", type="build") # src/lightning.egg-info/requires.txt - depends_on("py-jinja2@:4", type=("build", "run")) depends_on("py-pyyaml@5.4:7", type=("build", "run")) - depends_on("py-arrow@1.2:2", type=("build", "run")) - depends_on("py-backoff@2.2.1:3", when="@2.0.5:", type=("build", "run")) - depends_on("py-beautifulsoup4@4.8:5", type=("build", "run")) - depends_on("py-click@:9", type=("build", "run")) - depends_on("py-croniter@1.3:1.4", when="@2.0.5:", type=("build", "run")) - depends_on("py-croniter@1.3", when="@:2.0.4", type=("build", "run")) - depends_on("py-dateutils@:1", type=("build", "run")) - depends_on("py-deepdiff@5.7:7", type=("build", "run")) - depends_on("py-fastapi@0.92:1", when="@2.0.4:", type=("build", "run")) - depends_on("py-fastapi@0.69:0.88", when="@2.0.3", type=("build", "run")) - depends_on("py-fastapi@:0.88", when="@:2.0.2", type=("build", "run")) - depends_on("py-fsspec@2022.5:2024+http", when="@2.0.5:", type=("build", "run")) + depends_on("py-fsspec@2021.6.1:2024+http", when="@2.1:", type=("build", "run")) + depends_on("py-fsspec@2022.5:2024+http", when="@2.0.5:2.0", type=("build", "run")) depends_on("py-fsspec@2022.5:2023+http", when="@:2.0.4", type=("build", "run")) - depends_on("py-inquirer@2.10:4", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.38:", when="@2.0.9:", type=("build", "run")) - 
depends_on("py-lightning-cloud@0.5.37:", when="@2.0.5:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.34:", when="@2.0.3:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.31:", when="@2:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.27:", when="@:1", type=("build", "run")) - depends_on("py-lightning-utilities@0.7:1", when="@2:", type=("build", "run")) + depends_on("py-lightning-utilities@0.8:1", when="@2.1:", type=("build", "run")) + depends_on("py-lightning-utilities@0.7:1", when="@2.0", type=("build", "run")) depends_on("py-lightning-utilities@0.6.0.post0:1", when="@:1", type=("build", "run")) depends_on("py-numpy@1.17.2:2", type=("build", "run")) - depends_on("py-packaging@17.1:24", type=("build", "run")) - depends_on("py-psutil@:6", type=("build", "run")) - depends_on("py-pydantic@1.7.4:2.1", when="@2.0.7:", type=("build", "run")) - depends_on("py-pydantic@1.7.4:2.0", when="@2.0.6", type=("build", "run")) - depends_on("py-pydantic@1.7.4:1", when="@2.0.5", type=("build", "run")) - depends_on("py-pydantic@1.7.4:3", when="@2.0.3:2.0.4", type=("build", "run")) - depends_on("py-pydantic@:2", when="@:2.0.2", type=("build", "run")) - depends_on("py-python-multipart@0.0.5:1", type=("build", "run")) - depends_on("py-requests@:3", type=("build", "run")) - depends_on("py-rich@12.3:14", when="@2:", type=("build", "run")) - depends_on("py-rich@:14", when="@:1", type=("build", "run")) - depends_on("py-starlette", when="@2.0.3:", type=("build", "run")) - depends_on("py-starlette@:1", when="@:2.0.2", type=("build", "run")) - depends_on("py-starsessions@1.2.1:1", type=("build", "run")) - depends_on("py-torch@1.11:3", when="@2:", type=("build", "run")) + depends_on("py-packaging@20:24", when="@2.1:", type=("build", "run")) + depends_on("py-packaging@17.1:24", when="@:2.0", type=("build", "run")) + depends_on("py-torch@1.12:3", when="@2.1:", type=("build", "run")) + depends_on("py-torch@1.11:3", when="@2.0", type=("build", "run")) depends_on("py-torch@1.10:3", when="@:1", type=("build", "run")) depends_on("py-torchmetrics@0.7:2", when="@2.0.9:", type=("build", "run")) depends_on("py-torchmetrics@0.7:1", when="@:2.0.8", type=("build", "run")) depends_on("py-tqdm@4.57:5", type=("build", "run")) - depends_on("py-traitlets@5.3:6", type=("build", "run")) depends_on("py-typing-extensions@4:5", type=("build", "run")) - depends_on("py-urllib3@:3", when="@2.0.4:", type=("build", "run")) - depends_on("py-urllib3@:2", when="@:2.0.3", type=("build", "run")) - depends_on("py-uvicorn@:1", type=("build", "run")) - depends_on("py-websocket-client@:2", type=("build", "run")) - depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) - depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) - depends_on("py-pytorch-lightning", when="@2:", type=("build", "run")) + + # Only an alias, not actually used by the library + # depends_on("py-pytorch-lightning", when="@2:", type=("build", "run")) + + # Historical requirements + with when("@:2.0"): + depends_on("py-jinja2@:4", type=("build", "run")) + depends_on("py-arrow@1.2:2", type=("build", "run")) + depends_on("py-backoff@2.2.1:3", when="@2.0.5:", type=("build", "run")) + depends_on("py-beautifulsoup4@4.8:5", type=("build", "run")) + depends_on("py-click@:9", type=("build", "run")) + depends_on("py-croniter@1.3:1.4", when="@2.0.5:", type=("build", "run")) + depends_on("py-croniter@1.3", when="@:2.0.4", type=("build", "run")) + depends_on("py-dateutils@:1", type=("build", "run")) + 
depends_on("py-deepdiff@5.7:7", type=("build", "run")) + depends_on("py-fastapi@0.92:1", when="@2.0.4:", type=("build", "run")) + depends_on("py-fastapi@0.69:0.88", when="@2.0.3", type=("build", "run")) + depends_on("py-fastapi@:0.88", when="@:2.0.2", type=("build", "run")) + depends_on("py-inquirer@2.10:4", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.38:", when="@2.0.9:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.37:", when="@2.0.5:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.34:", when="@2.0.3:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.31:", when="@2:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.27:", when="@:1", type=("build", "run")) + depends_on("py-psutil@:6", type=("build", "run")) + depends_on("py-pydantic@1.7.4:2.1", when="@2.0.7:", type=("build", "run")) + depends_on("py-pydantic@1.7.4:2.0", when="@2.0.6", type=("build", "run")) + depends_on("py-pydantic@1.7.4:1", when="@2.0.5", type=("build", "run")) + depends_on("py-pydantic@1.7.4:3", when="@2.0.3:2.0.4", type=("build", "run")) + depends_on("py-pydantic@:2", when="@:2.0.2", type=("build", "run")) + depends_on("py-python-multipart@0.0.5:1", type=("build", "run")) + depends_on("py-requests@:3", type=("build", "run")) + depends_on("py-rich@12.3:14", when="@2:", type=("build", "run")) + depends_on("py-rich@:14", when="@:1", type=("build", "run")) + depends_on("py-starlette", when="@2.0.3:", type=("build", "run")) + depends_on("py-starlette@:1", when="@:2.0.2", type=("build", "run")) + depends_on("py-starsessions@1.2.1:1", type=("build", "run")) + depends_on("py-traitlets@5.3:6", type=("build", "run")) + depends_on("py-urllib3@:3", when="@2.0.4:", type=("build", "run")) + depends_on("py-urllib3@:2", when="@:2.0.3", type=("build", "run")) + depends_on("py-uvicorn@:1", type=("build", "run")) + depends_on("py-websocket-client@:2", type=("build", "run")) + depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) + depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) From df8a27945d4dbdfff25ee2f58a993802bddaca8c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 16 Oct 2023 15:33:32 -0500 Subject: [PATCH 207/408] py-grpcio: cython 3 still not supported (#40537) --- var/spack/repos/builtin/packages/py-grpcio/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index 6b798b792eed3a..8a185d480265f0 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -37,7 +37,7 @@ class PyGrpcio(PythonPackage): depends_on("py-setuptools", type="build") depends_on("py-six@1.5.2:", when="@:1.48", type=("build", "run")) - depends_on("py-cython@0.23:", type="build") + depends_on("py-cython@0.23:2", type="build") depends_on("openssl") depends_on("zlib-api") depends_on("c-ares") From 8d9487436f7421d1b5718e6062a9a52b4457b462 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 16 Oct 2023 15:33:56 -0500 Subject: [PATCH 208/408] py-shapely: add v2.0.2 (#40523) --- var/spack/repos/builtin/packages/py-shapely/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-shapely/package.py b/var/spack/repos/builtin/packages/py-shapely/package.py index cec0aa4911a999..fa12d498e5f20b 100644 --- a/var/spack/repos/builtin/packages/py-shapely/package.py +++ b/var/spack/repos/builtin/packages/py-shapely/package.py @@ -19,6 +19,7 @@ class PyShapely(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("2.0.2", sha256="1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7") version("2.0.1", sha256="66a6b1a3e72ece97fc85536a281476f9b7794de2e646ca8a4517e2e3c1446893") version("2.0.0", sha256="11f1b1231a6c04213fb1226c6968d1b1b3b369ec42d1e9655066af87631860ea") version("1.8.5", sha256="e82b6d60ecfb124120c88fe106a478596bbeab142116d7e7f64a364dac902a92") @@ -32,7 +33,8 @@ class PyShapely(PythonPackage): version("1.6.4", sha256="b10bc4199cfefcf1c0e5d932eac89369550320ca4bdf40559328d85f1ca4f655") # pyproject.toml - depends_on("py-cython@0.29:0", when="@2:", type="build") + depends_on("py-cython", when="@2.0.2:", type="build") + depends_on("py-cython@0.29:0", when="@2.0.0:2.0.1", type="build") depends_on("py-cython@0.29.24:2", when="@:1", type="build") depends_on("py-setuptools@61:", when="@2:", type="build") depends_on("py-setuptools@:63", when="@:1", type="build") From c806f8285dc38a938128f8a2e156f67ba225e3d0 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 16 Oct 2023 15:34:13 -0500 Subject: [PATCH 209/408] py-fiona: add v1.9.5 (#40497) --- .../builtin/packages/py-fiona/package.py | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-fiona/package.py b/var/spack/repos/builtin/packages/py-fiona/package.py index 37da11dd4cba8b..917a20ab09099d 100644 --- a/var/spack/repos/builtin/packages/py-fiona/package.py +++ b/var/spack/repos/builtin/packages/py-fiona/package.py @@ -10,12 +10,13 @@ class PyFiona(PythonPackage): """Fiona reads and writes spatial data files.""" homepage = "https://github.com/Toblerity/Fiona" - pypi = "Fiona/Fiona-1.8.18.tar.gz" + pypi = "fiona/fiona-1.9.5.tar.gz" git = "https://github.com/Toblerity/Fiona.git" maintainers("adamjstewart") version("master", branch="master") + version("1.9.5", sha256="99e2604332caa7692855c2ae6ed91e1fffdf9b59449aa8032dd18e070e59a2f7") version("1.9.4", sha256="49f18cbcd3b1f97128c1bb038c3451b2e1be25baa52f02ce906c25cf75af95b6") version("1.9.3", sha256="60f3789ad9633c3a26acf7cbe39e82e3c7a12562c59af1d599fc3e4e8f7f8f25") version("1.9.2", sha256="f9263c5f97206bf2eb2c010d52e8ffc54e96886b0e698badde25ff109b32952a") @@ -27,11 +28,13 @@ class PyFiona(PythonPackage): version("1.8.18", sha256="b732ece0ff8886a29c439723a3e1fc382718804bb057519d537a81308854967a") # pyproject.toml - depends_on("python@3.7:", when="@1.9:", type=("build", "link", "run")) - depends_on("python@2.6:", when="@1.8.22:1.8", type=("build", "link", "run")) - depends_on("python@2.6:3.10", when="@1.8.21", type=("build", "link", "run")) - depends_on("python@2.6:3.9", when="@:1.8.20", type=("build", "link", "run")) - depends_on("py-cython@0.29.29:0.29", when="@1.9:", type="build") + depends_on("python@:3.10", when="@1.8.21", type=("build", "link", "run")) + depends_on("python@:3.9", when="@:1.8.20", type=("build", "link", "run")) + depends_on("py-cython", type="build") + # Overly strict version 
requirements + # depends_on("py-cython@3.0.2:3", when="@1.9.5:", type="build") + # depends_on("py-cython@0.29.29:0.29", when="@1.9.0:1.9.4", type="build") + depends_on("py-setuptools@67.8:", when="@1.9.5:", type="build") depends_on("py-setuptools@61:", when="@1.9:", type="build") depends_on("py-attrs@19.2:", when="@1.9:", type=("build", "run")) depends_on("py-attrs@17:", type=("build", "run")) @@ -43,12 +46,20 @@ class PyFiona(PythonPackage): depends_on("py-importlib-metadata", when="@1.9.2: ^python@:3.9", type=("build", "run")) depends_on("py-six", when="@1.9.4:", type=("build", "run")) depends_on("py-six@1.7:", when="@:1.8", type=("build", "run")) + depends_on("py-setuptools", when="@:1.9.1,1.9.5:", type="run") # setup.py or release notes depends_on("gdal@3.1:", when="@1.9:", type=("build", "link", "run")) depends_on("gdal@1.8:", type=("build", "link", "run")) # Historical dependencies - depends_on("py-setuptools", when="@:1.9.1", type=("build", "run")) depends_on("py-munch@2.3.2:", when="@1.9.0:1.9.3", type=("build", "run")) depends_on("py-munch", when="@:1.8", type=("build", "run")) + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/{0}/{0}iona/{0}iona-{1}.tar.gz" + if version >= Version("1.9.5"): + letter = "f" + else: + letter = "F" + return url.format(letter, version) From feebdb4d97968aef5e7e46d7172d45b00710072f Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 16 Oct 2023 22:36:22 +0200 Subject: [PATCH 210/408] Use string representation of deptypes for concrete specs (#40566) --- lib/spack/spack/solver/asp.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 18328acd62235b..1ff5ccf31889e0 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1883,7 +1883,11 @@ class Body: continue # skip build dependencies of already-installed specs if concrete_build_deps or dtype != dt.BUILD: - clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype)) + clauses.append( + fn.attr( + "depends_on", spec.name, dep.name, dt.flag_to_string(dtype) + ) + ) for virtual_name in dspec.virtuals: clauses.append( fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name) From dba5903614fa018342183e0a7e95bbf6d301a666 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 16 Oct 2023 15:37:50 -0500 Subject: [PATCH 211/408] py-grayskull: add new package (#40293) * py-grayskull: add new package * [@spackbot] updating style on behalf of adamjstewart --------- Co-authored-by: adamjstewart --- .../packages/py-conda-souschef/package.py | 21 +++++++++++ .../builtin/packages/py-grayskull/package.py | 35 +++++++++++++++++++ .../builtin/packages/py-rapidfuzz/package.py | 10 ++++-- .../packages/py-ruamel-yaml-jinja2/package.py | 20 +++++++++++ .../packages/py-scikit-build/package.py | 31 ++++++++++++---- .../builtin/packages/py-semver/package.py | 2 ++ 6 files changed, 111 insertions(+), 8 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-conda-souschef/package.py create mode 100644 var/spack/repos/builtin/packages/py-grayskull/package.py create mode 100644 var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py diff --git a/var/spack/repos/builtin/packages/py-conda-souschef/package.py b/var/spack/repos/builtin/packages/py-conda-souschef/package.py new file mode 100644 index 00000000000000..5c4ddc7d937e41 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-conda-souschef/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCondaSouschef(PythonPackage): + """Project to handle conda recipes.""" + + homepage = "https://github.com/marcelotrevisani/souschef" + pypi = "conda-souschef/conda-souschef-2.2.3.tar.gz" + + version("2.2.3", sha256="9bf3dba0676bc97616636b80ad4a75cd90582252d11c86ed9d3456afb939c0c3") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@30.3:", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-ruamel-yaml@0.15.3:", type=("build", "run")) + depends_on("py-ruamel-yaml-jinja2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-grayskull/package.py b/var/spack/repos/builtin/packages/py-grayskull/package.py new file mode 100644 index 00000000000000..f1375aa4567062 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-grayskull/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGrayskull(PythonPackage): + """Project to generate recipes for conda packages.""" + + homepage = "https://github.com/conda/grayskull" + pypi = "grayskull/grayskull-2.5.0.tar.gz" + + version("2.5.0", sha256="b021138655be550fd1b93b8db08b9c66169fac9cba6bcdad1411263e12fc703f") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@61:", type="build") + depends_on("py-setuptools-scm@6.2:+toml", type="build") + depends_on("py-beautifulsoup4", type=("build", "run")) + depends_on("py-colorama", type=("build", "run")) + depends_on("py-conda-souschef@2.2.3:", type=("build", "run")) + depends_on("py-packaging@21.3:", type=("build", "run")) + depends_on("py-pip", type=("build", "run")) + depends_on("py-pkginfo", type=("build", "run")) + depends_on("py-progressbar2@3.53:", type=("build", "run")) + depends_on("py-rapidfuzz@3:", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-ruamel-yaml@0.16.10:", type=("build", "run")) + depends_on("py-ruamel-yaml-jinja2", type=("build", "run")) + depends_on("py-setuptools@30.3:", type=("build", "run")) + depends_on("py-semver@3.0", type=("build", "run")) + depends_on("py-stdlib-list", type=("build", "run")) + depends_on("py-tomli", type=("build", "run")) + depends_on("py-tomli-w", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py index e5b4c9ae34a141..5bfd1563b5bb78 100644 --- a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py +++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py @@ -12,8 +12,14 @@ class PyRapidfuzz(PythonPackage): homepage = "https://github.com/maxbachmann/rapidfuzz" pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz" + version("3.3.1", sha256="6783b3852f15ed7567688e2e358757a7b4f38683a915ba5edc6c64f1a3f0b450") version("1.8.2", sha256="d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90") - depends_on("python@2.7:", type=("build", "link", "run")) + depends_on("python", type=("build", "link", "run")) + depends_on("py-setuptools@42:", when="@3:", type="build") depends_on("py-setuptools", type="build") - depends_on("py-numpy", type=("build", "run")) + depends_on("py-scikit-build@0.17", when="@3:", type="build") + + # CMakeLists.txt + depends_on("cmake@3.12:", type="build") + depends_on("ninja", type="build") diff --git a/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py b/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py new file mode 100644 index 00000000000000..0958c65b9ddbcc --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyRuamelYamlJinja2(PythonPackage): + """jinja2 pre and post-processor to update with YAML.""" + + homepage = "https://sourceforge.net/p/ruamel-yaml-jinja2/code/ci/default/tree" + pypi = "ruamel.yaml.jinja2/ruamel.yaml.jinja2-0.2.7.tar.gz" + + version("0.2.7", sha256="8449be29d9a157fa92d1648adc161d718e469f0d38a6b21e0eabb76fd5b3e663") + + depends_on("py-setuptools", type="build") + + # __init__.py + depends_on("py-ruamel-yaml@0.16.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-scikit-build/package.py b/var/spack/repos/builtin/packages/py-scikit-build/package.py index bfca8b7c77e7dc..a83a084fc179b9 100644 --- a/var/spack/repos/builtin/packages/py-scikit-build/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-build/package.py @@ -16,18 +16,37 @@ class PyScikitBuild(PythonPackage): the setuptools Python module and CMake.""" homepage = "https://scikit-build.readthedocs.io/en/latest/" - pypi = "scikit-build/scikit-build-0.15.0.tar.gz" + pypi = "scikit-build/scikit_build-0.17.6.tar.gz" maintainers("coreyjadams") + version("0.17.6", sha256="b51a51a36b37c42650994b5047912f59b22e3210b23e321f287611f9ef6e5c9d") version("0.15.0", sha256="e723cd0f3489a042370b9ea988bbb9cfd7725e8b25b20ca1c7981821fcf65fb9") version("0.12.0", sha256="f851382c469bcd9a8c98b1878bcfdd13b68556279d2fd9a329be41956ae5a7fe") version("0.11.1", sha256="da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c") version("0.10.0", sha256="7342017cc82dd6178e3b19377389b8a8d1f8b429d9cdb315cfb1094e34a0f526") - depends_on("py-setuptools@28.0.0:", type=("build", "run")) - depends_on("py-setuptools@42.0.0:", when="@0.15.0:", type=("build", "run")) - depends_on("py-setuptools-scm+toml", when="@0.15.0:", type="build") - depends_on("py-packaging", type=("build", "run")) - depends_on("py-wheel@0.29.0:", type=("build", "run")) + depends_on("py-hatchling", when="@0.17:", type="build") + depends_on("py-hatch-fancy-pypi-readme", when="@0.17:", type="build") + depends_on("py-hatch-vcs", when="@0.17:", type="build") depends_on("py-distro", when="@0.11:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-setuptools@42:", when="@0.15:", type=("build", "run")) + depends_on("py-setuptools@28:", type=("build", "run")) + depends_on("py-tomli", when="@0.17: ^python@:3.10", type=("build", "run")) + depends_on("py-typing-extensions@3.7:", when="@0.17: ^python@:3.7", type=("build", "run")) + depends_on("py-wheel@0.32:", when="@0.17:", type=("build", "run")) + depends_on("py-wheel@0.29:", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools-scm+toml", when="@0.15", type="build") + + def url_for_version(self, version): + url = ( + "https://files.pythonhosted.org/packages/source/s/scikit-build/scikit{}build-{}.tar.gz" + ) + if version >= Version("0.17"): + separator = "_" + else: + separator = "-" + return url.format(separator, version) diff --git a/var/spack/repos/builtin/packages/py-semver/package.py b/var/spack/repos/builtin/packages/py-semver/package.py index d3201097def6b0..59b05d9b64cca6 100644 --- a/var/spack/repos/builtin/packages/py-semver/package.py +++ b/var/spack/repos/builtin/packages/py-semver/package.py @@ -13,6 +13,8 @@ class PySemver(PythonPackage): homepage = "https://semver.org/" pypi = "semver/semver-2.8.1.tar.gz" + version("3.0.1", sha256="9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1") version("2.8.1", 
sha256="5b09010a66d9a3837211bb7ae5a20d10ba88f8cb49e92cb139a69ef90d5060d8") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", when="@3:", type="build") From f686212d22cb1327d9248076ec30337dec4152e1 Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Mon, 16 Oct 2023 17:10:31 -0400 Subject: [PATCH 212/408] py-furo: new version (#40559) --- var/spack/repos/builtin/packages/py-furo/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-furo/package.py b/var/spack/repos/builtin/packages/py-furo/package.py index c26ababcb0b7ff..a4ca3e1ce4d2d7 100644 --- a/var/spack/repos/builtin/packages/py-furo/package.py +++ b/var/spack/repos/builtin/packages/py-furo/package.py @@ -12,6 +12,7 @@ class PyFuro(PythonPackage): homepage = "https://github.com/pradyunsg/furo" pypi = "furo/furo-2023.5.20.tar.gz" + version("2023.9.10", sha256="5707530a476d2a63b8cad83b4f961f3739a69f4b058bcf38a03a39fa537195b2") version("2023.5.20", sha256="40e09fa17c6f4b22419d122e933089226dcdb59747b5b6c79363089827dea16f") depends_on("py-sphinx-theme-builder@0.2.0a10:", type="build") From b507aefe88e3e46f00b937e206bec461c8725055 Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Tue, 17 Oct 2023 04:23:46 +0200 Subject: [PATCH 213/408] intel-oneapi-compilers: ifx is located in bin not bin/intel64 (#40561) This is a fix on top of https://github.com/spack/spack/pull/40557 . Tagging @rscohn2 for review. --- .../repos/builtin/packages/intel-oneapi-compilers/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 84c560c2409825..be6f4d109b3c81 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -278,10 +278,13 @@ def extend_config_flags(self): self.write_config_file( common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx"] ) + self.write_config_file( + common_flags + classic_flags, self.component_prefix.linux.bin, ["ifx"] + ) self.write_config_file( common_flags + classic_flags, self.component_prefix.linux.bin.intel64, - ["icc", "icpc", "ifort", "ifx"], + ["icc", "icpc", "ifort"], ) def _ld_library_path(self): From 00079b532c6793220b0e1e30f60548466d45f4cc Mon Sep 17 00:00:00 2001 From: Eric Berquist Date: Mon, 16 Oct 2023 22:28:51 -0400 Subject: [PATCH 214/408] Add latest versions of rlwrap (#40563) * Add latest versions of rlwrap * rlwrap: fix URL for v0.46.1 --- var/spack/repos/builtin/packages/rlwrap/package.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/var/spack/repos/builtin/packages/rlwrap/package.py b/var/spack/repos/builtin/packages/rlwrap/package.py index 2df72bfa2b3f0b..8203856915270c 100644 --- a/var/spack/repos/builtin/packages/rlwrap/package.py +++ b/var/spack/repos/builtin/packages/rlwrap/package.py @@ -13,6 +13,17 @@ class Rlwrap(AutotoolsPackage): homepage = "https://github.com/hanslub42/rlwrap" url = "https://github.com/hanslub42/rlwrap/releases/download/v0.43/rlwrap-0.43.tar.gz" + version("0.46.1", sha256="2711986a1248f6ac59e2aecf5586205835970040d300a42b4bf8014397e73e37") + version("0.46", sha256="b4bd79fda824426dae65236e338ba7daf3f0d0acad7c1561d4d5e6dadcfd539d") + version("0.45.2", sha256="9f8870deb46e473d21b5db89d709b6497f4ef9fa06d44eebc5f821daa00c8eca") + version("0.44", sha256="cd7ff50cde66e443cbea0049b4abf1cca64a74948371fa4f1b5d9a5bbce1e13c") 
version("0.43", sha256="8e86d0b7882d9b8a73d229897a90edc207b1ae7fa0899dca8ee01c31a93feb2f") depends_on("readline@4.2:") + + def url_for_version(self, version): + if version < Version("0.46.1"): + return super().url_for_version(version) + # The latest release (0.46.1) removed the "v" prefix. + url_fmt = "https://github.com/hanslub42/rlwrap/releases/download/{0}/rlwrap-{0}.tar.gz" + return url_fmt.format(version) From f7d4ba4018c9d21c6679c29e8a266daa5d7c1b56 Mon Sep 17 00:00:00 2001 From: Patrick Bridges Date: Mon, 16 Oct 2023 21:13:31 -0600 Subject: [PATCH 215/408] Creation of Beatnik package and associated updates to silo and cabana spack package (#40382) * Added initial package for building Beatnik with spack * Fixed github ID for Jason as a maintainer. * Major revision of beatnik spack package to properly support GPU spack builds with CUDA (and ROCm, though that it untested) * Marked that beatnik 1.0 will require cabana 0.6.0. We will wait for the cabana 0.6.0 release before we release beatnik * Update to beatnik package spec to compile with hipcc when +rocm * Updated spack package for cabana for version 0.6.0 and appropriate heffte dependency * Updated beatnik package to require cabana 0.6.0 * More updates to cabana and beatnik to build with cabana 0.6.0 * Finish removing BLT dependence from beatnik * More updates to beatnik package for compiling on cray systems * Updated beatnik package for new cabana package * Changes to silo package for new silo version * Fixed version specs for heffte to be able to concretize and build * Fixed spack style issues for beatnik and silo packages * More spack formatting fixes to beatnik and silo * Patrick adopting silo package as maintainer for now * Should address final style changes to beatnik package spec * Yet more style fixes. * Perhaps this is the final style fixes? :) * Minor fix to cabana package on required versions --- .../repos/builtin/packages/beatnik/package.py | 90 +++++++++++++++++++ .../repos/builtin/packages/cabana/package.py | 25 +++++- .../repos/builtin/packages/silo/package.py | 24 +++-- 3 files changed, 129 insertions(+), 10 deletions(-) create mode 100644 var/spack/repos/builtin/packages/beatnik/package.py diff --git a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py new file mode 100644 index 00000000000000..aa39194494bbee --- /dev/null +++ b/var/spack/repos/builtin/packages/beatnik/package.py @@ -0,0 +1,90 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Beatnik(CMakePackage, CudaPackage, ROCmPackage): + """Fluid interface model solver based on Pandya and Shkoller's Z-Model formulation.""" + + homepage = "https://github.com/CUP-ECS/beatnik" + git = "https://github.com/CUP-ECS/beatnik.git" + + maintainers("patrickb314", "JStewart28") + + # Add proper versions and checksums here. 
Will add 1.0 when a proper SHA is available + # version("1.0", sha256="XXX") + version("develop", branch="develop") + version("main", branch="main") + + # Variants are primarily backends to build on GPU systems and pass the right + # information to the packages we depend on + variant("cuda", default=False, description="Use CUDA support from subpackages") + variant("openmp", default=False, description="Use OpenMP support from subpackages") + + # Dependencies for all Beatnik versions + depends_on("mpi") + depends_on("mpi +cuda", when="+cuda") + depends_on("mpi +rocm", when="+rocm") + + # Kokkos dependencies + depends_on("kokkos @4:") + depends_on("kokkos +cuda +cuda_lambda +cuda_constexpr", when="+cuda") + depends_on("kokkos +rocm", when="+rocm") + depends_on("kokkos +wrapper", when="%gcc+cuda") + + # Cabana dependencies + depends_on("cabana @0.6.0 +grid +heffte +silo +hdf5 +mpi") + depends_on("cabana +cuda", when="+cuda") + depends_on("cabana +rocm", when="+rocm") + + # Silo dependencies + depends_on("silo @4.11:") + depends_on("silo @4.11.1:", when="%cce") # Earlier silo versions have trouble with cce + + # Heffte dependencies - We always require FFTW so that there's a host + # backend even when we're compiling for GPUs + depends_on("heffte +fftw") + depends_on("heffte +cuda", when="+cuda") + depends_on("heffte +rocm", when="+rocm") + + # If we're using CUDA or ROCM, require MPIs be GPU-aware + conflicts("mpich ~cuda", when="+cuda") + conflicts("mpich ~rocm", when="+rocm") + conflicts("openmpi ~cuda", when="+cuda") + conflicts("^intel-mpi") # Heffte won't build with intel MPI because of needed C++ MPI support + + # Propagate CUDA and AMD GPU targets to cabana + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on("cabana cuda_arch=%s" % cuda_arch, when="+cuda cuda_arch=%s" % cuda_arch) + for amdgpu_value in ROCmPackage.amdgpu_targets: + depends_on( + "cabana +rocm amdgpu_target=%s" % amdgpu_value, + when="+rocm amdgpu_target=%s" % amdgpu_value, + ) + + # CMake specific build functions + def cmake_args(self): + args = [] + + # Use hipcc as the C++ compiler if we are compiling for rocm. Doing it this way, + # instead of changing CMAKE_CXX_COMPILER, keeps the spack wrapper + # and the rpaths it sets for us from the underlying spec. 
+ if "+rocm" in self.spec: + env["SPACK_CXX"] = self.spec["hip"].hipcc + + # If we're building with cray mpich, we need to make sure we get the GTL library for + # gpu-aware MPI, since cabana and beatnik require it + if self.spec.satisfies("+rocm ^cray-mpich"): + gtl_dir = join_path(self.spec["cray-mpich"].prefix, "..", "..", "..", "gtl", "lib") + args.append( + "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath={0} -L{0} -lmpi_gtl_hsa".format(gtl_dir) + ) + elif self.spec.satisfies("+cuda ^cray-mpich"): + gtl_dir = join_path(self.spec["cray-mpich"].prefix, "..", "..", "..", "gtl", "lib") + args.append( + "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath={0} -L{0} -lmpi_gtl_cuda".format(gtl_dir) + ) + return args diff --git a/var/spack/repos/builtin/packages/cabana/package.py b/var/spack/repos/builtin/packages/cabana/package.py index 6b6289b981adeb..ae4d4e9fca35aa 100644 --- a/var/spack/repos/builtin/packages/cabana/package.py +++ b/var/spack/repos/builtin/packages/cabana/package.py @@ -12,7 +12,7 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/ECP-copa/Cabana" git = "https://github.com/ECP-copa/Cabana.git" - url = "https://github.com/ECP-copa/Cabana/archive/0.5.0.tar.gz" + url = "https://github.com/ECP-copa/Cabana/archive/0.6.0.tar.gz" maintainers("junghans", "streeve", "sslattery") @@ -47,6 +47,7 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.9:", type="build", when="@:0.4.0") depends_on("cmake@3.16:", type="build", when="@0.5.0:") + depends_on("googletest", type="test", when="+testing") _versions = {":0.2": "-legacy", "0.3:": "@3.1:", "0.4:": "@3.2:", "0.6:": "@3.7:"} for _version in _versions: @@ -63,34 +64,50 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): _kk_spec = "kokkos{0}+{1}".format(_kk_version, _backend) depends_on(_kk_spec, when="@{0}+{1}".format(_version, _backend)) + # Propagate cuda architectures down to Kokkos and optional submodules for arch in CudaPackage.cuda_arch_values: cuda_dep = "+cuda cuda_arch={0}".format(arch) depends_on("kokkos {0}".format(cuda_dep), when=cuda_dep) + depends_on("heffte {0}".format(cuda_dep), when="+heffte {0}".format(cuda_dep)) + depends_on("arborx {0}".format(cuda_dep), when="+arborx {0}".format(cuda_dep)) + depends_on("hypre {0}".format(cuda_dep), when="+hypre {0}".format(cuda_dep)) for arch in ROCmPackage.amdgpu_targets: rocm_dep = "+rocm amdgpu_target={0}".format(arch) depends_on("kokkos {0}".format(rocm_dep), when=rocm_dep) + depends_on("heffte {0}".format(rocm_dep), when="+heffte {0}".format(rocm_dep)) + depends_on("arborx {0}".format(rocm_dep), when="+arborx {0}".format(rocm_dep)) + depends_on("hypre {0}".format(rocm_dep), when="+hypre {0}".format(rocm_dep)) conflicts("+cuda", when="cuda_arch=none") + conflicts("+rocm", when="amdgpu_target=none") + depends_on("kokkos+cuda_lambda", when="+cuda") + # Dependencies for subpackages depends_on("arborx", when="@0.3.0:+arborx") depends_on("hypre-cmake@2.22.0:", when="@0.4.0:+hypre") depends_on("hypre-cmake@2.22.1:", when="@0.5.0:+hypre") - # Previous heFFTe pinned at 2.x.0 because its cmakefiles can't roll forward - # compatibilty to later minor versions. 
depends_on("heffte@2.0.0", when="@0.4.0+heffte") - depends_on("heffte@2.1.0", when="@0.5.0:+heffte") + depends_on("heffte@2.1.0", when="@0.5.0+heffte") depends_on("heffte@2.3.0:", when="@0.6.0:+heffte") depends_on("silo", when="@0.5.0:+silo") depends_on("hdf5", when="@0.6.0:+hdf5") depends_on("mpi", when="+mpi") + # Cabana automatically builds HDF5 support with newer cmake versions + # in version 0.6.0. This is fixed post-0.6 + conflicts("~hdf5", when="@0.6.0 ^cmake@:3.26") + + # Cajita support requires MPI conflicts("+cajita ~mpi") conflicts("+grid ~mpi") + # Conflict variants only available in newer versions of cabana conflicts("+rocm", when="@:0.2.0") conflicts("+sycl", when="@:0.3.0") + conflicts("+silo", when="@:0.3.0") + conflicts("+hdf5", when="@:0.5.0") def cmake_args(self): options = [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 6c75493faa2adc..4b4a0194e867ea 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -12,8 +12,21 @@ class Silo(AutotoolsPackage): data to binary, disk files.""" homepage = "https://wci.llnl.gov/simulation/computer-codes/silo" + git = "https://github.com/LLNL/Silo.git" url = "https://wci.llnl.gov/sites/wci/files/2021-01/silo-4.10.2.tgz" + maintainers("patrickb314") + version( + "4.11.1", + preferred=True, + sha256="49eddc00304aa4a19074b099559edbdcaa3532c98df32f99aa62b9ec3ea7cee2", + url="https://github.com/LLNL/Silo/releases/download/4.11.1/silo-4.11.1.tar.xz", + ) + version( + "4.11.1-bsd", + sha256="51ccfdf3c09dfc98c7858a0a6f08cc3b2a07ee3c4142ee6482ba7b24e314c2aa", + url="https://github.com/LLNL/Silo/releases/download/4.11.1/silo-4.11.1-bsd.tar.xz", + ) version( "4.11", sha256="ab936c1f4fc158d9fdc4415965f7d9def7f4abeca596fe5a25bd8485654898ac", @@ -68,19 +81,18 @@ class Silo(AutotoolsPackage): patch("H5FD_class_t-terminate.patch", when="@:4.10.2-bsd") # H5EPR_SEMI_COLON.patch was fixed in current dev - # patch("H5EPR_SEMI_COLON.patch", when="@:4.11-bsd") - patch("H5EPR_SEMI_COLON.patch") + patch("H5EPR_SEMI_COLON.patch", when="@:4.11-bsd") # Fix missing F77 init, fixed in 4.9 patch("48-configure-f77.patch", when="@:4.8") # The previously used AX_CHECK_COMPILER_FLAGS macro was dropped from # autoconf-archive in 2011 - patch("configure-AX_CHECK_COMPILE_FLAG.patch") + patch("configure-AX_CHECK_COMPILE_FLAG.patch", when="@:4.11-bsd") # API changes in hdf5-1.13 cause breakage # See https://github.com/LLNL/Silo/pull/260 - patch("hdf5-113.patch", when="@4.11: +hdf5 ^hdf5@1.13:") + patch("hdf5-113.patch", when="@4.11:4.11-bsd +hdf5 ^hdf5@1.13:") conflicts("^hdf5@1.13:", when="@:4.10.2-bsd") # hzip and fpzip are not available in the BSD releases @@ -88,10 +100,10 @@ class Silo(AutotoolsPackage): conflicts("+fpzip", when="@4.10.2-bsd,4.11-bsd") # zfp include missing - patch("zfp_error.patch", when="@4.11 +hdf5") + patch("zfp_error.patch", when="@4.11:4.11-bsd +hdf5") # use /usr/bin/env perl for portability - patch("mkinc-usr-bin-env-perl.patch") + patch("mkinc-usr-bin-env-perl.patch", when="@:4.11-bsd") def flag_handler(self, name, flags): spec = self.spec From 0585f6859c69adca0c88ba09fa761553de36e414 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 17 Oct 2023 08:38:06 +0200 Subject: [PATCH 216/408] packages: use "requires" to allow only selected compilers (#40567) A few packages have encoded an idiom that pre-dates the introduction of the 'requires' 
directive, and they cycle over all compilers to conflict with the ones that are not supported. Here instead we reverse the logic, and require the ones that are supported. --- .../repos/builtin/packages/blaspp/package.py | 10 +---- .../packages/clingo-bootstrap/package.py | 37 +++++++++---------- .../builtin/packages/exciting/package.py | 20 ++-------- .../builtin/packages/gaussian-src/package.py | 6 +-- .../intel-oneapi-compilers/package.py | 7 +--- .../repos/builtin/packages/knem/package.py | 7 +--- .../builtin/packages/lapackpp/package.py | 10 +---- .../repos/builtin/packages/lbann/package.py | 4 +- .../repos/builtin/packages/nvhpc/package.py | 6 +-- .../repos/builtin/packages/slate/package.py | 10 +---- .../builtin/packages/trilinos/package.py | 12 +++--- .../repos/builtin/packages/xpmem/package.py | 8 +--- 12 files changed, 34 insertions(+), 103 deletions(-) diff --git a/var/spack/repos/builtin/packages/blaspp/package.py b/var/spack/repos/builtin/packages/blaspp/package.py index f6f52d2756dcb5..d43ab4de6bb269 100644 --- a/var/spack/repos/builtin/packages/blaspp/package.py +++ b/var/spack/repos/builtin/packages/blaspp/package.py @@ -72,15 +72,7 @@ class Blaspp(CMakePackage, CudaPackage, ROCmPackage): conflicts("+cuda", when="+sycl", msg=backend_msg) conflicts("+sycl", when="@:2023.06.00", msg="SYCL support requires BLAS++ version 2023.08.25") - # TODO: +sycl requires use of the intel-oneapi compiler, but we cannot express that directly. - # For now, add conflicts for other compilers instead. - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "oneapi": - conflicts( - "%{0}".format(__compiler), - when="+sycl", - msg="blaspp+sycl must be compiled with %oneapi", - ) + requires("%oneapi", when="+sycl", msg="blaspp+sycl must be compiled with %oneapi") def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py index e3aed932a911ea..7fb34446a11ada 100644 --- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -5,7 +5,6 @@ import glob import os -import spack.compilers import spack.paths import spack.user_environment from spack.package import * @@ -53,28 +52,26 @@ class ClingoBootstrap(Clingo): depends_on("cmake@3.16.0:", type="build") # On Linux we bootstrap with GCC or clang - for compiler_spec in [ - c for c in spack.compilers.supported_compilers() if c not in ("gcc", "clang") - ]: - conflicts( - "%{0}".format(compiler_spec), - when="platform=linux", - msg="GCC or clang are required to bootstrap clingo on Linux", - ) - conflicts( - "%{0}".format(compiler_spec), - when="platform=cray", - msg="GCC or clang are required to bootstrap clingo on Cray", - ) + requires( + "%gcc", + "%clang", + when="platform=linux", + msg="GCC or clang are required to bootstrap clingo on Linux", + ) + requires( + "%gcc", + "%clang", + when="platform=cray", + msg="GCC or clang are required to bootstrap clingo on Cray", + ) conflicts("%gcc@:5", msg="C++14 support is required to bootstrap clingo") # On Darwin we bootstrap with Apple Clang - for compiler_spec in [c for c in spack.compilers.supported_compilers() if c != "apple-clang"]: - conflicts( - "%{0}".format(compiler_spec), - when="platform=darwin", - msg="Apple-clang is required to bootstrap clingo on MacOS", - ) + requires( + "%apple-clang", + when="platform=darwin", + msg="Apple-clang is required to bootstrap clingo on MacOS", + ) # Clingo 
needs the Python module to be usable by Spack conflicts("~python", msg="Python support is required to bootstrap Spack") diff --git a/var/spack/repos/builtin/packages/exciting/package.py b/var/spack/repos/builtin/packages/exciting/package.py index 215b6de5f52a3b..c74da1013c1ebf 100644 --- a/var/spack/repos/builtin/packages/exciting/package.py +++ b/var/spack/repos/builtin/packages/exciting/package.py @@ -39,23 +39,9 @@ class Exciting(MakefilePackage): depends_on("scalapack", when="+scalapack") # conflicts('%gcc@10:', msg='exciting cannot be built with GCC 10') - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "intel": - conflicts( - "%{0}".format(__compiler), - when="^mkl", - msg="Intel MKL only works with the Intel compiler", - ) - conflicts( - "%{0}".format(__compiler), - when="^intel-mkl", - msg="Intel MKL only works with the Intel compiler", - ) - conflicts( - "%{0}".format(__compiler), - when="^intel-mpi", - msg="Intel MPI only works with the Intel compiler", - ) + requires("%intel", when="^mkl", msg="Intel MKL only works with the Intel compiler") + requires("%intel", when="^intel-mkl", msg="Intel MKL only works with the Intel compiler") + requires("%intel", when="^intel-mpi", msg="Intel MPI only works with the Intel compiler") def patch(self): """Fix bad logic in m_makespectrum.f90 for the Oxygen release""" diff --git a/var/spack/repos/builtin/packages/gaussian-src/package.py b/var/spack/repos/builtin/packages/gaussian-src/package.py index d31ee97800d022..5cd0cf9e27e661 100644 --- a/var/spack/repos/builtin/packages/gaussian-src/package.py +++ b/var/spack/repos/builtin/packages/gaussian-src/package.py @@ -28,11 +28,7 @@ class GaussianSrc(Package): depends_on("tcsh", type="build") # All compilers except for pgi are in conflict: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "pgi": - conflicts( - "%{0}".format(__compiler), msg="Gaussian can only be built with the PGI compiler" - ) + requires("%pgi", msg="Gaussian can only be built with the PGI compiler") patch("16-C.01-replace-deprecated-pgf77-with-pgfortran.patch", when="@16-C.01") patch("16-C.01-fix-building-c-code-with-pgcc.patch", when="@16-C.01") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index be6f4d109b3c81..5545053cc8d368 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import spack.compilers from spack.build_environment import dso_suffix from spack.package import * @@ -171,11 +170,7 @@ class IntelOneapiCompilers(IntelOneApiPackage): # TODO: effectively gcc is a direct dependency of intel-oneapi-compilers, but we # cannot express that properly. For now, add conflicts for non-gcc compilers # instead. 
- for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "%{0}".format(__compiler), msg="intel-oneapi-compilers must be installed with %gcc" - ) + requires("%gcc", msg="intel-oneapi-compilers must be installed with %gcc") for v in versions: version(v["version"], expand=False, **v["cpp"]) diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py index 85f1c8d1d6259b..2b229c93d28755 100644 --- a/var/spack/repos/builtin/packages/knem/package.py +++ b/var/spack/repos/builtin/packages/knem/package.py @@ -49,12 +49,7 @@ class Knem(AutotoolsPackage): # Ideally, we should list all non-Linux-based platforms here: conflicts("platform=darwin") - # All compilers except for gcc are in conflict: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "%{0}".format(__compiler), msg="Linux kernel module must be compiled with gcc" - ) + requires("%gcc", msg="Linux kernel module must be compiled with gcc") @run_before("build") def override_kernel_compiler(self): diff --git a/var/spack/repos/builtin/packages/lapackpp/package.py b/var/spack/repos/builtin/packages/lapackpp/package.py index 1532d87843048a..a69e4e7bdffe16 100644 --- a/var/spack/repos/builtin/packages/lapackpp/package.py +++ b/var/spack/repos/builtin/packages/lapackpp/package.py @@ -85,15 +85,7 @@ class Lapackpp(CMakePackage, CudaPackage, ROCmPackage): conflicts("+cuda", when="+sycl", msg=backend_msg) conflicts("+sycl", when="@:2023.06.00", msg="+sycl requires LAPACK++ version 2023.08.25") - # TODO: +sycl requires use of the intel-oneapi compiler, but we cannot express that directly. - # For now, add conflicts for other compilers instead. - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "oneapi": - conflicts( - "%{0}".format(__compiler), - when="+sycl", - msg="lapackpp+sycl must be compiled with %oneapi", - ) + requires("%oneapi", when="+sycl", msg="lapackpp+sycl must be compiled with %oneapi") def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index bc407d9ea442f4..b5ed6df831503f 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -179,9 +179,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): conflicts("~python", when="@0.91:0.101") conflicts("~pfe", when="@0.91:0.101") - for comp in spack.compilers.supported_compilers(): - if comp != "clang": - conflicts("+lld", when="%" + comp) + requires("%clang", when="+lld") conflicts("+lld", when="+gold") conflicts("+gold", when="platform=darwin", msg="gold does not work on Darwin") diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py index 2cabbfa056f90d..578813f80668cb 100644 --- a/var/spack/repos/builtin/packages/nvhpc/package.py +++ b/var/spack/repos/builtin/packages/nvhpc/package.py @@ -377,11 +377,7 @@ class Nvhpc(Package): provides("lapack", when="+lapack") provides("mpi", when="+mpi") - # TODO: effectively gcc is a direct dependency of nvhpc, but we cannot express that - # properly. For now, add conflicts for non-gcc compilers instead. 
- for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts("%{0}".format(__compiler), msg="nvhpc must be installed with %gcc") + requires("%gcc", msg="nvhpc must be installed with %gcc") def _version_prefix(self): return join_path(self.prefix, "Linux_%s" % self.spec.target.family, self.version) diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 95b4e895801ffa..778beda83ae1d4 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -84,15 +84,7 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): depends_on("scalapack", type="test") depends_on("hipify-clang", when="@:2021.05.02 +rocm ^hip@5:") - # TODO: +sycl requires use of the intel-oneapi compiler, but we cannot express that directly. - # For now, add conflicts for other compilers instead. - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "oneapi": - conflicts( - "%{0}".format(__compiler), - when="+sycl", - msg="slate+sycl must be compiled with %oneapi", - ) + requires("%oneapi", when="+sycl", msg="slate+sycl must be compiled with %oneapi") cpp_17_msg = "Requires C++17 compiler support" conflicts("%gcc@:5", msg=cpp_17_msg) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index b6ee39282e44c8..9af8ab14dcdd73 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -344,13 +344,11 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): conflicts("gotype=all", when="@12.15:") # CUDA without wrapper requires clang - for _compiler in spack.compilers.supported_compilers(): - if _compiler != "clang": - conflicts( - "+cuda", - when="~wrapper %" + _compiler, - msg="trilinos~wrapper+cuda can only be built with the " "Clang compiler", - ) + requires( + "%clang", + when="+cuda~wrapper", + msg="trilinos~wrapper+cuda can only be built with the Clang compiler", + ) conflicts("+cuda_rdc", when="~cuda") conflicts("+rocm_rdc", when="~rocm") conflicts("+wrapper", when="~cuda") diff --git a/var/spack/repos/builtin/packages/xpmem/package.py b/var/spack/repos/builtin/packages/xpmem/package.py index 9fb7600fda4e66..c8091478d49b37 100644 --- a/var/spack/repos/builtin/packages/xpmem/package.py +++ b/var/spack/repos/builtin/packages/xpmem/package.py @@ -64,13 +64,7 @@ class Xpmem(AutotoolsPackage): conflicts("+kernel-module", when="platform=darwin") # All compilers except for gcc are in conflict with +kernel-module: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "+kernel-module", - when="%{0}".format(__compiler), - msg="Linux kernel module must be compiled with gcc", - ) + requires("%gcc", when="+kernel-module", msg="Linux kernel module must be compiled with gcc") def autoreconf(self, spec, prefix): Executable("./autogen.sh")() From 9832b6f2a2dd54a7e5c4f87029bcbcca9263f702 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 17 Oct 2023 15:40:48 +0200 Subject: [PATCH 217/408] Support `spack env activate --with-view ` (#40549) Currently `spack env activate --with-view` exists, but is a no-op. So, it is not too much of a breaking change to make this redundant flag accept a value `spack env activate --with-view ` which activates a particular view by name. The view name is stored in `SPACK_ENV_VIEW`. 
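For concreteness, with Spack's shell integration loaded, usage after this change might look as follows; the environment name `myenv` and the view name `mpis` are hypothetical placeholders for this example, not names taken from the patch:

    # set runtime environment variables from a specific, non-default view
    $ spack env activate --with-view mpis myenv
    # or skip view-related runtime environment variables entirely
    $ spack env activate --without-view myenv
    # deactivation consults SPACK_ENV_VIEW, so only what was activated is undone
    $ spack env deactivate

The short options `-v` (which now takes a view name) and `-V` remain as aliases for `--with-view` and `--without-view`.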
This also fixes an issue where deactivating a view that was activated with `--without-view` possibly removes entries from PATH, since now we keep track of whether the default view was "enabled" or not. --- lib/spack/spack/cmd/env.py | 33 ++++++----- lib/spack/spack/environment/__init__.py | 2 + lib/spack/spack/environment/environment.py | 64 +++++++++++----------- lib/spack/spack/environment/shell.py | 55 ++++++++++++------- lib/spack/spack/test/cmd/env.py | 29 ++++++++-- share/spack/spack-completion.bash | 2 +- share/spack/spack-completion.fish | 10 ++-- 7 files changed, 120 insertions(+), 75 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 6c22e70a5d834f..cf5671aafae796 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -8,6 +8,7 @@ import shutil import sys import tempfile +from typing import Optional import llnl.string as string import llnl.util.filesystem as fs @@ -96,22 +97,16 @@ def env_activate_setup_parser(subparser): view_options = subparser.add_mutually_exclusive_group() view_options.add_argument( - "-v", "--with-view", - action="store_const", - dest="with_view", - const=True, - default=True, - help="update PATH, etc., with associated view", + "-v", + metavar="name", + help="set runtime environment variables for specific view", ) view_options.add_argument( - "-V", "--without-view", - action="store_const", - dest="with_view", - const=False, - default=True, - help="do not update PATH, etc., with associated view", + "-V", + action="store_true", + help="do not set runtime environment variables for any view", ) subparser.add_argument( @@ -197,10 +192,20 @@ def env_activate(args): # Activate new environment active_env = ev.Environment(env_path) + + # Check if runtime environment variables are requested, and if so, for what view. 
+ view: Optional[str] = None + if args.with_view: + view = args.with_view + if not active_env.has_view(view): + tty.die(f"The environment does not have a view named '{view}'") + elif not args.without_view and active_env.has_view(ev.default_view_name): + view = ev.default_view_name + cmds += spack.environment.shell.activate_header( - env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None + env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view ) - env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view)) + env_mods.extend(spack.environment.shell.activate(env=active_env, view=view)) cmds += env_mods.shell_modifications(args.shell) sys.stdout.write(cmds) diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py index 227b48670cafed..ac598e8421d2ad 100644 --- a/lib/spack/spack/environment/__init__.py +++ b/lib/spack/spack/environment/__init__.py @@ -365,6 +365,7 @@ read, root, spack_env_var, + spack_env_view_var, update_yaml, ) @@ -397,5 +398,6 @@ "read", "root", "spack_env_var", + "spack_env_view_var", "update_yaml", ] diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 496a8b332a7525..ee48955ac54038 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -64,6 +64,8 @@ #: environment variable used to indicate the active environment spack_env_var = "SPACK_ENV" +#: environment variable used to indicate the active environment view +spack_env_view_var = "SPACK_ENV_VIEW" #: currently activated environment _active_environment: Optional["Environment"] = None @@ -1595,16 +1597,14 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False): @property def default_view(self): - if not self.views: - raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name)) - - if default_view_name not in self.views: - raise SpackEnvironmentError( - "{0} does not have a default view enabled".format(self.name) - ) + if not self.has_view(default_view_name): + raise SpackEnvironmentError(f"{self.name} does not have a default view enabled") return self.views[default_view_name] + def has_view(self, view_name: str) -> bool: + return view_name in self.views + def update_default_view(self, path_or_bool: Union[str, bool]) -> None: """Updates the path of the default view. @@ -1690,14 +1690,14 @@ def check_views(self): "Loading the environment view will require reconcretization." 
% self.name ) - def _env_modifications_for_default_view(self, reverse=False): + def _env_modifications_for_view(self, view: ViewDescriptor, reverse: bool = False): all_mods = spack.util.environment.EnvironmentModifications() visited = set() errors = [] for root_spec in self.concrete_roots(): - if root_spec in self.default_view and root_spec.installed and root_spec.package: + if root_spec in view and root_spec.installed and root_spec.package: for spec in root_spec.traverse(deptype="run", root=True): if spec.name in visited: # It is expected that only one instance of the package @@ -1714,7 +1714,7 @@ def _env_modifications_for_default_view(self, reverse=False): visited.add(spec.name) try: - mods = uenv.environment_modifications_for_spec(spec, self.default_view) + mods = uenv.environment_modifications_for_spec(spec, view) except Exception as e: msg = "couldn't get environment settings for %s" % spec.format( "{name}@{version} /{hash:7}" @@ -1726,22 +1726,22 @@ def _env_modifications_for_default_view(self, reverse=False): return all_mods, errors - def add_default_view_to_env(self, env_mod): - """ - Collect the environment modifications to activate an environment using the - default view. Removes duplicate paths. + def add_view_to_env( + self, env_mod: spack.util.environment.EnvironmentModifications, view: str + ) -> spack.util.environment.EnvironmentModifications: + """Collect the environment modifications to activate an environment using the provided + view. Removes duplicate paths. Args: - env_mod (spack.util.environment.EnvironmentModifications): the environment - modifications object that is modified. - """ - if default_view_name not in self.views: - # No default view to add to shell + env_mod: the environment modifications object that is modified. + view: the name of the view to activate.""" + descriptor = self.views.get(view) + if not descriptor: return env_mod - env_mod.extend(uenv.unconditional_environment_modifications(self.default_view)) + env_mod.extend(uenv.unconditional_environment_modifications(descriptor)) - mods, errors = self._env_modifications_for_default_view() + mods, errors = self._env_modifications_for_view(descriptor) env_mod.extend(mods) if errors: for err in errors: @@ -1753,22 +1753,22 @@ def add_default_view_to_env(self, env_mod): return env_mod - def rm_default_view_from_env(self, env_mod): - """ - Collect the environment modifications to deactivate an environment using the - default view. Reverses the action of ``add_default_view_to_env``. + def rm_view_from_env( + self, env_mod: spack.util.environment.EnvironmentModifications, view: str + ) -> spack.util.environment.EnvironmentModifications: + """Collect the environment modifications to deactivate an environment using the provided + view. Reverses the action of ``add_view_to_env``. Args: - env_mod (spack.util.environment.EnvironmentModifications): the environment - modifications object that is modified. - """ - if default_view_name not in self.views: - # No default view to add to shell + env_mod: the environment modifications object that is modified. 
+ view: the name of the view to deactivate.""" + descriptor = self.views.get(view) + if not descriptor: return env_mod - env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed()) + env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed()) - mods, _ = self._env_modifications_for_default_view(reverse=True) + mods, _ = self._env_modifications_for_view(descriptor, reverse=True) env_mod.extend(mods) return env_mod diff --git a/lib/spack/spack/environment/shell.py b/lib/spack/spack/environment/shell.py index 380e49fa0f9059..a4f9634a8da051 100644 --- a/lib/spack/spack/environment/shell.py +++ b/lib/spack/spack/environment/shell.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +from typing import Optional import llnl.util.tty as tty from llnl.util.tty.color import colorize @@ -13,12 +14,14 @@ from spack.util.environment import EnvironmentModifications -def activate_header(env, shell, prompt=None): +def activate_header(env, shell, prompt=None, view: Optional[str] = None): # Construct the commands to run cmds = "" if shell == "csh": # TODO: figure out how to make color work for csh cmds += "setenv SPACK_ENV %s;\n" % env.path + if view: + cmds += "setenv SPACK_ENV_VIEW %s;\n" % view cmds += 'alias despacktivate "spack env deactivate";\n' if prompt: cmds += "if (! $?SPACK_OLD_PROMPT ) " @@ -29,6 +32,8 @@ def activate_header(env, shell, prompt=None): prompt = colorize("@G{%s} " % prompt, color=True) cmds += "set -gx SPACK_ENV %s;\n" % env.path + if view: + cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view cmds += "function despacktivate;\n" cmds += " spack env deactivate;\n" cmds += "end;\n" @@ -40,15 +45,21 @@ def activate_header(env, shell, prompt=None): elif shell == "bat": # TODO: Color cmds += 'set "SPACK_ENV=%s"\n' % env.path + if view: + cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view # TODO: despacktivate # TODO: prompt elif shell == "pwsh": cmds += "$Env:SPACK_ENV='%s'\n" % env.path + if view: + cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view else: if "color" in os.getenv("TERM", "") and prompt: prompt = colorize("@G{%s}" % prompt, color=True, enclose=True) cmds += "export SPACK_ENV=%s;\n" % env.path + if view: + cmds += "export SPACK_ENV_VIEW=%s;\n" % view cmds += "alias despacktivate='spack env deactivate';\n" if prompt: cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n" @@ -66,12 +77,14 @@ def deactivate_header(shell): cmds = "" if shell == "csh": cmds += "unsetenv SPACK_ENV;\n" + cmds += "unsetenv SPACK_ENV_VIEW;\n" cmds += "if ( $?SPACK_OLD_PROMPT ) " cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&' cmds += " unsetenv SPACK_OLD_PROMPT';\n" cmds += "unalias despacktivate;\n" elif shell == "fish": cmds += "set -e SPACK_ENV;\n" + cmds += "set -e SPACK_ENV_VIEW;\n" cmds += "functions -e despacktivate;\n" # # NOTE: Not changing fish_prompt (above) => no need to restore it here. @@ -79,14 +92,19 @@ def deactivate_header(shell): elif shell == "bat": # TODO: Color cmds += 'set "SPACK_ENV="\n' + cmds += 'set "SPACK_ENV_VIEW="\n' # TODO: despacktivate # TODO: prompt elif shell == "pwsh": cmds += "Set-Item -Path Env:SPACK_ENV\n" + cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n" else: cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n" cmds += "unset SPACK_ENV; export SPACK_ENV;\n" cmds += "fi;\n" + cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n" + cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n" + cmds += "fi;\n" cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n" cmds += "if [ ! 
-z ${SPACK_OLD_PS1+x} ]; then\n" cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n" @@ -100,24 +118,23 @@ def deactivate_header(shell): return cmds -def activate(env, use_env_repo=False, add_view=True): - """ - Activate an environment and append environment modifications +def activate( + env: ev.Environment, use_env_repo=False, view: Optional[str] = "default" +) -> EnvironmentModifications: + """Activate an environment and append environment modifications To activate an environment, we add its configuration scope to the existing Spack configuration, and we set active to the current environment. Arguments: - env (spack.environment.Environment): the environment to activate - use_env_repo (bool): use the packages exactly as they appear in the - environment's repository - add_view (bool): generate commands to add view to path variables + env: the environment to activate + use_env_repo: use the packages exactly as they appear in the environment's repository + view: generate commands to add runtime environment variables for named view Returns: spack.util.environment.EnvironmentModifications: Environment variables - modifications to activate environment. - """ + modifications to activate environment.""" ev.activate(env, use_env_repo=use_env_repo) env_mods = EnvironmentModifications() @@ -129,9 +146,9 @@ def activate(env, use_env_repo=False, add_view=True): # become PATH variables. # try: - if add_view and ev.default_view_name in env.views: + if view and env.has_view(view): with spack.store.STORE.db.read_transaction(): - env.add_default_view_to_env(env_mods) + env.add_view_to_env(env_mods, view) except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e: tty.error(e) tty.die( @@ -145,17 +162,15 @@ def activate(env, use_env_repo=False, add_view=True): return env_mods -def deactivate(): - """ - Deactivate an environment and collect corresponding environment modifications. +def deactivate() -> EnvironmentModifications: + """Deactivate an environment and collect corresponding environment modifications. Note: unloads the environment in its current state, not in the state it was loaded in, meaning that specs that were removed from the spack environment after activation are not unloaded. Returns: - spack.util.environment.EnvironmentModifications: Environment variables - modifications to activate environment. + Environment variables modifications to activate environment. 
""" env_mods = EnvironmentModifications() active = ev.active_environment() @@ -163,10 +178,12 @@ def deactivate(): if active is None: return env_mods - if ev.default_view_name in active.views: + active_view = os.getenv(ev.spack_env_view_var) + + if active_view and active.has_view(active_view): try: with spack.store.STORE.db.read_transaction(): - active.rm_default_view_from_env(env_mods) + active.rm_view_from_env(env_mods, active_view) except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e: tty.warn(e) tty.warn( diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index cef5ccbcd5a744..4845d122060ea6 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -663,7 +663,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages): e.write() env_mod = spack.util.environment.EnvironmentModifications() - e.add_default_view_to_env(env_mod) + e.add_view_to_env(env_mod, "default") env_variables = {} env_mod.apply_modifications(env_variables) assert str(fake_bin) in env_variables["PATH"] @@ -2356,7 +2356,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins This is a cursory check; ``share/spack/qa/setup-env-test.sh`` checks for correctness. """ - env("create", "test", add_view=True) + env("create", "test") out = env("activate", "--sh", "test") assert "export SPACK_ENV=" in out @@ -2371,7 +2371,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery): """Check the shell commands output by ``spack env activate --csh``.""" - env("create", "test", add_view=True) + env("create", "test") out = env("activate", "--csh", "test") assert "setenv SPACK_ENV" in out @@ -2388,7 +2388,7 @@ def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, in def test_env_activate_default_view_root_unconditional(mutable_mock_env_path): """Check that the root of the default view in the environment is added to the shell unconditionally.""" - env("create", "test", add_view=True) + env("create", "test") with ev.read("test") as e: viewdir = e.default_view.root @@ -2403,6 +2403,27 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path): ) +def test_env_activate_custom_view(tmp_path: pathlib.Path, mock_packages): + """Check that an environment can be activated with a non-default view.""" + env_template = tmp_path / "spack.yaml" + default_dir = tmp_path / "defaultdir" + nondefaultdir = tmp_path / "nondefaultdir" + with open(env_template, "w") as f: + f.write( + f"""\ +spack: + specs: [a] + view: + default: + root: {default_dir} + nondefault: + root: {nondefaultdir}""" + ) + env("create", "test", str(env_template)) + shell = env("activate", "--sh", "--with-view", "nondefault", "test") + assert os.path.join(nondefaultdir, "bin") in shell + + def test_concretize_user_specs_together(): e = ev.create("coconcretization") e.unify = True diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index b9521b8f0cfaad..0280524536cfbc 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1016,7 +1016,7 @@ _spack_env() { _spack_env_activate() { if $list_options then - SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh -v --with-view -V --without-view -p --prompt --temp -d --dir" + SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh --with-view -v --without-view -V -p --prompt 
--temp -d --dir" else _environments fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index f4ac310adacdb4..e37b3448d5fcfc 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1427,7 +1427,7 @@ complete -c spack -n '__fish_spack_using_command env' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command env' -s h -l help -d 'show this help message and exit' # spack env activate -set -g __fish_spack_optspecs_spack_env_activate h/help sh csh fish bat pwsh v/with-view V/without-view p/prompt temp d/dir= +set -g __fish_spack_optspecs_spack_env_activate h/help sh csh fish bat pwsh v/with-view= V/without-view p/prompt temp d/dir= complete -c spack -n '__fish_spack_using_command_pos 0 env activate' -f -a '(__fish_spack_environments)' complete -c spack -n '__fish_spack_using_command env activate' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command env activate' -s h -l help -d 'show this help message and exit' @@ -1441,10 +1441,10 @@ complete -c spack -n '__fish_spack_using_command env activate' -l bat -f -a shel complete -c spack -n '__fish_spack_using_command env activate' -l bat -d 'print bat commands to activate the environment' complete -c spack -n '__fish_spack_using_command env activate' -l pwsh -f -a shell complete -c spack -n '__fish_spack_using_command env activate' -l pwsh -d 'print powershell commands to activate environment' -complete -c spack -n '__fish_spack_using_command env activate' -s v -l with-view -f -a with_view -complete -c spack -n '__fish_spack_using_command env activate' -s v -l with-view -d 'update PATH, etc., with associated view' -complete -c spack -n '__fish_spack_using_command env activate' -s V -l without-view -f -a with_view -complete -c spack -n '__fish_spack_using_command env activate' -s V -l without-view -d 'do not update PATH, etc., with associated view' +complete -c spack -n '__fish_spack_using_command env activate' -l with-view -s v -r -f -a with_view +complete -c spack -n '__fish_spack_using_command env activate' -l with-view -s v -r -d 'set runtime environment variables for specific view' +complete -c spack -n '__fish_spack_using_command env activate' -l without-view -s V -f -a without_view +complete -c spack -n '__fish_spack_using_command env activate' -l without-view -s V -d 'do not set runtime environment variables for any view' complete -c spack -n '__fish_spack_using_command env activate' -s p -l prompt -f -a prompt complete -c spack -n '__fish_spack_using_command env activate' -s p -l prompt -d 'decorate the command line prompt when activating' complete -c spack -n '__fish_spack_using_command env activate' -l temp -f -a temp From 908c72a372282b4685dda4932d8537762703b703 Mon Sep 17 00:00:00 2001 From: wspear Date: Tue, 17 Oct 2023 10:23:15 -0700 Subject: [PATCH 218/408] Support apple-clang in pdt (#40582) --- var/spack/repos/builtin/packages/pdt/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py index 4f3ccac0c40e73..c8665aaac27758 100644 --- a/var/spack/repos/builtin/packages/pdt/package.py +++ b/var/spack/repos/builtin/packages/pdt/package.py @@ -54,12 +54,12 @@ def configure(self, spec, prefix): options.append("-pgCC") elif self.compiler.name == "gcc": options.append("-GNU") - elif self.compiler.name == "clang": + elif self.compiler.name == "clang" or self.compiler.name == "apple-clang": options.append("-clang") 
elif self.compiler.name == "cce": options.append("-CC") else: - raise InstallError("Unknown/unsupported compiler family") + raise InstallError("Unknown/unsupported compiler family: " + self.compiler.name) if "+pic" in spec: options.append("-useropt=" + self.compiler.cxx_pic_flag) From 6a87523d431abc24962d1700926ef35177381c12 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 17 Oct 2023 20:12:09 +0200 Subject: [PATCH 219/408] patchelf: fix compilation with GCC 7 (#40581) --- .../repos/builtin/packages/patchelf/513.patch | 25 +++++++++++++++++++ .../builtin/packages/patchelf/package.py | 3 +++ 2 files changed, 28 insertions(+) create mode 100644 var/spack/repos/builtin/packages/patchelf/513.patch diff --git a/var/spack/repos/builtin/packages/patchelf/513.patch b/var/spack/repos/builtin/packages/patchelf/513.patch new file mode 100644 index 00000000000000..e5e8dd7174c33c --- /dev/null +++ b/var/spack/repos/builtin/packages/patchelf/513.patch @@ -0,0 +1,25 @@ +From 5fb5d82637c1b547b800b5994a1f5342b3224da4 Mon Sep 17 00:00:00 2001 +From: Rosen Penev +Date: Sat, 12 Aug 2023 11:46:14 -0700 +Subject: [PATCH] fix compilation with GCC7 + +CTAD is not working here. + +Signed-off-by: Rosen Penev +--- + src/patchelf.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/patchelf.cc b/src/patchelf.cc +index 82b4b46c..d6000160 100644 +--- a/src/patchelf.cc ++++ b/src/patchelf.cc +@@ -2069,7 +2069,7 @@ void ElfFile::rebuildGnuHashTable(span strTab, span> tmp(dst.begin(), dst.end()); + for (size_t i = 0; i < tmp.size(); ++i) + dst[old2new[i]] = tmp[i]; + }; diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index d5992b8e644407..2630085d4eeaf7 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -39,6 +39,9 @@ class Patchelf(AutotoolsPackage): conflicts("%gcc@:6", when="@0.14:", msg="Requires C++17 support") conflicts("%clang@:3", when="@0.14:", msg="Requires C++17 support") + # GCC 7.5 doesn't have __cpp_deduction_guides >= 201606 + patch("513.patch", when="@0.18: %gcc@:7") + def url_for_version(self, version): if version < Version("0.12"): return "https://nixos.org/releases/patchelf/patchelf-{0}/patchelf-{1}.tar.gz".format( From e7a174f37568041332975e692d1f869cad70bc7e Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Tue, 17 Oct 2023 14:14:17 -0400 Subject: [PATCH 220/408] hiop: 1.0.1 release (#40580) --- var/spack/repos/builtin/packages/hiop/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index 0ee82f9c32a0e8..ff62c7da56c0ee 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -22,6 +22,7 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): maintainers("ryandanehy", "cameronrutherford", "pelesh") # Most recent tagged snapshot is the preferred version when profiling. + version("1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True) version("1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True) version("0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True) version("0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True) From 6014b1cc552878fa532f49cf440d09d838073b74 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 17 Oct 2023 13:25:18 -0500 Subject: [PATCH 221/408] py-rtree: add v1.1.0 (#40575) --- var/spack/repos/builtin/packages/py-rtree/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-rtree/package.py b/var/spack/repos/builtin/packages/py-rtree/package.py index 7cfa444072f451..7caa6d4f78bb82 100644 --- a/var/spack/repos/builtin/packages/py-rtree/package.py +++ b/var/spack/repos/builtin/packages/py-rtree/package.py @@ -14,13 +14,14 @@ class PyRtree(PythonPackage): maintainers("adamjstewart", "hobu") + version("1.1.0", sha256="6f8ee504dde5d005b25b08aaf5be0b3404af3ad5fece6e1ddcde35908a798a95") version("1.0.1", sha256="222121699c303a64065d849bf7038b1ecabc37b65c7fa340bedb38ef0e805429") version("1.0.0", sha256="d0483482121346b093b9a42518d40f921adf445915b7aea307eb26768c839682") version("0.9.7", sha256="be8772ca34699a9ad3fb4cfe2cfb6629854e453c10b3328039301bbfc128ca3e") version("0.8.3", sha256="6cb9cf3000963ea6a3db777a597baee2bc55c4fc891e4f1967f262cc96148649") - depends_on("python@3.7:", when="@1:", type=("build", "run")) - depends_on("python@3:", when="@0.9.4:", type=("build", "run")) + depends_on("python@3.8:", when="@1.1:", type=("build", "run")) + depends_on("py-setuptools@61:", when="@1.1:", type="build") depends_on("py-setuptools@39.2:", when="@1:", type="build") depends_on("py-setuptools", type="build") depends_on("py-typing-extensions@3.7:", when="@1: ^python@:3.7", type=("build", "run")) From de0dc3046e50553cc653e35abe5fd1481b79a067 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Tue, 17 Oct 2023 20:33:13 +0200 Subject: [PATCH 222/408] Add mpi_f08 variant to CP2K (#40574) * add mpi_f08 variant * add conflict * add conflict with released versions of cp2k and +mpi_f08 --- var/spack/repos/builtin/packages/cp2k/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index a493b906cbb5f6..94dfbfec400418 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -103,6 +103,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): ) variant("pytorch", default=False, description="Enable libtorch support") variant("quip", default=False, description="Enable quip support") + variant("mpi_f08", default=False, description="Use MPI F08 module") variant( "enable_regtests", @@ -203,6 +204,9 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("mpi@2:") depends_on("mpi@3:", when="@2023.1:") depends_on("scalapack") + depends_on("mpich+fortran", when="^mpich") + + conflicts("~mpi_f08", when="^mpich@4.1:") with when("+cosma"): depends_on("cosma+scalapack") @@ -277,6 +281,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("dbcsr+openmp", when="+openmp") depends_on("dbcsr+cuda", when="+cuda") depends_on("dbcsr+rocm", when="+rocm") + conflicts("+mpi_f08", when="@:2023.2") # CP2K needs compiler specific compilation flags, e.g. 
optflags conflicts("%apple-clang") @@ -540,6 +545,9 @@ def edit(self, spec, prefix): libs.extend(mpi) libs.extend(self.compiler.stdcxx_libs) + if "+mpi_f08" in spec: + cppflags.append("-D__MPI_F08") + if "wannier90" in spec: cppflags.append("-D__WANNIER90") wannier = join_path(spec["wannier90"].libs.directories[0], "libwannier.a") @@ -947,6 +955,7 @@ def cmake_args(self): self.define_from_variant("CP2K_USE_VORI", "libvori"), self.define_from_variant("CP2K_USE_SPLA", "spla"), self.define_from_variant("CP2K_USE_QUIP", "quip"), + self.define_from_variant("CP2K_USE_MPI_F08", "mpi_f08"), ] # we force the use elpa openmp threading support. might need to be revisited though From 3903440e60f5af90a670e8aed7fce66364857709 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 17 Oct 2023 11:33:59 -0700 Subject: [PATCH 223/408] Allow / in GitVersion (#39398) This commit allows version specifiers to refer to git branches that contain forward slashes. For example, the following is valid syntax now: pkg@git.releases/1.0 It also adds a new method `Spec.format_path(fmt)` which is like `Spec.format`, but also maps unsafe characters to `_` after interpolation. The difference is as follows: >>> Spec("pkg@git.releases/1.0").format("{name}/{version}") 'pkg/git.releases/1.0' >>> Spec("pkg@git.releases/1.0").format_path("{name}/{version}") 'pkg/git.releases_1.0' The `format_path` method is used in all projections. Notice that this method also maps `=` to `_` >>> Spec("pkg@git.main=1.0").format_path("{name}/{version}") 'pkg/git.main_1.0' which should avoid syntax issues when `Spec.prefix` is literally copied into a Makefile as sometimes happens in AutotoolsPackage or MakefilePackage --- lib/spack/llnl/util/filesystem.py | 31 ++++++++++ lib/spack/spack/binary_distribution.py | 12 ++-- lib/spack/spack/cmd/ci.py | 4 +- lib/spack/spack/cmd/install.py | 3 +- lib/spack/spack/directory_layout.py | 2 +- lib/spack/spack/filesystem_view.py | 6 +- lib/spack/spack/install_test.py | 2 +- lib/spack/spack/modules/common.py | 2 +- lib/spack/spack/modules/lmod.py | 9 ++- lib/spack/spack/package_base.py | 7 ++- lib/spack/spack/parser.py | 13 +++-- lib/spack/spack/spec.py | 41 +++++++++++++ lib/spack/spack/stage.py | 2 +- lib/spack/spack/test/spec_semantics.py | 80 ++++++++++++++++++++++++++ lib/spack/spack/test/spec_syntax.py | 8 +++ lib/spack/spack/test/versions.py | 19 ++++++ 16 files changed, 214 insertions(+), 27 deletions(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 8f4217049dd55a..47c66248b53aee 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -156,6 +156,37 @@ def lookup(name): shutil.copystat = copystat +def polite_path(components: Iterable[str]): + """ + Given a list of strings which are intended to be path components, + generate a path, and format each component to avoid generating extra + path entries. + + For example all "/", "\", and ":" characters will be replaced with + "_". Other characters like "=" will also be replaced. + """ + return os.path.join(*[polite_filename(x) for x in components]) + + +@memoized +def _polite_antipattern(): + # A regex of all the characters we don't want in a filename + return re.compile(r"[^A-Za-z0-9_.-]") + + +def polite_filename(filename: str) -> str: + """ + Replace generally problematic filename characters with underscores. + + This differs from sanitize_filename in that it is more aggressive in + changing characters in the name. 
For example it removes "=" which can + confuse path parsing in external tools. + """ + # This character set applies for both Windows and Linux. It does not + # account for reserved filenames in Windows. + return _polite_antipattern().sub("_", filename) + + def getuid(): if sys.platform == "win32": import ctypes diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 5559e898203c08..7484fee09793c2 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -797,11 +797,7 @@ def tarball_directory_name(spec): Return name of the tarball directory according to the convention -//-/ """ - return os.path.join( - str(spec.architecture), - f"{spec.compiler.name}-{spec.compiler.version}", - f"{spec.name}-{spec.version}", - ) + return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}") def tarball_name(spec, ext): @@ -809,10 +805,10 @@ def tarball_name(spec, ext): Return the name of the tarfile according to the convention --- """ - return ( - f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-" - f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}" + spec_formatted = spec.format_path( + "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" ) + return f"{spec_formatted}{ext}" def tarball_path_name(spec, ext): diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index b30483218a4a1f..cf2ee11c044b40 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -579,7 +579,9 @@ def ci_rebuild(args): "SPACK_COLOR=always", "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)), "-j$(nproc)", - "install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")), + "install-deps/{}".format( + ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}") + ), ], spack_cmd + ["install"] + root_install_args, ] diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 2f497320948ee8..b74f982755115b 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -240,8 +240,7 @@ def default_log_file(spec): """Computes the default filename for the log file and creates the corresponding directory if not present """ - fmt = "test-{x.name}-{x.version}-{hash}.xml" - basename = fmt.format(x=spec, hash=spec.dag_hash()) + basename = spec.format_path("test-{name}-{version}-{hash}.xml") dirname = fs.os.path.join(spack.paths.reports_path, "junit") fs.mkdirp(dirname) return fs.os.path.join(dirname, basename) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 46bb6c855743a0..c0741a037c0a03 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec): _check_concrete(spec) projection = spack.projections.get_projection(self.projections, spec) - path = spec.format(projection) + path = spec.format_path(projection) return str(Path(path)) def write_spec(self, spec, path): diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index f0e79afd7d352f..e6631fecbf66a4 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -500,7 +500,7 @@ def get_projection_for_spec(self, spec): proj = spack.projections.get_projection(self.projections, locator_spec) if proj: - return os.path.join(self._root, locator_spec.format(proj)) + return os.path.join(self._root, locator_spec.format_path(proj)) return 
self._root def get_all_specs(self): @@ -776,7 +776,7 @@ def get_relative_projection_for_spec(self, spec): spec = spec.package.extendee_spec p = spack.projections.get_projection(self.projections, spec) - return spec.format(p) if p else "" + return spec.format_path(p) if p else "" def get_projection_for_spec(self, spec): """ @@ -791,7 +791,7 @@ def get_projection_for_spec(self, spec): proj = spack.projections.get_projection(self.projections, spec) if proj: - return os.path.join(self._root, spec.format(proj)) + return os.path.join(self._root, spec.format_path(proj)) return self._root diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py index 0d8fa782b6165a..662a1536c4b827 100644 --- a/lib/spack/spack/install_test.py +++ b/lib/spack/spack/install_test.py @@ -1039,7 +1039,7 @@ def test_pkg_id(cls, spec): Returns: str: the install test package identifier """ - return spec.format("{name}-{version}-{hash:7}") + return spec.format_path("{name}-{version}-{hash:7}") @classmethod def test_log_name(cls, spec): diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 4a3485c8647959..4b60f52bf43d18 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -586,7 +586,7 @@ def use_name(self): if not projection: projection = self.conf.default_projections["all"] - name = self.spec.format(projection) + name = self.spec.format_path(projection) # Not everybody is working on linux... parts = name.split("/") name = os.path.join(*parts) diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index 5c001c9ead2898..d81e07e0bf9449 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -9,6 +9,7 @@ import posixpath from typing import Any, Dict, List +import llnl.util.filesystem as fs import llnl.util.lang as lang import spack.compilers @@ -283,8 +284,10 @@ def token_to_path(self, name, value): Returns: str: part of the path associated with the service """ + # General format for the path part - path_part_fmt = os.path.join("{token.name}", "{token.version}") + def path_part_fmt(token): + return fs.polite_path([f"{token.name}", f"{token.version}"]) # If we are dealing with a core compiler, return 'Core' core_compilers = self.conf.core_compilers @@ -296,13 +299,13 @@ def token_to_path(self, name, value): # CompilerSpec does not have a hash, as we are not allowed to # use different flavors of the same compiler if name == "compiler": - return path_part_fmt.format(token=value) + return path_part_fmt(token=value) # In case the hierarchy token refers to a virtual provider # we need to append a hash to the version to distinguish # among flavors of the same library (e.g. openblas~openmp vs. 
# openblas+openmp) - path = path_part_fmt.format(token=value) + path = path_part_fmt(token=value) path = "-".join([path, value.dag_hash(length=7)]) return path diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index 940c12c11ab92d..81cc9b8d61d823 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -991,13 +991,14 @@ def find_valid_url_for_version(self, version): return None def _make_resource_stage(self, root_stage, resource): + pretty_resource_name = fsys.polite_filename(f"{resource.name}-{self.version}") return ResourceStage( resource.fetcher, root=root_stage, resource=resource, name=self._resource_stage(resource), mirror_paths=spack.mirror.mirror_archive_paths( - resource.fetcher, os.path.join(self.name, f"{resource.name}-{self.version}") + resource.fetcher, os.path.join(self.name, pretty_resource_name) ), path=self.path, ) @@ -1008,8 +1009,10 @@ def _download_search(self): def _make_root_stage(self, fetcher): # Construct a mirror path (TODO: get this out of package.py) + format_string = "{name}-{version}" + pretty_name = self.spec.format_path(format_string) mirror_paths = spack.mirror.mirror_archive_paths( - fetcher, os.path.join(self.name, f"{self.name}-{self.version}"), self.spec + fetcher, os.path.join(self.name, pretty_name), self.spec ) # Construct a path where the stage should build.. s = self.spec diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index 5e46ddb1b17f7b..7e3532e9488ea2 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -76,7 +76,9 @@ IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)" DOTTED_IDENTIFIER = rf"(?:{IDENTIFIER}(?:\.{IDENTIFIER})+)" GIT_HASH = r"(?:[A-Fa-f0-9]{40})" -GIT_VERSION = rf"(?:(?:git\.(?:{DOTTED_IDENTIFIER}|{IDENTIFIER}))|(?:{GIT_HASH}))" +#: Git refs include branch names, and can contain "." 
and "/" +GIT_REF = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9./\-]*)" +GIT_VERSION_PATTERN = rf"(?:(?:git\.(?:{GIT_REF}))|(?:{GIT_HASH}))" NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*" @@ -127,7 +129,8 @@ class TokenType(TokenBase): # Dependency DEPENDENCY = r"(?:\^)" # Version - VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION})=(?:{VERSION}))" + VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))" + GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})" VERSION = rf"(?:@\s*(?:{VERSION_LIST}))" # Variants PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})" @@ -358,8 +361,10 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac compiler_name.strip(), compiler_version ) self.has_compiler = True - elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept( - TokenType.VERSION_HASH_PAIR + elif ( + self.ctx.accept(TokenType.VERSION_HASH_PAIR) + or self.ctx.accept(TokenType.GIT_VERSION) + or self.ctx.accept(TokenType.VERSION) ): if self.has_version: raise spack.spec.MultipleVersionError( diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index aa6afa668ebe74..5345c1c03ef5a6 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -54,6 +54,7 @@ import io import itertools import os +import pathlib import platform import re import socket @@ -4460,6 +4461,42 @@ def cformat(self, *args, **kwargs): kwargs.setdefault("color", None) return self.format(*args, **kwargs) + def format_path( + # self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None + self, + format_string: str, + _path_ctor: Optional[Callable[[Any], pathlib.PurePath]] = None, + ) -> str: + """Given a `format_string` that is intended as a path, generate a string + like from `Spec.format`, but eliminate extra path separators introduced by + formatting of Spec properties. + + Path separators explicitly added to the string are preserved, so for example + "{name}/{version}" would generate a directory based on the Spec's name, and + a subdirectory based on its version; this function guarantees though that + the resulting string would only have two directories (i.e. that if under + normal circumstances that `str(Spec.version)` would contain a path + separator, it would not in this case). 
+ """ + format_component_with_sep = r"\{[^}]*[/\\][^}]*}" + if re.search(format_component_with_sep, format_string): + raise SpecFormatPathError( + f"Invalid path format string: cannot contain {{/...}}\n\t{format_string}" + ) + + path_ctor = _path_ctor or pathlib.PurePath + format_string_as_path = path_ctor(format_string) + if format_string_as_path.is_absolute(): + output_path_components = [format_string_as_path.parts[0]] + input_path_components = list(format_string_as_path.parts[1:]) + else: + output_path_components = [] + input_path_components = list(format_string_as_path.parts) + output_path_components += [ + fs.polite_filename(self.format(x)) for x in input_path_components + ] + return str(path_ctor(*output_path_components)) + def __str__(self): sorted_nodes = [self] + sorted( self.traverse(root=False), key=lambda x: x.name or x.abstract_hash @@ -5391,6 +5428,10 @@ class SpecFormatStringError(spack.error.SpecError): """Called for errors in Spec format strings.""" +class SpecFormatPathError(spack.error.SpecError): + """Called for errors in Spec path-format strings.""" + + class SpecFormatSigilError(SpecFormatStringError): """Called for mismatched sigils and attributes in format strings""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 90fb193a63573c..d53ec5fee8995a 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -58,7 +58,7 @@ def compute_stage_name(spec): """Determine stage name given a spec""" default_stage_structure = stage_prefix + "{name}-{version}-{hash}" stage_name_structure = spack.config.get("config:stage_name", default=default_stage_structure) - return spec.format(format_string=stage_name_structure) + return spec.format_path(format_string=stage_name_structure) def create_stage_root(path: str) -> None: diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 662ea5ef0e0b65..579ba4486c8a36 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pathlib + import pytest import spack.directives @@ -1005,6 +1007,84 @@ def test_spec_override(self): assert new_spec.compiler_flags["cxxflags"] == ["-O1"] +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ("zlib@git.foo/bar", "{name}-{version}", str(pathlib.Path("zlib-git.foo_bar"))), + ("zlib@git.foo/bar", "{name}-{version}-{/hash}", None), + ("zlib@git.foo/bar", "{name}/{version}", str(pathlib.Path("zlib", "git.foo_bar"))), + ( + "zlib@{0}=1.0%gcc".format("a" * 40), + "{name}/{version}/{compiler}", + str(pathlib.Path("zlib", "{0}_1.0".format("a" * 40), "gcc")), + ), + ( + "zlib@git.foo/bar=1.0%gcc", + "{name}/{version}/{compiler}", + str(pathlib.Path("zlib", "git.foo_bar_1.0", "gcc")), + ), + ], +) +def test_spec_format_path(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected) + + +def _check_spec_format_path(spec_str, format_str, expected, path_ctor=None): + spec = Spec(spec_str) + if not expected: + with pytest.raises((spack.spec.SpecFormatPathError, spack.spec.SpecFormatStringError)): + spec.format_path(format_str, _path_ctor=path_ctor) + else: + formatted = spec.format_path(format_str, _path_ctor=path_ctor) + assert formatted == expected + + +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ( + "zlib@git.foo/bar", + r"C:\\installroot\{name}\{version}", + r"C:\installroot\zlib\git.foo_bar", + ), + ( + "zlib@git.foo/bar", + r"\\hostname\sharename\{name}\{version}", 
+ r"\\hostname\sharename\zlib\git.foo_bar", + ), + # Windows doesn't attribute any significance to a leading + # "/" so it is discarded + ("zlib@git.foo/bar", r"/installroot/{name}/{version}", r"installroot\zlib\git.foo_bar"), + ], +) +def test_spec_format_path_windows(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PureWindowsPath) + + +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ("zlib@git.foo/bar", r"/installroot/{name}/{version}", "/installroot/zlib/git.foo_bar"), + ("zlib@git.foo/bar", r"//installroot/{name}/{version}", "//installroot/zlib/git.foo_bar"), + # This is likely unintentional on Linux: Firstly, "\" is not a + # path separator for POSIX, so this is treated as a single path + # component (containing literal "\" characters); secondly, + # Spec.format treats "\" as an escape character, so is + # discarded (unless directly following another "\") + ( + "zlib@git.foo/bar", + r"C:\\installroot\package-{name}-{version}", + r"C__installrootpackage-zlib-git.foo_bar", + ), + # "\" is not a POSIX separator, and Spec.format treats "\{" as a literal + # "{", which means that the resulting format string is invalid + ("zlib@git.foo/bar", r"package\{name}\{version}", None), + ], +) +def test_spec_format_path_posix(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PurePosixPath) + + @pytest.mark.regression("3887") @pytest.mark.parametrize("spec_str", ["py-extension2", "extension1", "perl-extension"]) def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str): diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index b79b829f96a68e..d731fcd31c1ac5 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -517,6 +517,14 @@ def _specfile_for(spec_str, filename): [Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")], "@:0.4%nvhpc", ), + ( + "zlib@git.foo/bar", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.GIT_VERSION, "@git.foo/bar"), + ], + "zlib@git.foo/bar", + ), ], ) def test_parse_single_spec(spec_str, tokens, expected_roundtrip): diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index 1dcf28cd711040..50fcc197986353 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -675,6 +675,25 @@ def test_git_ref_comparisons(mock_git_version_info, install_mockery, mock_packag assert str(spec_branch.version) == "git.1.x=1.2" +def test_git_branch_with_slash(): + class MockLookup(object): + def get(self, ref): + assert ref == "feature/bar" + return "1.2", 0 + + v = spack.version.from_string("git.feature/bar") + assert isinstance(v, GitVersion) + v.attach_lookup(MockLookup()) + + # Create a version range + test_number_version = spack.version.from_string("1.2") + v.satisfies(test_number_version) + + serialized = VersionList([v]).to_dict() + v_deserialized = VersionList.from_dict(serialized) + assert v_deserialized[0].ref == "feature/bar" + + @pytest.mark.parametrize( "string,git", [ From 3f0f8b59ccb64a18f7a48ad1725fd39122dde251 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Tue, 17 Oct 2023 20:35:55 +0200 Subject: [PATCH 224/408] Add tracy 0.10 (#40573) --- var/spack/repos/builtin/packages/tracy-client/package.py | 1 + var/spack/repos/builtin/packages/tracy/package.py | 1 + 2 files changed, 2 insertions(+) diff --git 
a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py index 0d3e3e9fe5f343..dd219f31ee039a 100644 --- a/var/spack/repos/builtin/packages/tracy-client/package.py +++ b/var/spack/repos/builtin/packages/tracy-client/package.py @@ -15,6 +15,7 @@ class TracyClient(CMakePackage): maintainers("msimberg") version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version("0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a") version("0.8.1", sha256="004992012b2dc879a9f6d143cbf94d7ea30e88135db3ef08951605d214892891") diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py index 572e5d879b11a7..111b4a86534600 100644 --- a/var/spack/repos/builtin/packages/tracy/package.py +++ b/var/spack/repos/builtin/packages/tracy/package.py @@ -15,6 +15,7 @@ class Tracy(MakefilePackage): maintainers("msimberg") version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version("0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a") version("0.8.1", sha256="004992012b2dc879a9f6d143cbf94d7ea30e88135db3ef08951605d214892891") From b83693275465e67b24fe8d99ff4038eac5531178 Mon Sep 17 00:00:00 2001 From: Dennis Klein Date: Tue, 17 Oct 2023 20:58:46 +0200 Subject: [PATCH 225/408] xmlto: add more dependencies (#40578) `xmllint` is called by `xmlto` during generation of `libzmq`'s docs, so adding `libxml2`. The docbook deps and the patches are taken from https://src.fedoraproject.org/rpms/xmlto/blob/rawhide/f/xmlto.spec There are still many more dependencies missing, but this is out of scope of this patch (which is only concerned about the use case of `libzmq`). 
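A rough way to exercise the new dependencies is to load the package and confirm that the XML toolchain it shells out to is on the PATH. This is only an illustrative sanity check, not part of the patch, and assumes a clean install:

```
$ spack install xmlto
$ spack load xmlto
$ which xsltproc xmllint   # provided by the new libxslt/libxml2 run dependencies
$ xmlto --version
```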
--- var/spack/repos/builtin/packages/xmlto/package.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/xmlto/package.py b/var/spack/repos/builtin/packages/xmlto/package.py index 1a018bfa877ccc..ca5748188176fd 100644 --- a/var/spack/repos/builtin/packages/xmlto/package.py +++ b/var/spack/repos/builtin/packages/xmlto/package.py @@ -18,5 +18,18 @@ class Xmlto(AutotoolsPackage): version("0.0.28", sha256="2f986b7c9a0e9ac6728147668e776d405465284e13c74d4146c9cbc51fd8aad3") # FIXME: missing a lot of dependencies - depends_on("libxslt") + depends_on("docbook-xsl", type=("build", "run")) + depends_on("libxml2", type=("build", "run")) # xmllint + depends_on("libxslt", type=("build", "run")) # xsltconf depends_on("util-linux", type=("build", "run")) # getopt with support for longopts + + depends_on("docbook-xml", type="run") + + patch( + "https://src.fedoraproject.org/rpms/xmlto/raw/rawhide/f/xmlto-c99-1.patch", + sha256="056c8bebc25d8d1488cc6a3724e2bcafc0e5e0df5c50080559cdef99bd377839", + ) + patch( + "https://src.fedoraproject.org/rpms/xmlto/raw/rawhide/f/xmlto-c99-2.patch", + sha256="50e39b1810bbf22a1d67944086c5681bcd58b8c325dfb251d56ac15d088fc17a", + ) From 88caf01529a7f2206980bb0aadf768a7cc1ca61f Mon Sep 17 00:00:00 2001 From: jfavre Date: Tue, 17 Oct 2023 21:03:41 +0200 Subject: [PATCH 226/408] paraview: add variant for NVIDIA IndeX (#40577) * add variant for NVIDIA IndeX * remove whitespaces --- var/spack/repos/builtin/packages/paraview/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 9b43d5e92c8756..5760a9d68da7de 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -72,6 +72,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): variant("kits", default=True, description="Use module kits") variant("pagosa", default=False, description="Build the pagosa adaptor") variant("eyedomelighting", default=False, description="Enable Eye Dome Lighting feature") + variant("nvindex", default=False, description="Enable the pvNVIDIAIndeX plugin") variant("tbb", default=False, description="Enable multi-threaded parallelism with TBB") variant("adios2", default=False, description="Enable ADIOS2 support", when="@5.8:") variant("visitbridge", default=False, description="Enable VisItBridge support") @@ -609,6 +610,9 @@ def nvariant_bool(feature): if "+tbb" in spec: cmake_args.append("-DVTK_SMP_IMPLEMENTATION_TYPE=TBB") + if "+nvindex" in spec: + cmake_args.append("-DPARAVIEW_PLUGIN_ENABLE_pvNVIDIAIndeX:BOOL=ON") + # Hide git from Paraview so it will not use `git describe` # to find its own version number if spec.satisfies("@5.4.0:5.4.1"): From a9adce561b43486969cafcd36f5719c1eb545413 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 18 Oct 2023 05:22:55 -0400 Subject: [PATCH 227/408] abi.py: fix typo, add type-hints (#38216) Co-authored-by: Massimiliano Culpo --- lib/spack/spack/abi.py | 31 +++++++++++++++++++------------ lib/spack/spack/concretize.py | 2 +- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index c8111048dd9d95..dd12d6dbafe484 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -8,8 +8,8 @@ from llnl.util.lang import memoized import spack.spec +import spack.version from spack.compilers.clang import Clang -from spack.spec import CompilerSpec from 
spack.util.executable import Executable, ProcessError @@ -17,7 +17,9 @@ class ABI: """This class provides methods to test ABI compatibility between specs. The current implementation is rather rough and could be improved.""" - def architecture_compatible(self, target, constraint): + def architecture_compatible( + self, target: spack.spec.Spec, constraint: spack.spec.Spec + ) -> bool: """Return true if architecture of target spec is ABI compatible to the architecture of constraint spec. If either the target or constraint specs have no architecture, target is also defined @@ -34,7 +36,7 @@ def _gcc_get_libstdcxx_version(self, version): a compiler's libstdc++ or libgcc_s""" from spack.build_environment import dso_suffix - spec = CompilerSpec("gcc", version) + spec = spack.spec.CompilerSpec("gcc", version) compilers = spack.compilers.compilers_for_spec(spec) if not compilers: return None @@ -77,16 +79,20 @@ def _gcc_compiler_compare(self, pversion, cversion): return False return plib == clib - def _intel_compiler_compare(self, pversion, cversion): + def _intel_compiler_compare( + self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange + ) -> bool: """Returns true iff the intel version pversion and cversion are ABI compatible""" # Test major and minor versions. Ignore build version. - if len(pversion.version) < 2 or len(cversion.version) < 2: - return False - return pversion.version[:2] == cversion.version[:2] + pv = pversion.lo + cv = cversion.lo + return pv.up_to(2) == cv.up_to(2) - def compiler_compatible(self, parent, child, **kwargs): + def compiler_compatible( + self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False + ) -> bool: """Return true if compilers for parent and child are ABI compatible.""" if not parent.compiler or not child.compiler: return True @@ -95,7 +101,7 @@ def compiler_compatible(self, parent, child, **kwargs): # Different compiler families are assumed ABI incompatible return False - if kwargs.get("loose", False): + if loose: return True # TODO: Can we move the specialized ABI matching stuff @@ -116,9 +122,10 @@ def compiler_compatible(self, parent, child, **kwargs): return True return False - def compatible(self, target, constraint, **kwargs): + def compatible( + self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False + ) -> bool: """Returns true if target spec is ABI compatible to constraint spec""" - loosematch = kwargs.get("loose", False) return self.architecture_compatible(target, constraint) and self.compiler_compatible( - target, constraint, loose=loosematch + target, constraint, loose=loose ) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index e4acef401e44a7..6e85d66b154f51 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec): ), ) - def choose_virtual_or_external(self, spec): + def choose_virtual_or_external(self, spec: spack.spec.Spec): """Given a list of candidate virtual and external packages, try to find one that is most ABI compatible. """ From c33409479baa92f1a08deefb18e28e7765be488f Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Wed, 18 Oct 2023 03:58:19 -0700 Subject: [PATCH 228/408] Add license directive (#39346) This patch adds in a license directive to get the ball rolling on adding in license information about packages to spack. 
I'm primarily interested in just adding license into spack, but this would also help with other efforts that people are interested in such as adding license information to the ASP solve for concretization to make sure licenses are compatible. Usage: Specifying the specific license that a package is released under in a project's `package.py` is good practice. To specify a license, find the SPDX identifier for a project and then add it using the license directive: ```python license("") ``` For example, for Apache 2.0, you might write: ```python license("Apache-2.0") ``` Note that specifying a license without a when clause makes it apply to all versions and variants of the package, which might not actually be the case. For example, a project might have switched licenses at some point or have certain build configurations that include files that are licensed differently. To account for this, you can specify when licenses should be applied. For example, to specify that a specific license identifier should only apply to versions up to and including 1.5, you could write the following directive: ```python license("MIT", when="@:1.5") ``` --- lib/spack/docs/packaging_guide.rst | 27 ++++++++++++ lib/spack/spack/cmd/create.py | 3 ++ lib/spack/spack/cmd/info.py | 21 +++++++++ lib/spack/spack/directives.py | 43 +++++++++++++++++++ lib/spack/spack/test/cmd/create.py | 1 + lib/spack/spack/test/cmd/info.py | 1 + lib/spack/spack/test/directives.py | 38 ++++++++++++++++ .../packages/licenses-1/package.py | 18 ++++++++ .../repos/builtin/packages/zlib/package.py | 2 + 9 files changed, 154 insertions(+) create mode 100644 var/spack/repos/builtin.mock/packages/licenses-1/package.py diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index acc79ea3424090..ae6be5b4a6eb3e 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -6799,3 +6799,30 @@ To achieve backward compatibility with the single-class format Spack creates in Overall the role of the adapter is to route access to attributes of methods first through the ``*Package`` hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where the adapter role is to "emulate" a method resolution order like the one represented by the red arrows. + +------------------------------ +Specifying License Information +------------------------------ + +A significant portion of software that Spack packages is open source. Most open +source software is released under one or more common open source licenses. +Specifying the specific license that a package is released under in a project's +`package.py` is good practice. To specify a license, find the SPDX identifier for +a project and then add it using the license directive: + +.. code-block:: python + + license("") + +Note that specifying a license without a when clause makes it apply to all +versions and variants of the package, which might not actually be the case. +For example, a project might have switched licenses at some point or have +certain build configurations that include files that are licensed differently. +To account for this, you can specify when licenses should be applied. For +example, to specify that a specific license identifier should only apply +to versions up to and including 1.5, you could write the following directive: + +..
code-block:: python + + license("...", when="@:1.5") + diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 474e271d1791f4..32c6ed13e174b7 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -63,6 +63,9 @@ class {class_name}({base_class_name}): # notify when the package is updated. # maintainers("github_user1", "github_user2") + # FIXME: Add the SPDX identifier of the project's license below. + license("UNKNOWN") + {versions} {dependencies} diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index f0850d5dcf3200..5e667f487686e1 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -72,6 +72,10 @@ def variant(s): return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format +def license(s): + return spack.spec.VERSION_COLOR + s + plain_format + + class VariantFormatter: def __init__(self, variants): self.variants = variants @@ -348,6 +352,22 @@ def print_virtuals(pkg): color.cprint(" None") +def print_licenses(pkg): + """Output the licenses of the project.""" + + color.cprint("") + color.cprint(section_title("Licenses: ")) + + if len(pkg.licenses) == 0: + color.cprint(" None") + else: + pad = padder(pkg.licenses, 4) + for when_spec in pkg.licenses: + license_identifier = pkg.licenses[when_spec] + line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec) + color.cprint(line) + + def info(parser, args): spec = spack.spec.Spec(args.package) pkg_cls = spack.repo.PATH.get_pkg_class(spec.name) @@ -377,6 +397,7 @@ def info(parser, args): (args.all or not args.no_dependencies, print_dependencies), (args.all or args.virtuals, print_virtuals), (args.all or args.tests, print_tests), + (args.all or True, print_licenses), ] for print_it, func in sections: if print_it: diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 9ac992b2098bcb..7ebf68e54885f7 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -64,6 +64,7 @@ class OpenMpi(Package): "depends_on", "extends", "maintainers", + "license", "provides", "patch", "variant", @@ -862,6 +863,44 @@ def _execute_maintainer(pkg): return _execute_maintainer +def _execute_license(pkg, license_identifier: str, when): + # If when is not specified the license always holds + when_spec = make_when_spec(when) + if not when_spec: + return + + for other_when_spec in pkg.licenses: + if when_spec.intersects(other_when_spec): + when_message = "" + if when_spec != make_when_spec(None): + when_message = f"when {when_spec}" + other_when_message = "" + if other_when_spec != make_when_spec(None): + other_when_message = f"when {other_when_spec}" + err_msg = ( + f"{pkg.name} is specified as being licensed as {license_identifier} " + f"{when_message}, but it is also specified as being licensed under " + f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict." + ) + raise OverlappingLicenseError(err_msg) + + pkg.licenses[when_spec] = license_identifier + + +@directive("licenses") +def license(license_identifier: str, when=None): + """Add a new license directive, to specify the SPDX identifier the software is + distributed under. + + Args: + license_identifiers: A list of SPDX identifiers specifying the licenses + the software is distributed under. + when: A spec specifying when the license applies. 
+ """ + + return lambda pkg: _execute_license(pkg, license_identifier, when) + + @directive("requirements") def requires(*requirement_specs, policy="one_of", when=None, msg=None): """Allows a package to request a configuration to be present in all valid solutions. @@ -920,3 +959,7 @@ class DependencyPatchError(DirectiveError): class UnsupportedPackageDirective(DirectiveError): """Raised when an invalid or unsupported package directive is specified.""" + + +class OverlappingLicenseError(DirectiveError): + """Raised when two licenses are declared that apply on overlapping specs.""" diff --git a/lib/spack/spack/test/cmd/create.py b/lib/spack/spack/test/cmd/create.py index b99d221d02ae61..089dc8b0c52440 100644 --- a/lib/spack/spack/test/cmd/create.py +++ b/lib/spack/spack/test/cmd/create.py @@ -27,6 +27,7 @@ [r"TestNamedPackage(Package)", r"def install(self"], ), (["file://example.tar.gz"], "example", [r"Example(Package)", r"def install(self"]), + (["-n", "test-license"], "test-license", [r'license("UNKNOWN")']), # Template-specific cases ( ["-t", "autoreconf", "/test-autoreconf"], diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index 4b2f5d2b3980ac..c4528f9852e284 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -88,6 +88,7 @@ def test_info_fields(pkg_query, parser, print_buffer): "Installation Phases:", "Virtual Packages:", "Tags:", + "Licenses:", ) args = parser.parse_args(["--all", pkg_query]) diff --git a/lib/spack/spack/test/directives.py b/lib/spack/spack/test/directives.py index e32ec6ac086976..677eb043a9e6ed 100644 --- a/lib/spack/spack/test/directives.py +++ b/lib/spack/spack/test/directives.py @@ -89,6 +89,44 @@ def test_maintainer_directive(config, mock_packages, package_name, expected_main assert pkg_cls.maintainers == expected_maintainers +@pytest.mark.parametrize( + "package_name,expected_licenses", [("licenses-1", [("MIT", "+foo"), ("Apache-2.0", "~foo")])] +) +def test_license_directive(config, mock_packages, package_name, expected_licenses): + pkg_cls = spack.repo.PATH.get_pkg_class(package_name) + for license in expected_licenses: + assert spack.spec.Spec(license[1]) in pkg_cls.licenses + assert license[0] == pkg_cls.licenses[spack.spec.Spec(license[1])] + + +def test_duplicate_exact_range_license(): + package = namedtuple("package", ["licenses", "name"]) + package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"} + package.name = "test_package" + + msg = ( + r"test_package is specified as being licensed as MIT when \+foo, but it is also " + r"specified as being licensed under Apache-2.0 when \+foo, which conflict." + ) + + with pytest.raises(spack.directives.OverlappingLicenseError, match=msg): + spack.directives._execute_license(package, "MIT", "+foo") + + +def test_overlapping_duplicate_licenses(): + package = namedtuple("package", ["licenses", "name"]) + package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"} + package.name = "test_package" + + msg = ( + r"test_package is specified as being licensed as MIT when \+bar, but it is also " + r"specified as being licensed under Apache-2.0 when \+foo, which conflict." + ) + + with pytest.raises(spack.directives.OverlappingLicenseError, match=msg): + spack.directives._execute_license(package, "MIT", "+bar") + + def test_version_type_validation(): # A version should be a string or an int, not a float, because it leads to subtle issues # such as 3.10 being interpreted as 3.1. 
diff --git a/var/spack/repos/builtin.mock/packages/licenses-1/package.py b/var/spack/repos/builtin.mock/packages/licenses-1/package.py new file mode 100644 index 00000000000000..d5c67830c98f2e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/licenses-1/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Licenses1(Package): + """Package with a licenses field.""" + + homepage = "https://www.example.com" + url = "https://www.example.com/license" + + license("MIT", when="+foo") + license("Apache-2.0", when="~foo") + + version("1.0", md5="0123456789abcdef0123456789abcdef") diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py index a4edbea4a03869..144e3b0ec610bf 100644 --- a/var/spack/repos/builtin/packages/zlib/package.py +++ b/var/spack/repos/builtin/packages/zlib/package.py @@ -60,6 +60,8 @@ class Zlib(MakefilePackage, Package): provides("zlib-api") + license("Zlib") + @property def libs(self): shared = "+shared" in self.spec From 4abe5d17a020bb238ed5c989234b60776488fb64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Wed, 18 Oct 2023 13:04:21 +0200 Subject: [PATCH 229/408] [fix] py-werkzeug: add constraint in python dependence (#40590) py-werkzeug@:0.12 does not work with python@3.10: Test with py-werkzeug 0.12.2 and python 3.10: ``` $ python3.10 -c 'import werkzeug' py-werkzeug-0.12.2/lib/python3.11/site-packages/werkzeug/datastructures.py", line 16, in from collections import Container, Iterable, MutableSet ImportError: cannot import name 'Container' from 'collections' ``` Test with py-werkzeug 0.12.2 and python 3.9: ``` python3.9 -c "from collections import Container" :1: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3, and in 3.10 it will stop working ``` --- var/spack/repos/builtin/packages/py-werkzeug/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-werkzeug/package.py b/var/spack/repos/builtin/packages/py-werkzeug/package.py index f0e7ca3f551a2a..e4099e015156c5 100644 --- a/var/spack/repos/builtin/packages/py-werkzeug/package.py +++ b/var/spack/repos/builtin/packages/py-werkzeug/package.py @@ -31,6 +31,7 @@ class PyWerkzeug(PythonPackage): version("0.11.11", sha256="e72c46bc14405cba7a26bd2ce28df734471bc9016bc8b4cb69466c2c14c2f7e5") depends_on("python@3.8:", when="@2.3:", type=("build", "run")) + depends_on("python@:3.9", when="@:0.12", type=("build", "run")) depends_on("py-flit-core@:3", when="@2.3.7:", type="build") depends_on("py-markupsafe@2.1.1:", when="@2.2:", type=("build", "run")) From 19917ca42d0363ed92b5808773a2896cb79e2dbf Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 18 Oct 2023 13:44:26 +0200 Subject: [PATCH 230/408] Fix dev-build keep_stage behavior (#40576) `spack dev-build` would incorrectly set `keep_stage=True` for the entire DAG, including for non-dev specs, even though the dev specs have a DIYStage which never deletes sources. 
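As a sketch of the intended behavior (the package name here is made up): a dev build keeps only the dev spec's own sources, which live in the user's checkout and are wrapped in a DIYStage anyway, while the stages of ordinary dependencies are cleaned up after a successful install, just as for any other `spack install`:

```
$ cd ~/src/mypkg              # user-managed checkout; never deleted by Spack
$ spack dev-build mypkg@1.0   # dependencies are built and staged as usual
$ ls $(spack location -S)     # expect no leftover stages for non-dev specs
```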
--- lib/spack/spack/package_base.py | 9 +--- lib/spack/spack/test/cmd/dev_build.py | 72 +++++++++++---------------- 2 files changed, 29 insertions(+), 52 deletions(-) diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index 81cc9b8d61d823..37a08e074b4300 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -1808,14 +1808,7 @@ def do_install(self, **kwargs): verbose (bool): Display verbose build output (by default, suppresses it) """ - # Non-transitive dev specs need to keep the dev stage and be built from - # source every time. Transitive ones just need to be built from source. - dev_path_var = self.spec.variants.get("dev_path", None) - if dev_path_var: - kwargs["keep_stage"] = True - - builder = PackageInstaller([(self, kwargs)]) - builder.install() + PackageInstaller([(self, kwargs)]).install() # TODO (post-34236): Update tests and all packages that use this as a # TODO (post-34236): package method to the routine made available to diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index 4b4636b3c30b1e..71ab195b649c8e 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -9,8 +9,10 @@ import llnl.util.filesystem as fs +import spack.build_environment import spack.environment as ev import spack.spec +import spack.store from spack.main import SpackCommand dev_build = SpackCommand("dev-build") @@ -20,9 +22,8 @@ pytestmark = pytest.mark.not_on_windows("does not run on windows") -def test_dev_build_basics(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_basics(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() assert "dev_path" in spec.variants @@ -39,9 +40,8 @@ def test_dev_build_basics(tmpdir, mock_packages, install_mockery): assert os.path.exists(str(tmpdir)) -def test_dev_build_before(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_before(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -56,9 +56,8 @@ def test_dev_build_before(tmpdir, mock_packages, install_mockery): assert not os.path.exists(spec.prefix) -def test_dev_build_until(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_until(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -74,10 +73,9 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery): assert not spack.store.STORE.db.query(spec, installed=True) -def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery): +def test_dev_build_until_last_phase(tmpdir, install_mockery): # Test that we ignore the last_phase argument if it is already last - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -94,9 +92,8 @@ def test_dev_build_until_last_phase(tmpdir, 
mock_packages, install_mockery): assert os.path.exists(str(tmpdir)) -def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_before_until(tmpdir, install_mockery, capsys): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -134,7 +131,6 @@ def mock_module_noop(*args): def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env): monkeypatch.setattr(os, "execvp", print_spack_cc) - monkeypatch.setattr(spack.build_environment, "module", mock_module_noop) with tmpdir.as_cwd(): @@ -142,7 +138,7 @@ def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, assert "lib/spack/env" in output -def test_dev_build_fails_already_installed(tmpdir, mock_packages, install_mockery): +def test_dev_build_fails_already_installed(tmpdir, install_mockery): spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) spec.concretize() @@ -175,7 +171,7 @@ def test_dev_build_fails_no_version(mock_packages): assert "dev-build spec must have a single, concrete version" in output -def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_path): +def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path): """Test Spack does dev builds for packages in develop section of env.""" # setup dev-build-test-install package for dev build build_dir = tmpdir.mkdir("build") @@ -191,7 +187,7 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_ with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-install@0.0.0 @@ -199,11 +195,9 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_ develop: dev-build-test-install: spec: dev-build-test-install@0.0.0 - path: %s + path: {os.path.relpath(str(build_dir), start=str(envdir))} """ - % os.path.relpath(str(build_dir), start=str(envdir)) ) - env("create", "test", "./spack.yaml") with ev.read("test"): install() @@ -213,9 +207,7 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_ assert f.read() == spec.package.replacement_string -def test_dev_build_env_version_mismatch( - tmpdir, mock_packages, install_mockery, mutable_mock_env_path -): +def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_env_path): """Test Spack constraints concretization by develop specs.""" # setup dev-build-test-install package for dev build build_dir = tmpdir.mkdir("build") @@ -231,7 +223,7 @@ def test_dev_build_env_version_mismatch( with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-install@0.0.0 @@ -239,9 +231,8 @@ def test_dev_build_env_version_mismatch( develop: dev-build-test-install: spec: dev-build-test-install@1.1.1 - path: %s + path: {build_dir} """ - % build_dir ) env("create", "test", "./spack.yaml") @@ -250,9 +241,7 @@ def test_dev_build_env_version_mismatch( install() -def test_dev_build_multiple( - tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch -): +def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock_fetch): """Test spack install with multiple developer builds Test that only the root needs to be specified in the environment @@ -284,20 +273,19 @@ def test_dev_build_multiple( 
with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-dependent@0.0.0 develop: dev-build-test-install: - path: %s + path: {leaf_dir} spec: dev-build-test-install@=1.0.0 dev-build-test-dependent: spec: dev-build-test-dependent@0.0.0 - path: %s + path: {root_dir} """ - % (leaf_dir, root_dir) ) env("create", "test", "./spack.yaml") @@ -316,9 +304,7 @@ def test_dev_build_multiple( assert f.read() == spec.package.replacement_string -def test_dev_build_env_dependency( - tmpdir, mock_packages, install_mockery, mock_fetch, mutable_mock_env_path -): +def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_mock_env_path): """ Test non-root specs in an environment are properly marked for dev builds. """ @@ -337,7 +323,7 @@ def test_dev_build_env_dependency( with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dependent-of-dev-build@0.0.0 @@ -345,11 +331,9 @@ def test_dev_build_env_dependency( develop: dev-build-test-install: spec: dev-build-test-install@0.0.0 - path: %s + path: {os.path.relpath(str(build_dir), start=str(envdir))} """ - % os.path.relpath(str(build_dir), start=str(envdir)) ) - env("create", "test", "./spack.yaml") with ev.read("test"): # concretize in the environment to get the dev build info @@ -371,7 +355,7 @@ def test_dev_build_env_dependency( @pytest.mark.parametrize("test_spec", ["dev-build-test-install", "dependent-of-dev-build"]) def test_dev_build_rebuild_on_source_changes( - test_spec, tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch + test_spec, tmpdir, install_mockery, mutable_mock_env_path, mock_fetch ): """Test dev builds rebuild on changes to source code. @@ -416,4 +400,4 @@ def reset_string(): fs.touch(os.path.join(str(build_dir), "test")) output = install() - assert "Installing %s" % test_spec in output + assert f"Installing {test_spec}" in output From 1f813bef906efc28ecf27926c65d329eb2cb2f08 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 18 Oct 2023 19:04:49 +0200 Subject: [PATCH 231/408] llvm: fix ncurses+termlib linking in lldb (#40594) --- var/spack/repos/builtin/packages/llvm/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index df0e762fc54329..b7a78c6fdd9fda 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -562,6 +562,16 @@ class Llvm(CMakePackage, CudaPackage): patch("add-include-for-libelf-llvm-12-14.patch", when="@12:14") patch("add-include-for-libelf-llvm-15.patch", when="@15") + @when("@14:17") + def patch(self): + # https://github.com/llvm/llvm-project/pull/69458 + filter_file( + r"${TERMINFO_LIB}", + r"${Terminfo_LIBRARIES}", + "lldb/source/Core/CMakeLists.txt", + string=True, + ) + # The functions and attributes below implement external package # detection for LLVM. 
See: # From f9d67ed0a940efea143d4ce345f278400efa724c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 18 Oct 2023 19:56:54 +0200 Subject: [PATCH 232/408] AutotoolsPackage / MakefilePackage: add gmake build dependency (#40380) --- .../docs/tables/system_prerequisites.csv | 1 - lib/spack/spack/build_systems/autotools.py | 1 + lib/spack/spack/build_systems/makefile.py | 8 +++-- lib/spack/spack/test/architecture.py | 28 ++++++--------- lib/spack/spack/test/cmd/ci.py | 21 +++++------ lib/spack/spack/test/installer.py | 14 ++++---- share/spack/qa/setup-env-test.fish | 36 +++++++++---------- share/spack/qa/setup-env-test.sh | 36 +++++++++---------- .../builder.test/packages/gmake/package.py | 18 ++++++++++ .../builtin.mock/packages/gmake/package.py | 3 ++ .../builtin.mock/packages/shell-a/package.py | 17 +++++++++ .../builtin.mock/packages/shell-b/package.py | 16 +++++++++ .../repos/builtin/packages/gmake/package.py | 20 +++++------ 13 files changed, 133 insertions(+), 86 deletions(-) create mode 100644 var/spack/repos/builder.test/packages/gmake/package.py create mode 100644 var/spack/repos/builtin.mock/packages/shell-a/package.py create mode 100644 var/spack/repos/builtin.mock/packages/shell-b/package.py diff --git a/lib/spack/docs/tables/system_prerequisites.csv b/lib/spack/docs/tables/system_prerequisites.csv index f275bcef5be291..7a72078cdd6a10 100644 --- a/lib/spack/docs/tables/system_prerequisites.csv +++ b/lib/spack/docs/tables/system_prerequisites.csv @@ -1,7 +1,6 @@ Name, Supported Versions, Notes, Requirement Reason Python, 3.6--3.12, , Interpreter for Spack C/C++ Compilers, , , Building software -make, , , Build software patch, , , Build software tar, , , Extract/create archives gzip, , , Compress/Decompress archives diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py index e78b87bee4f12c..760faf307bbc65 100644 --- a/lib/spack/spack/build_systems/autotools.py +++ b/lib/spack/spack/build_systems/autotools.py @@ -46,6 +46,7 @@ class AutotoolsPackage(spack.package_base.PackageBase): depends_on("gnuconfig", type="build", when="target=ppc64le:") depends_on("gnuconfig", type="build", when="target=aarch64:") depends_on("gnuconfig", type="build", when="target=riscv64:") + depends_on("gmake", type="build") conflicts("platform=windows") def flags_to_build_system_args(self, flags): diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py index feb6d37f24325b..25eec07095b176 100644 --- a/lib/spack/spack/build_systems/makefile.py +++ b/lib/spack/spack/build_systems/makefile.py @@ -9,7 +9,8 @@ import spack.builder import spack.package_base -from spack.directives import build_system, conflicts +from spack.directives import build_system, conflicts, depends_on +from spack.multimethod import when from ._checks import ( BaseBuilder, @@ -29,7 +30,10 @@ class MakefilePackage(spack.package_base.PackageBase): legacy_buildsystem = "makefile" build_system("makefile") - conflicts("platform=windows", when="build_system=makefile") + + with when("build_system=makefile"): + conflicts("platform=windows") + depends_on("gmake", type="build") @spack.builder.builder("makefile") diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index a5d7a0083d0239..7bea7a1d86a47f 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -13,8 +13,8 @@ import spack.concretize import spack.operating_systems import spack.platforms -import spack.spec import 
spack.target +from spack.spec import ArchSpec, CompilerSpec, Spec @pytest.fixture(scope="module") @@ -64,7 +64,7 @@ def test_user_input_combination(config, target_str, os_str): the operating system match. """ spec_str = "libelf os={} target={}".format(os_str, target_str) - spec = spack.spec.Spec(spec_str) + spec = Spec(spec_str) assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str)) assert spec.architecture.target == TEST_PLATFORM.target(target_str) @@ -114,7 +114,7 @@ def test_target_container_semantic(cpu_flag, target_name): ], ) def test_arch_spec_container_semantic(item, architecture_str): - architecture = spack.spec.ArchSpec(architecture_str) + architecture = ArchSpec(architecture_str) assert item in architecture @@ -141,24 +141,24 @@ def test_optimization_flags(compiler_spec, target_name, expected_flags, config): @pytest.mark.parametrize( "compiler,real_version,target_str,expected_flags", [ - (spack.spec.CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"), + (CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"), # Check that custom string versions are accepted ( - spack.spec.CompilerSpec("gcc@=10foo"), + CompilerSpec("gcc@=10foo"), "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client", ), # Check that we run version detection (4.4.0 doesn't support icelake) ( - spack.spec.CompilerSpec("gcc@=4.4.0-special"), + CompilerSpec("gcc@=4.4.0-special"), "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client", ), # Check that the special case for Apple's clang is treated correctly # i.e. it won't try to detect the version again - (spack.spec.CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"), + (CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"), ], ) def test_optimization_flags_with_custom_versions( @@ -180,8 +180,8 @@ def test_optimization_flags_with_custom_versions( ], ) def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple): - architecture = spack.spec.ArchSpec(architecture_tuple) - constraint = spack.spec.ArchSpec(constraint_tuple) + architecture = ArchSpec(architecture_tuple) + constraint = ArchSpec(constraint_tuple) assert not architecture.satisfies(constraint) @@ -204,16 +204,10 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch): # Monkeypatch so that all concretization is done as if the machine is core2 monkeypatch.setattr(spack.platforms.test.Test, "default", "core2") - - spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % ( - root_target_range, - dep_target_range, - ) - spec = spack.spec.Spec(spec_str) + spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}") with spack.concretize.disable_compiler_existence_check(): spec.concretize() - - assert str(spec).count("arch=test-debian6-%s" % result) == 2 + assert spec.target == spec["b"].target == result @pytest.mark.parametrize( diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index a0de63517af1e8..d02e1caa2dec74 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -1080,14 +1080,17 @@ def test_push_mirror_contents( ci.import_signing_key(_signing_key()) - spack_yaml_contents = """ + with tmpdir.as_cwd(): + with open("spack.yaml", "w") as f: + f.write( + f"""\ spack: definitions: - packages: [patchelf] specs: - $packages mirrors: - test-mirror: {0} + 
test-mirror: {mirror_url} ci: enable-artifacts-buildcache: True pipeline-gen: @@ -1107,15 +1110,8 @@ def test_push_mirror_contents( - nonbuildtag image: basicimage custom_attribute: custom! -""".format( - mirror_url - ) - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): +""" + ) env_cmd("create", "test", "./spack.yaml") with ev.read("test"): concrete_spec = Spec("patchelf").concretized() @@ -1126,7 +1122,8 @@ def test_push_mirror_contents( install_cmd("--add", "--keep-stage", json_path) - ci.push_mirror_contents(concrete_spec, mirror_url, True) + for s in concrete_spec.traverse(): + ci.push_mirror_contents(s, mirror_url, True) buildcache_path = os.path.join(mirror_dir.strpath, "build_cache") diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 6b42e591eb9dbe..d28d3f44c387bd 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -719,13 +719,12 @@ def test_check_deps_status_external(install_mockery, monkeypatch): installer = create_installer(const_arg) request = installer.build_requests[0] - # Mock the known dependent, b, as external so assumed to be installed + # Mock the dependencies as external so assumed to be installed monkeypatch.setattr(spack.spec.Spec, "external", True) installer._check_deps_status(request) - # exotic architectures will add dependencies on gnuconfig, which we want to ignore - installed = [x for x in installer.installed if not x.startswith("gnuconfig")] - assert installed[0].startswith("b") + for dep in request.spec.traverse(root=False): + assert inst.package_id(dep.package) in installer.installed def test_check_deps_status_upstream(install_mockery, monkeypatch): @@ -733,13 +732,12 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch): installer = create_installer(const_arg) request = installer.build_requests[0] - # Mock the known dependent, b, as installed upstream + # Mock the known dependencies as installed upstream monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True) installer._check_deps_status(request) - # exotic architectures will add dependencies on gnuconfig, which we want to ignore - installed = [x for x in installer.installed if not x.startswith("gnuconfig")] - assert installed[0].startswith("b") + for dep in request.spec.traverse(root=False): + assert inst.package_id(dep.package) in installer.installed def test_add_bootstrap_compilers(install_mockery, monkeypatch): diff --git a/share/spack/qa/setup-env-test.fish b/share/spack/qa/setup-env-test.fish index 87158840b1f5a4..86563b4b08ce3a 100755 --- a/share/spack/qa/setup-env-test.fish +++ b/share/spack/qa/setup-env-test.fish @@ -285,7 +285,7 @@ spt_succeeds which spack # create a fake mock package install and store its location for later title "Setup" echo "Creating a mock package installation" -spack -m install --fake a +spack -m install --fake shell-a # create a test environment for testing environment commands echo "Creating a mock environment" @@ -300,7 +300,7 @@ function spt_cleanup -p %self title "Cleanup" echo "Removing test packages before exiting." - spack -m uninstall -yf b a + spack -m uninstall -yf shell-b shell-a echo echo "$__spt_success tests succeeded." 
@@ -322,7 +322,7 @@ spt_contains "usage: spack " spack help --all title 'Testing `spack cd`' spt_contains "usage: spack cd " spack cd -h spt_contains "usage: spack cd " spack cd --help -spt_contains "cd $b_install" spack cd -i b +spt_contains "cd $b_install" spack cd -i shell-b title 'Testing `spack module`' spt_contains "usage: spack module " spack -m module -h @@ -330,34 +330,34 @@ spt_contains "usage: spack module " spack -m module --help spt_contains "usage: spack module " spack -m module title 'Testing `spack load`' -set _b_loc (spack -m location -i b) +set _b_loc (spack -m location -i shell-b) set _b_bin $_b_loc"/bin" -set _a_loc (spack -m location -i a) +set _a_loc (spack -m location -i shell-a) set _a_bin $_a_loc"/bin" -spt_contains "set -gx PATH $_b_bin" spack -m load --only package --fish b -spt_succeeds spack -m load b -set LIST_CONTENT (spack -m load b; spack load --list) -spt_contains "b@" echo $LIST_CONTENT -spt_does_not_contain "a@" echo $LIST_CONTENT +spt_contains "set -gx PATH $_b_bin" spack -m load --only package --fish shell-b +spt_succeeds spack -m load shell-b +set LIST_CONTENT (spack -m load shell-b; spack load --list) +spt_contains "shell-b@" echo $LIST_CONTENT +spt_does_not_contain "shell-a@" echo $LIST_CONTENT # test a variable MacOS clears and one it doesn't for recursive loads -spt_contains "set -gx PATH $_a_bin:$_b_bin" spack -m load --fish a -spt_succeeds spack -m load --only dependencies a -spt_succeeds spack -m load --only package a +spt_contains "set -gx PATH $_a_bin:$_b_bin" spack -m load --fish shell-a +spt_succeeds spack -m load --only dependencies shell-a +spt_succeeds spack -m load --only package shell-a spt_fails spack -m load d spt_contains "usage: spack load " spack -m load -h spt_contains "usage: spack load " spack -m load -h d spt_contains "usage: spack load " spack -m load --help title 'Testing `spack unload`' -spack -m load b a # setup -# spt_contains "module unload $b_module" spack -m unload b -spt_succeeds spack -m unload b +spack -m load shell-b shell-a # setup +# spt_contains "module unload $b_module" spack -m unload shell-b +spt_succeeds spack -m unload shell-b spt_succeeds spack -m unload --all spack -m unload --all # cleanup spt_fails spack -m unload -l -# spt_contains "module unload -l --arg $b_module" spack -m unload -l --arg b -spt_fails spack -m unload d +# spt_contains "module unload -l --arg $b_module" spack -m unload -l --arg shell-b +spt_fails spack -m unload shell-d spt_contains "usage: spack unload " spack -m unload -h spt_contains "usage: spack unload " spack -m unload -h d spt_contains "usage: spack unload " spack -m unload --help diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index 94589e5bcb7818..58feca69ea0487 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -60,12 +60,12 @@ cd() { # Create a fake mock package install and store its location for later title "Setup" echo "Creating a mock package installation" -spack -m install --fake a -a_install=$(spack location -i a) -a_module=$(spack -m module tcl find a) +spack -m install --fake shell-a +a_install=$(spack location -i shell-a) +a_module=$(spack -m module tcl find shell-a) -b_install=$(spack location -i b) -b_module=$(spack -m module tcl find b) +b_install=$(spack location -i shell-b) +b_module=$(spack -m module tcl find shell-b) # Create a test environment for testing environment commands echo "Creating a mock environment" @@ -80,7 +80,7 @@ cleanup() { title "Cleanup" echo "Removing test packages 
before exiting." - spack -m uninstall -yf b a + spack -m uninstall -yf shell-b shell-a } # ----------------------------------------------------------------------- @@ -96,7 +96,7 @@ contains "usage: spack " spack help --all title 'Testing `spack cd`' contains "usage: spack cd " spack cd -h contains "usage: spack cd " spack cd --help -contains "cd $b_install" spack cd -i b +contains "cd $b_install" spack cd -i shell-b title 'Testing `spack module`' contains "usage: spack module " spack -m module -h @@ -104,25 +104,25 @@ contains "usage: spack module " spack -m module --help contains "usage: spack module " spack -m module title 'Testing `spack load`' -contains "export PATH=$(spack -m location -i b)/bin" spack -m load --only package --sh b -succeeds spack -m load b -LIST_CONTENT=`spack -m load b; spack load --list` -contains "b@" echo $LIST_CONTENT -does_not_contain "a@" echo $LIST_CONTENT +contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --only package --sh shell-b +succeeds spack -m load shell-b +LIST_CONTENT=`spack -m load shell-b; spack load --list` +contains "shell-b@" echo $LIST_CONTENT +does_not_contain "shell-a@" echo $LIST_CONTENT fails spack -m load -l # test a variable MacOS clears and one it doesn't for recursive loads -contains "export PATH=$(spack -m location -i a)/bin" spack -m load --sh a -contains "export PATH=$(spack -m location -i b)/bin" spack -m load --sh b -succeeds spack -m load --only dependencies a -succeeds spack -m load --only package a +contains "export PATH=$(spack -m location -i shell-a)/bin" spack -m load --sh shell-a +contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b +succeeds spack -m load --only dependencies shell-a +succeeds spack -m load --only package shell-a fails spack -m load d contains "usage: spack load " spack -m load -h contains "usage: spack load " spack -m load -h d contains "usage: spack load " spack -m load --help title 'Testing `spack unload`' -spack -m load b a # setup -succeeds spack -m unload b +spack -m load shell-b shell-a # setup +succeeds spack -m unload shell-b succeeds spack -m unload --all spack -m unload --all # cleanup fails spack -m unload -l diff --git a/var/spack/repos/builder.test/packages/gmake/package.py b/var/spack/repos/builder.test/packages/gmake/package.py new file mode 100644 index 00000000000000..b3d5c50086e291 --- /dev/null +++ b/var/spack/repos/builder.test/packages/gmake/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gmake(Package): + """Dummy GMake Package""" + + homepage = "https://www.gnu.org/software/make" + url = "https://ftpmirror.gnu.org/make/make-4.4.tar.gz" + + version("4.4", sha256="ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed") + + def do_stage(self): + mkdirp(self.stage.source_path) diff --git a/var/spack/repos/builtin.mock/packages/gmake/package.py b/var/spack/repos/builtin.mock/packages/gmake/package.py index aa5dd8452bf27e..b3d5c50086e291 100644 --- a/var/spack/repos/builtin.mock/packages/gmake/package.py +++ b/var/spack/repos/builtin.mock/packages/gmake/package.py @@ -13,3 +13,6 @@ class Gmake(Package): url = "https://ftpmirror.gnu.org/make/make-4.4.tar.gz" version("4.4", sha256="ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed") + + def do_stage(self): + mkdirp(self.stage.source_path) diff --git a/var/spack/repos/builtin.mock/packages/shell-a/package.py b/var/spack/repos/builtin.mock/packages/shell-a/package.py new file mode 100644 index 00000000000000..3ff34102bfd628 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/shell-a/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class ShellA(Package): + """Simple package with one dependency for shell tests""" + + homepage = "http://www.example.com" + url = "http://www.example.com/shell-a-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + version("2.0", md5="abcdef0123456789abcdef0123456789") + + depends_on("shell-b") diff --git a/var/spack/repos/builtin.mock/packages/shell-b/package.py b/var/spack/repos/builtin.mock/packages/shell-b/package.py new file mode 100644 index 00000000000000..3db70f12189bcc --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/shell-b/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ShellB(Package): + """Simple package with no dependencies for shell tests""" + + homepage = "http://www.example.com" + url = "http://www.example.com/shell-b-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + version("0.9", md5="abcd456789abcdef0123456789abcdef") diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 3f795ad637c228..0cfbccb80e6739 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ b/var/spack/repos/builtin/packages/gmake/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Gmake(AutotoolsPackage, GNUMirrorPackage): +class Gmake(Package, GNUMirrorPackage): """GNU Make is a tool which controls the generation of executables and other non-source files of a program from the program's source files.""" @@ -64,17 +64,17 @@ def determine_version(cls, exe): return match.group(1) if match else None def configure_args(self): - args = [] - args.extend(self.with_or_without("guile")) - args.append("--disable-nls") - return args - - def build(self, spec, prefix): - with working_dir(self.build_directory): - Executable(os.path.join(self.stage.source_path, "build.sh"))() + return [ + "--with-guile" if self.spec.satisfies("+guile") else "--without-guile", + "--disable-nls", + ] def install(self, spec, prefix): - with working_dir(self.build_directory): + configure = Executable(join_path(self.stage.source_path, "configure")) + build_sh = Executable(join_path(self.stage.source_path, "build.sh")) + with working_dir(self.build_directory, create=True): + configure(f"--prefix={prefix}", *self.configure_args()) + build_sh() os.mkdir(prefix.bin) install("make", prefix.bin) os.symlink("make", prefix.bin.gmake) From 001190a36430f6d72b182a71ccc2160a96b26658 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 18 Oct 2023 23:16:05 +0200 Subject: [PATCH 233/408] unparse: also support generic type aliases (#40328) --- lib/spack/spack/util/unparse/unparser.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index 932ab8d0205b03..8ca4cd57a3eaa8 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -1083,6 +1083,10 @@ def visit_MatchOr(self, node): def visit_TypeAlias(self, node): self.fill("type ") self.dispatch(node.name) + if node.type_params: + self.write("[") + interleave(lambda: self.write(", "), self.dispatch, node.type_params) + self.write("]") self.write(" = ") self.dispatch(node.value) From fa91f85f19ce9de946ac71ac6f08610efedfb0b4 Mon Sep 17 00:00:00 2001 From: Annop Wongwathanarat Date: Thu, 19 Oct 2023 01:07:40 +0100 Subject: [PATCH 234/408] acfl: add version 23.10 (#40510) --- .../repos/builtin/packages/acfl/package.py | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/acfl/package.py b/var/spack/repos/builtin/packages/acfl/package.py index 2e7790a422a9f4..bbe476a0198770 100644 --- a/var/spack/repos/builtin/packages/acfl/package.py +++ b/var/spack/repos/builtin/packages/acfl/package.py @@ -37,6 +37,40 @@ } _versions = { + "23.10": { + "RHEL-7": ( + "c3bd4df3e5f6c97369237b0067e0a421dceb9c167d73f22f3da87f5025258314", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-7_aarch64.tar", + ), + "RHEL-8": ( + 
"2aea8890a0c0f60bbcc5ddb043d13bd7cd10501218b04cbeb19129449e7d7053", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-8_aarch64.tar", + ), + "RHEL-9": ( + "6c5c63c701875da7e87c6362be189bcbfaad678c08b81ec91e1e0252a321fae7", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-9_aarch64.tar", + ), + "SLES-15": ( + "e1e62544210bae495cd2503ef280a748fda637c373f1eb76f5ff30c9ec92c4c1", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_SLES-15_aarch64.tar", + ), + "Ubuntu-20.04": ( + "83dce8ea03de3b9b937ecfc611961a8e4d15eba4c267a4e47e22a876e403da96", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-20.04_aarch64.tar", + ), + "Ubuntu-22.04": ( + "3354f0ab73856a8a5cd99364cbec7a6b22621701790cb36c3e5f756b363e6d43", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar", + ), + "AmazonLinux-2": ( + "ee4fa47246f16323d05d91135ef70a8c355ff60209307754b8532b5744d9cfe9", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_AmazonLinux-2_aarch64.tar", + ), + "AmazonLinux-2023": ( + "640487dfc7ab6eca48b448264013c9aa972b84af9f0c6fc8734fa5e8dc008e43", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_AmazonLinux-2023_aarch64.tar", + ), + }, "23.04.1": { "RHEL-7": ( "5e84daaf0510f73c235723112f9241bbd744ed89eb4f70f089bac05cf2aad2c4", @@ -200,7 +234,7 @@ class Acfl(Package): """ homepage = "https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux" - url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_Ubuntu-22.04_aarch64.tar" + url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar" maintainers("annop-w") From 5b73081504caafb0bcb6ef90cee2eb0344cecb56 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 19 Oct 2023 09:09:45 +0200 Subject: [PATCH 235/408] julia: Fix build for @1.9 (#39045) julia@1.9 tries to download ittapi, which requires cmake. Disable it explicitly. 
--- var/spack/repos/builtin/packages/julia/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 1730f1960baafc..99e71f0b9a63f3 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -318,6 +318,8 @@ def edit(self, spec, prefix): "JULIA_PRECOMPILE:={0}".format("1" if spec.variants["precompile"].value else "0"), # we want to use `patchelf --add-rpath` instead of `patchelf --set-rpath` "override PATCHELF_SET_RPATH_ARG:=--add-rpath", # @1.9: + # Otherwise, Julia tries to download and build ittapi + "USE_INTEL_JITEVENTS:=0", # @1.9: ] options.append("USEGCC:={}".format("1" if "%gcc" in spec else "0")) From 49ef8b5f3e7b6bb9c686784a1be046b4c9f6e4d8 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Thu, 19 Oct 2023 00:55:06 -0700 Subject: [PATCH 236/408] busybox: respect compiler choice (#39239) --- var/spack/repos/builtin/packages/busybox/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/busybox/package.py b/var/spack/repos/builtin/packages/busybox/package.py index 4b74b34611d5a1..a71e28907fae8b 100644 --- a/var/spack/repos/builtin/packages/busybox/package.py +++ b/var/spack/repos/builtin/packages/busybox/package.py @@ -22,8 +22,8 @@ class Busybox(MakefilePackage): def build(self, spec, prefix): make("defconfig") - make() + make("CC={0}".format(spack_cc)) def install(self, spec, prefix): - make("install") + make("install", "CC={0}".format(spack_cc)) install_tree(".", prefix) From 0160a34c4c41931a5bbddf5ff38224bc8e9b4384 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Thu, 19 Oct 2023 00:55:57 -0700 Subject: [PATCH 237/408] bioawk: respect compiler choice (#39241) --- var/spack/repos/builtin/packages/bioawk/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/bioawk/package.py b/var/spack/repos/builtin/packages/bioawk/package.py index 57d0c629eb40c7..6754a660b1c036 100644 --- a/var/spack/repos/builtin/packages/bioawk/package.py +++ b/var/spack/repos/builtin/packages/bioawk/package.py @@ -22,6 +22,9 @@ class Bioawk(MakefilePackage): parallel = False + def build(self, spec, prefix): + make("CC={0}".format(spack_cc)) + def install(self, spec, prefix): mkdirp(prefix.bin) install("bioawk", prefix.bin) From 7d9e60a1f2fb449d6deb2a8b0cc7e3bf01e33bd4 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Thu, 19 Oct 2023 00:58:58 -0700 Subject: [PATCH 238/408] connect-proxy: respect compiler choice (#39243) --- var/spack/repos/builtin/packages/connect-proxy/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/connect-proxy/package.py b/var/spack/repos/builtin/packages/connect-proxy/package.py index 2ddcffb8b22a9d..e1fbb1391b78e5 100644 --- a/var/spack/repos/builtin/packages/connect-proxy/package.py +++ b/var/spack/repos/builtin/packages/connect-proxy/package.py @@ -17,6 +17,9 @@ class ConnectProxy(MakefilePackage): version("1.105", sha256="07366026b1f81044ecd8da9b5b5b51321327ecdf6ba23576271a311bbd69d403") + def build(self, spec, prefix): + make("CC={0}".format(spack_cc)) + def install(self, spec, prefix): mkdir(prefix.bin) install("connect", prefix.bin) From 78aebcaf7e9d218db2b901fdee5a2b313db63f98 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Thu, 19 Oct 2023 01:20:34 -0700 Subject: [PATCH 239/408] byte-unixbench: respect compiler choice (#39242) Co-authored-by: 
Harmen Stoppels --- var/spack/repos/builtin/packages/byte-unixbench/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/byte-unixbench/package.py b/var/spack/repos/builtin/packages/byte-unixbench/package.py index cb90ea5611676c..6cd3ec223bd6ec 100644 --- a/var/spack/repos/builtin/packages/byte-unixbench/package.py +++ b/var/spack/repos/builtin/packages/byte-unixbench/package.py @@ -16,6 +16,10 @@ class ByteUnixbench(MakefilePackage): build_directory = "UnixBench" + @property + def build_targets(self): + return [f"CC={spack_cc}"] + def install(self, spec, prefix): with working_dir(self.build_directory): install_tree(".", prefix) From 18eb0496933a992f62f931379f444d3177967f61 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Thu, 19 Oct 2023 05:03:54 -0700 Subject: [PATCH 240/408] Stand-alone test feature deprecation postponed to v0.22 (#40600) --- lib/spack/spack/package_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index 37a08e074b4300..416b16cefc0621 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -1157,7 +1157,7 @@ def install_test_root(self): """Return the install test root directory.""" tty.warn( "The 'pkg.install_test_root' property is deprecated with removal " - "expected v0.21. Use 'install_test_root(pkg)' instead." + "expected v0.22. Use 'install_test_root(pkg)' instead." ) return install_test_root(self) @@ -1829,7 +1829,7 @@ def cache_extra_test_sources(self, srcs): """ msg = ( "'pkg.cache_extra_test_sources(srcs) is deprecated with removal " - "expected in v0.21. Use 'cache_extra_test_sources(pkg, srcs)' " + "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' " "instead." ) warnings.warn(msg) From c7551a6de001894e6eb04a1cae48321dffac0bc6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 19 Oct 2023 16:00:45 +0200 Subject: [PATCH 241/408] ASP-based solver: single Spec instance per dag hash (#39590) Reused specs used to be referenced directly into the built spec. This might cause issues like in issue 39570 where two objects in memory represent the same node, because two reused specs were loaded from different sources but referred to the same spec by DAG hash. The issue is solved by copying concrete specs to a dictionary keyed by dag hash. --- lib/spack/spack/solver/asp.py | 114 ++++++++++++++++++++++------- lib/spack/spack/test/concretize.py | 45 +++++++++++- 2 files changed, 127 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 1ff5ccf31889e0..115de02096ce36 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -13,7 +13,7 @@ import re import types import warnings -from typing import List, NamedTuple, Optional, Sequence, Tuple, Union +from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union import archspec.cpu @@ -971,6 +971,70 @@ def _model_has_cycles(self, models): return cycle_result.unsatisfiable +class ConcreteSpecsByHash(collections.abc.Mapping): + """Mapping containing concrete specs keyed by DAG hash. + + The mapping is ensured to be consistent, i.e. if a spec in the mapping has a dependency with + hash X, it is ensured to be the same object in memory as the spec keyed by X. 
+ """ + + def __init__(self) -> None: + self.data: Dict[str, spack.spec.Spec] = {} + + def __getitem__(self, dag_hash: str) -> spack.spec.Spec: + return self.data[dag_hash] + + def add(self, spec: spack.spec.Spec) -> bool: + """Adds a new concrete spec to the mapping. Returns True if the spec was just added, + False if the spec was already in the mapping. + + Args: + spec: spec to be added + + Raises: + ValueError: if the spec is not concrete + """ + if not spec.concrete: + msg = ( + f"trying to store the non-concrete spec '{spec}' in a container " + f"that only accepts concrete" + ) + raise ValueError(msg) + + dag_hash = spec.dag_hash() + if dag_hash in self.data: + return False + + # Here we need to iterate on the input and rewire the copy. + self.data[spec.dag_hash()] = spec.copy(deps=False) + nodes_to_reconstruct = [spec] + + while nodes_to_reconstruct: + input_parent = nodes_to_reconstruct.pop() + container_parent = self.data[input_parent.dag_hash()] + + for edge in input_parent.edges_to_dependencies(): + input_child = edge.spec + container_child = self.data.get(input_child.dag_hash()) + # Copy children that don't exist yet + if container_child is None: + container_child = input_child.copy(deps=False) + self.data[input_child.dag_hash()] = container_child + nodes_to_reconstruct.append(input_child) + + # Rewire edges + container_parent.add_dependency_edge( + dependency_spec=container_child, depflag=edge.depflag, virtuals=edge.virtuals + ) + return True + + def __len__(self) -> int: + return len(self.data) + + def __iter__(self): + return iter(self.data) + + class SpackSolverSetup: """Class to set up and run a Spack concretization solve.""" @@ -994,9 +1058,7 @@ def __init__(self, tests=False): # (ID, CompilerSpec) -> dictionary of attributes self.compiler_info = collections.defaultdict(dict) - # hashes we've already added facts for - self.seen_hashes = set() - self.reusable_and_possible = {} + self.reusable_and_possible = ConcreteSpecsByHash() # id for dummy variables self._condition_id_counter = itertools.count() @@ -2318,25 +2380,29 @@ def define_variant_values(self): for pkg, variant, value in self.variant_values_from_specs: self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value))) - def _facts_from_concrete_spec(self, spec, possible): + def register_concrete_spec(self, spec, possible): # tell the solver about any installed packages that could # be dependencies (don't tell it about the others) - h = spec.dag_hash() - if spec.name in possible and h not in self.seen_hashes: - self.reusable_and_possible[h] = spec - try: - # Only consider installed packages for repo we know - spack.repo.PATH.get(spec) - except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError): - return + if spec.name not in possible: + return + + try: + # Only consider installed packages for repo we know + spack.repo.PATH.get(spec) + except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError) as e: + tty.debug(f"[REUSE] Issues when trying to reuse {spec.short_spec}: {str(e)}") + return + + self.reusable_and_possible.add(spec) + def concrete_specs(self): + """Emit facts for reusable specs""" + for h, spec in self.reusable_and_possible.items(): # this indicates that there is a spec like this installed self.gen.fact(fn.installed_hash(spec.name, h)) - # this describes what constraints it imposes on the solve self.impose(h, spec, body=True) self.gen.newline() - # Declare as possible parts of specs that are not in package.py # - Add versions to possible versions # - Add OS to 
possible OS's @@ -2347,15 +2413,12 @@ def _facts_from_concrete_spec(self, spec, possible): ) self.possible_oses.add(dep.os) - # add the hash to the one seen so far - self.seen_hashes.add(h) - def define_concrete_input_specs(self, specs, possible): # any concrete specs in the input spec list for input_spec in specs: for spec in input_spec.traverse(): if spec.concrete: - self._facts_from_concrete_spec(spec, possible) + self.register_concrete_spec(spec, possible) def setup( self, @@ -2422,14 +2485,13 @@ def setup( # get possible compilers self.possible_compilers = self.generate_possible_compilers(specs) - self.gen.h1("Concrete input spec definitions") + self.gen.h1("Reusable concrete specs") self.define_concrete_input_specs(specs, self.pkgs) - if reuse: - self.gen.h1("Reusable specs") self.gen.fact(fn.optimize_for_reuse()) for reusable_spec in reuse: - self._facts_from_concrete_spec(reusable_spec, self.pkgs) + self.register_concrete_spec(reusable_spec, self.pkgs) + self.concrete_specs() self.gen.h1("Generic statements on possible packages") node_counter.possible_packages_facts(self.gen, fn) @@ -2620,7 +2682,6 @@ def __init__(self, specs, hash_lookup=None): self._specs = {} self._result = None self._command_line_specs = specs - self._hash_specs = [] self._flag_sources = collections.defaultdict(lambda: set()) self._flag_compiler_defaults = set() @@ -2631,7 +2692,6 @@ def __init__(self, specs, hash_lookup=None): def hash(self, node, h): if node not in self._specs: self._specs[node] = self._hash_lookup[h] - self._hash_specs.append(node) def node(self, node): if node not in self._specs: @@ -2869,12 +2929,10 @@ def build_specs(self, function_tuples): # fix flags after all specs are constructed self.reorder_flags() - # cycle detection - roots = [spec.root for spec in self._specs.values() if not spec.root.installed] - # inject patches -- note that we' can't use set() to unique the # roots here, because the specs aren't complete, and the hash # function will loop forever. + roots = [spec.root for spec in self._specs.values() if not spec.root.installed] roots = dict((id(r), r) for r in roots) for root in roots.values(): spack.spec.Spec.inject_patches_variant(root) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index a794c8f1fd6b4c..d54ee6ff755aa4 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1224,7 +1224,7 @@ def test_external_package_versions(self, spec_str, is_external, expected): ) @pytest.mark.parametrize("mock_db", [True, False]) def test_reuse_does_not_overwrite_dev_specs( - self, dev_first, spec, mock_db, tmpdir, monkeypatch + self, dev_first, spec, mock_db, tmpdir, temporary_store, monkeypatch ): """Test that reuse does not mix dev specs with non-dev specs. 
@@ -1236,8 +1236,7 @@ def test_reuse_does_not_overwrite_dev_specs(
         # dev and non-dev specs that are otherwise identical
         spec = Spec(spec)
         dev_spec = spec.copy()
-        dev_constraint = "dev_path=%s" % tmpdir.strpath
-        dev_spec["dev-build-test-install"].constrain(dev_constraint)
+        dev_spec["dev-build-test-install"].constrain(f"dev_path={tmpdir.strpath}")
 
         # run the test in both orders
         first_spec = dev_spec if dev_first else spec
@@ -1250,7 +1249,7 @@ def mock_fn(*args, **kwargs):
             return [first_spec]
 
         if mock_db:
-            monkeypatch.setattr(spack.store.STORE.db, "query", mock_fn)
+            temporary_store.db.add(first_spec, None)
         else:
            monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn)
 
@@ -2166,6 +2165,24 @@ def test_dont_define_new_version_from_input_if_checksum_required(self, working_e
             # when checksums are required
             Spec("a@=3.0").concretized()
 
+    @pytest.mark.regression("39570")
+    @pytest.mark.db
+    def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
+        """Tests that reusing python with an explicit request on the command line, when the spec
+        also reuses a python extension from the DB, doesn't fail.
+        """
+        s = Spec("py-extension1").concretized()
+        python_hash = s["python"].dag_hash()
+        s.package.do_install(fake=True, explicit=True)
+
+        with spack.config.override("concretizer:reuse", True):
+            with_reuse = Spec(f"py-extension2 ^/{python_hash}").concretized()
+
+        with spack.config.override("concretizer:reuse", False):
+            without_reuse = Spec("py-extension2").concretized()
+
+        assert with_reuse.dag_hash() == without_reuse.dag_hash()
+
 
 @pytest.fixture()
 def duplicates_test_repository():
@@ -2300,3 +2317,23 @@ def test_pure_build_virtual_dependency(self, strategy):
 def test_drop_moving_targets(v_str, v_opts, checksummed):
     v = Version(v_str)
     assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed
+
+
+class TestConcreteSpecsByHash:
+    """Tests the container of concrete specs"""
+
+    @pytest.mark.parametrize("input_specs", [["a"], ["a foobar=bar", "b"], ["a foobar=baz", "b"]])
+    def test_adding_specs(self, input_specs, default_mock_concretization):
+        """Tests that concrete specs in the container are equivalent, but stored as different
+        objects in memory. 
+ """ + container = spack.solver.asp.ConcreteSpecsByHash() + input_specs = [Spec(s).concretized() for s in input_specs] + for s in input_specs: + container.add(s) + + for root in input_specs: + for node in root.traverse(root=True): + assert node == container[node.dag_hash()] + assert node.dag_hash() in container + assert node is not container[node.dag_hash()] From 4dff8b9355b10a847bebbe2c515a2c6ea5f07786 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 19 Oct 2023 16:11:42 +0200 Subject: [PATCH 242/408] ci: remove incorrect compilers.yaml (#40610) --- .../configs/darwin/aarch64/compilers.yaml | 27 ------------------- 1 file changed, 27 deletions(-) delete mode 100644 share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml diff --git a/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml b/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml deleted file mode 100644 index d5a0130341e246..00000000000000 --- a/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml +++ /dev/null @@ -1,27 +0,0 @@ -compilers: -- compiler: - spec: apple-clang@14.0.0 - paths: - cc: /usr/bin/clang - cxx: /usr/bin/clang++ - f77: /opt/homebrew/bin/gfortran - fc: /opt/homebrew/bin/gfortran - flags: {} - operating_system: ventura - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] -- compiler: - spec: gcc@12.2.0 - paths: - cc: /opt/homebrew/bin/gcc-12 - cxx: /opt/homebrew/bin/g++-12 - f77: /opt/homebrew/bin/gfortran-12 - fc: /opt/homebrew/bin/gfortran-12 - flags: {} - operating_system: ventura - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] From b14e1b77411a136e0b627ee2a297c243436d1e31 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 19 Oct 2023 09:31:02 -0500 Subject: [PATCH 243/408] petsc: add variant +sycl (#40562) * petsc: add variant +sycl * petsc: add in gmake as dependency - so that consistent make gets used between petsc and slepc builds [that can have different env for each of the builds] --- .../repos/builtin/packages/petsc/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 9d0d3a9016aa59..2f258edc17a183 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -87,6 +87,7 @@ class Petsc(Package, CudaPackage, ROCmPackage): variant("double", default=True, description="Switches between single and double precision") variant("complex", default=False, description="Build with complex numbers") variant("debug", default=False, description="Compile in debug mode") + variant("sycl", default=False, description="Enable sycl build") variant("metis", default=True, description="Activates support for metis and parmetis") variant( @@ -207,6 +208,8 @@ def check_fortran_compiler(self): patch("revert-3.18.0-ver-format-for-dealii.patch", when="@3.18.0") depends_on("diffutils", type="build") + # not listed as a "build" dependency - so that slepc build gets the same dependency + depends_on("gmake") # Virtual dependencies # Git repository needs sowing to build Fortran interface @@ -338,6 +341,9 @@ def check_fortran_compiler(self): when="+kokkos +rocm amdgpu_target=%s" % rocm_arch, ) + conflicts("~kokkos", when="+sycl", msg="+sycl requires +kokkos") + depends_on("kokkos+sycl", when="+sycl +kokkos") + phases = ["configure", "build", "install"] # Using the following tarballs @@ -434,6 +440,16 @@ def 
configure_options(self): else: options.append("--with-x=0") + if "+sycl" in spec: + sycl_compatible_compilers = ["icpx"] + if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers): + raise InstallError("PETSc's SYCL GPU Backend requires oneAPI CXX (icpx) compiler.") + options.append("--with-sycl=1") + options.append("--with-syclc=" + self.compiler.cxx) + options.append("SYCLPPFLAGS=-Wno-tautological-constant-compare") + else: + options.append("--with-sycl=0") + if "trilinos" in spec: if spec.satisfies("^trilinos+boost"): options.append("--with-boost=1") From 50d8ba85ee51df93af5b9f5676e05dba67a9a329 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Thu, 19 Oct 2023 10:04:59 -0600 Subject: [PATCH 244/408] gitlab ci: Rework how mirrors are configured (#39939) Improve how mirrors are used in gitlab ci, where we have until now thought of them as only a string. By configuring ci mirrors ahead of time using the proposed mirror templates, and by taking advantage of the expressiveness that spack now has for mirrors, this PR will allow us to easily switch the protocol/url we use for fetching binary dependencies. This change also deprecates some gitlab functionality and marks it for removal in Spack 0.23: - arguments to "spack ci generate": * --buildcache-destination * --copy-to - gitlab configuration options: * enable-artifacts-buildcache * temporary-storage-url-prefix --- lib/spack/docs/pipelines.rst | 10 ++ lib/spack/spack/ci.py | 126 ++++++++++++------ lib/spack/spack/cmd/ci.py | 74 +++++++--- lib/spack/spack/schema/ci.py | 2 + lib/spack/spack/test/ci.py | 6 +- lib/spack/spack/test/cmd/ci.py | 47 +++++++ .../gitlab/cloud_pipelines/.gitlab-ci.yml | 41 +++--- .../gitlab/cloud_pipelines/configs/ci.yaml | 20 +-- .../copy-only-protected-mirrors.yaml.in | 11 ++ .../configs/multi-src-mirrors.yaml.in | 16 +++ .../configs/single-src-pr-mirrors.yaml.in | 6 + .../single-src-protected-mirrors.yaml.in | 6 + .../stacks/aws-isc-aarch64/spack.yaml | 3 - .../cloud_pipelines/stacks/aws-isc/spack.yaml | 3 - .../stacks/aws-pcluster-icelake/spack.yaml | 2 - .../aws-pcluster-neoverse_n1/spack.yaml | 3 - .../aws-pcluster-neoverse_v1/spack.yaml | 3 - .../stacks/aws-pcluster-skylake/spack.yaml | 2 - .../stacks/build_systems/spack.yaml | 2 - .../stacks/data-vis-sdk/spack.yaml | 2 - .../stacks/e4s-cray-rhel/spack.yaml | 2 - .../stacks/e4s-cray-sles/spack.yaml | 2 - .../stacks/e4s-neoverse_v1/spack.yaml | 2 - .../stacks/e4s-oneapi/spack.yaml | 2 - .../stacks/e4s-power/spack.yaml | 2 - .../stacks/e4s-rocm-external/spack.yaml | 2 - .../cloud_pipelines/stacks/e4s/spack.yaml | 2 - .../stacks/gpu-tests/spack.yaml | 2 - .../stacks/ml-darwin-aarch64-mps/spack.yaml | 2 - .../stacks/ml-linux-x86_64-cpu/spack.yaml | 3 - .../stacks/ml-linux-x86_64-cuda/spack.yaml | 3 - .../stacks/ml-linux-x86_64-rocm/spack.yaml | 3 - .../stacks/radiuss-aws-aarch64/spack.yaml | 2 - .../stacks/radiuss-aws/spack.yaml | 4 +- .../cloud_pipelines/stacks/radiuss/spack.yaml | 3 - .../stacks/tutorial/spack.yaml | 2 - 36 files changed, 273 insertions(+), 150 deletions(-) create mode 100644 share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in create mode 100644 share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in create mode 100644 share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in create mode 100644 share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst index 
d594879aab51dd..4ebe90fb0b6de4 100644 --- a/lib/spack/docs/pipelines.rst +++ b/lib/spack/docs/pipelines.rst @@ -213,6 +213,16 @@ pipeline jobs. ``spack ci generate`` ^^^^^^^^^^^^^^^^^^^^^ +Throughout this documentation, references to the "mirror" mean the target +mirror which is checked for the presence of up-to-date specs, and where +any scheduled jobs should push built binary packages. In the past, this +defaulted to the mirror at index 0 in the mirror configs, and could be +overridden using the ``--buildcache-destination`` argument. Starting with +Spack 0.23, ``spack ci generate`` will require you to identify this mirror +by the name "buildcache-destination". While you can configure any number +of mirrors as sources for your pipelines, you will need to identify the +destination mirror by name. + Concretizes the specs in the active environment, stages them (as described in :ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk. During concretization of the environment, ``spack ci generate`` also writes a diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index bf5aaa79a3fba6..fca28362540623 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -49,6 +49,7 @@ TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] +# TODO: Remove this in Spack 0.23 SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror" JOB_NAME_FORMAT = ( "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}" @@ -678,7 +679,7 @@ def generate_gitlab_ci_yaml( remote_mirror_override (str): Typically only needed when one spack.yaml is used to populate several mirrors with binaries, based on some criteria. Spack protected pipelines populate different mirrors based - on branch name, facilitated by this option. + on branch name, facilitated by this option. 
DEPRECATED """ with spack.concretize.disable_compiler_existence_check(): with env.write_transaction(): @@ -775,17 +776,39 @@ def generate_gitlab_ci_yaml( "instead.", ) - if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1: - tty.die("spack ci generate requires an env containing a mirror") + pipeline_mirrors = spack.mirror.MirrorCollection(binary=True) + deprecated_mirror_config = False + buildcache_destination = None + if "buildcache-destination" in pipeline_mirrors: + if remote_mirror_override: + tty.die( + "Using the deprecated --buildcache-destination cli option and " + "having a mirror named 'buildcache-destination' at the same time " + "is not allowed" + ) + buildcache_destination = pipeline_mirrors["buildcache-destination"] + else: + deprecated_mirror_config = True + # TODO: This will be an error in Spack 0.23 - ci_mirrors = yaml_root["mirrors"] - mirror_urls = [url for url in ci_mirrors.values()] - remote_mirror_url = mirror_urls[0] + # TODO: Remove this block in spack 0.23 + remote_mirror_url = None + if deprecated_mirror_config: + if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1: + tty.die("spack ci generate requires an env containing a mirror") + + ci_mirrors = yaml_root["mirrors"] + mirror_urls = [url for url in ci_mirrors.values()] + remote_mirror_url = mirror_urls[0] spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None) if spack_buildcache_copy: buildcache_copies = {} - buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url + buildcache_copy_src_prefix = ( + buildcache_destination.fetch_url + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) buildcache_copy_dest_prefix = spack_buildcache_copy # Check for a list of "known broken" specs that we should not bother @@ -797,6 +820,7 @@ def generate_gitlab_ci_yaml( enable_artifacts_buildcache = False if "enable-artifacts-buildcache" in ci_config: + tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23") enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"] rebuild_index_enabled = True @@ -805,13 +829,15 @@ def generate_gitlab_ci_yaml( temp_storage_url_prefix = None if "temporary-storage-url-prefix" in ci_config: + tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23") temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"] # If a remote mirror override (alternate buildcache destination) was # specified, add it here in case it has already built hashes we might # generate. 
+ # TODO: Remove this block in Spack 0.23 mirrors_to_check = None - if remote_mirror_override: + if deprecated_mirror_config and remote_mirror_override: if spack_pipeline_type == "spack_protected_branch": # Overriding the main mirror in this case might result # in skipping jobs on a release pipeline because specs are @@ -831,8 +857,9 @@ def generate_gitlab_ci_yaml( cfg.default_modify_scope(), ) + # TODO: Remove this block in Spack 0.23 shared_pr_mirror = None - if spack_pipeline_type == "spack_pull_request": + if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request": stack_name = os.environ.get("SPACK_CI_STACK_NAME", "") shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name) spack.mirror.add( @@ -884,6 +911,7 @@ def generate_gitlab_ci_yaml( job_log_dir = os.path.join(pipeline_artifacts_dir, "logs") job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction") job_test_dir = os.path.join(pipeline_artifacts_dir, "tests") + # TODO: Remove this line in Spack 0.23 local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror") user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data") @@ -898,11 +926,11 @@ def generate_gitlab_ci_yaml( rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir) rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir) rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir) + # TODO: Remove this line in Spack 0.23 rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir) rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir) # Speed up staging by first fetching binary indices from all mirrors - # (including the override mirror we may have just added above). try: bindist.binary_index.update() except bindist.FetchCacheError as e: @@ -1113,6 +1141,7 @@ def main_script_replacements(cmd): }, ) + # TODO: Remove this block in Spack 0.23 if enable_artifacts_buildcache: bc_root = os.path.join(local_mirror_dir, "build_cache") job_object["artifacts"]["paths"].extend( @@ -1142,10 +1171,12 @@ def main_script_replacements(cmd): _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions) # Clean up remote mirror override if enabled - if remote_mirror_override: - spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope()) - if spack_pipeline_type == "spack_pull_request": - spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope()) + # TODO: Remove this block in Spack 0.23 + if deprecated_mirror_config: + if remote_mirror_override: + spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope()) + if spack_pipeline_type == "spack_pull_request": + spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope()) tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id)) @@ -1176,10 +1207,28 @@ def main_script_replacements(cmd): sync_job["needs"] = [ {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)} ] + + if "variables" not in sync_job: + sync_job["variables"] = {} + + sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = ( + buildcache_destination.fetch_url + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) + + if "buildcache-source" in pipeline_mirrors: + buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url + else: + # TODO: Remove this condition in Spack 0.23 + buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None) + sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source + output_object["copy"] = sync_job job_id += 1 if 
job_id > 0: + # TODO: Remove this block in Spack 0.23 if temp_storage_url_prefix: # There were some rebuild jobs scheduled, so we will need to # schedule a job to clean up the temporary storage location @@ -1213,6 +1262,13 @@ def main_script_replacements(cmd): signing_job["when"] = "always" signing_job["retry"] = {"max": 2, "when": ["always"]} signing_job["interruptible"] = True + if "variables" not in signing_job: + signing_job["variables"] = {} + signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = ( + buildcache_destination.push_url # need the s3 url for aws s3 sync + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) output_object["sign-pkgs"] = signing_job @@ -1221,13 +1277,13 @@ def main_script_replacements(cmd): stage_names.append("stage-rebuild-index") final_job = spack_ci_ir["jobs"]["reindex"]["attributes"] - index_target_mirror = mirror_urls[0] - if remote_mirror_override: - index_target_mirror = remote_mirror_override final_job["stage"] = "stage-rebuild-index" + target_mirror = remote_mirror_override or remote_mirror_url + if buildcache_destination: + target_mirror = buildcache_destination.push_url final_job["script"] = _unpack_script( final_job["script"], - op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror), + op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror), ) final_job["when"] = "always" @@ -1249,20 +1305,24 @@ def main_script_replacements(cmd): "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir, "SPACK_VERSION": spack_version, "SPACK_CHECKOUT_VERSION": version_to_clone, + # TODO: Remove this line in Spack 0.23 "SPACK_REMOTE_MIRROR_URL": remote_mirror_url, "SPACK_JOB_LOG_DIR": rel_job_log_dir, "SPACK_JOB_REPRO_DIR": rel_job_repro_dir, "SPACK_JOB_TEST_DIR": rel_job_test_dir, + # TODO: Remove this line in Spack 0.23 "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir, "SPACK_PIPELINE_TYPE": str(spack_pipeline_type), "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"), + # TODO: Remove this line in Spack 0.23 "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None", "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag), "SPACK_REBUILD_EVERYTHING": str(rebuild_everything), "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"), } - if remote_mirror_override: + # TODO: Remove this block in Spack 0.23 + if deprecated_mirror_config and remote_mirror_override: (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None) @@ -2002,43 +2062,23 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True): def create_buildcache( - input_spec: spack.spec.Spec, - *, - pipeline_mirror_url: Optional[str] = None, - buildcache_mirror_url: Optional[str] = None, - sign_binaries: bool = False, + input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False ) -> List[PushResult]: """Create the buildcache at the provided mirror(s). Arguments: input_spec: Installed spec to package and push - buildcache_mirror_url: URL for the buildcache mirror - pipeline_mirror_url: URL for the pipeline mirror + destination_mirror_urls: List of urls to push to sign_binaries: Whether or not to sign buildcache entry Returns: A list of PushResults, indicating success or failure. 
""" results = [] - # Create buildcache in either the main remote mirror, or in the - # per-PR mirror, if this is a PR pipeline - if buildcache_mirror_url: - results.append( - PushResult( - success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries), - url=buildcache_mirror_url, - ) - ) - - # Create another copy of that buildcache in the per-pipeline - # temporary storage mirror (this is only done if either - # artifacts buildcache is enabled or a temporary storage url - # prefix is set) - if pipeline_mirror_url: + for mirror_url in destination_mirror_urls: results.append( PushResult( - success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries), - url=pipeline_mirror_url, + success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url ) ) diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index cf2ee11c044b40..6c573193026fbf 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -191,6 +191,14 @@ def ci_generate(args): """ env = spack.cmd.require_active_env(cmd_name="ci generate") + if args.copy_to: + tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23") + + if args.buildcache_destination: + tty.warn( + "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23" + ) + output_file = args.output_file copy_yaml_to = args.copy_to run_optimizer = args.optimize @@ -264,12 +272,6 @@ def ci_rebuild(args): if not ci_config: tty.die("spack ci rebuild requires an env containing ci cfg") - tty.msg( - "SPACK_BUILDCACHE_DESTINATION={0}".format( - os.environ.get("SPACK_BUILDCACHE_DESTINATION", None) - ) - ) - # Grab the environment variables we need. These either come from the # pipeline generation step ("spack ci generate"), where they were written # out as variables, or else provided by GitLab itself. 
@@ -277,6 +279,7 @@ def ci_rebuild(args): job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR") job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR") repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR") + # TODO: Remove this in Spack 0.23 local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR") concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR") ci_pipeline_id = os.environ.get("CI_PIPELINE_ID") @@ -285,9 +288,12 @@ def ci_rebuild(args): job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME") job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH") spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE") + # TODO: Remove this in Spack 0.23 remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE") + # TODO: Remove this in Spack 0.23 remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL") spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME") + # TODO: Remove this in Spack 0.23 shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL") rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING") require_signing = os.environ.get("SPACK_REQUIRE_SIGNING") @@ -344,21 +350,36 @@ def ci_rebuild(args): full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False + pipeline_mirrors = spack.mirror.MirrorCollection(binary=True) + deprecated_mirror_config = False + buildcache_destination = None + if "buildcache-destination" in pipeline_mirrors: + buildcache_destination = pipeline_mirrors["buildcache-destination"] + else: + deprecated_mirror_config = True + # TODO: This will be an error in Spack 0.23 + # If no override url exists, then just push binary package to the # normal remote mirror url. + # TODO: Remove in Spack 0.23 buildcache_mirror_url = remote_mirror_override or remote_mirror_url + if buildcache_destination: + buildcache_mirror_url = buildcache_destination.push_url # Figure out what is our temporary storage mirror: Is it artifacts # buildcache? Or temporary-storage-url-prefix? In some cases we need to # force something or pipelines might not have a way to propagate build # artifacts from upstream to downstream jobs. + # TODO: Remove this in Spack 0.23 pipeline_mirror_url = None + # TODO: Remove this in Spack 0.23 temp_storage_url_prefix = None if "temporary-storage-url-prefix" in ci_config: temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"] pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id) + # TODO: Remove this in Spack 0.23 enable_artifacts_mirror = False if "enable-artifacts-buildcache" in ci_config: enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"] @@ -454,12 +475,14 @@ def ci_rebuild(args): # If we decided there should be a temporary storage mechanism, add that # mirror now so it's used when we check for a hash match already # built for this spec. 
+ # TODO: Remove this block in Spack 0.23 if pipeline_mirror_url: mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME) spack.mirror.add(mirror, cfg.default_modify_scope()) pipeline_mirrors.append(pipeline_mirror_url) # Check configured mirrors for a built spec with a matching hash + # TODO: Remove this block in Spack 0.23 mirrors_to_check = None if remote_mirror_override: if spack_pipeline_type == "spack_protected_branch": @@ -477,7 +500,8 @@ def ci_rebuild(args): ) pipeline_mirrors.append(remote_mirror_override) - if spack_pipeline_type == "spack_pull_request": + # TODO: Remove this in Spack 0.23 + if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request": if shared_pr_mirror_url != "None": pipeline_mirrors.append(shared_pr_mirror_url) @@ -499,6 +523,7 @@ def ci_rebuild(args): tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name)) for match in matches: tty.msg(" {0}".format(match["mirror_url"])) + # TODO: Remove this block in Spack 0.23 if enable_artifacts_mirror: matching_mirror = matches[0]["mirror_url"] build_cache_dir = os.path.join(local_mirror_dir, "build_cache") @@ -513,7 +538,8 @@ def ci_rebuild(args): # only want to keep the mirror being used by the current pipeline as it's binary # package destination. This ensures that the when we rebuild everything, we only # consume binary dependencies built in this pipeline. - if full_rebuild: + # TODO: Remove this in Spack 0.23 + if deprecated_mirror_config and full_rebuild: spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope()) # No hash match anywhere means we need to rebuild spec @@ -678,21 +704,25 @@ def ci_rebuild(args): # print out some instructions on how to reproduce this build failure # outside of the pipeline environment. if install_exit_code == 0: - if buildcache_mirror_url or pipeline_mirror_url: - for result in spack_ci.create_buildcache( - input_spec=job_spec, - buildcache_mirror_url=buildcache_mirror_url, - pipeline_mirror_url=pipeline_mirror_url, - sign_binaries=spack_ci.can_sign_binaries(), - ): - msg = tty.msg if result.success else tty.warn - msg( - "{} {} to {}".format( - "Pushed" if result.success else "Failed to push", - job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()), - result.url, - ) + mirror_urls = [buildcache_mirror_url] + + # TODO: Remove this block in Spack 0.23 + if pipeline_mirror_url: + mirror_urls.append(pipeline_mirror_url) + + for result in spack_ci.create_buildcache( + input_spec=job_spec, + destination_mirror_urls=mirror_urls, + sign_binaries=spack_ci.can_sign_binaries(), + ): + msg = tty.msg if result.success else tty.warn + msg( + "{} {} to {}".format( + "Pushed" if result.success else "Failed to push", + job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()), + result.url, ) + ) # If this is a develop pipeline, check if the spec that we just built is # on the broken-specs list. If so, remove it. 
diff --git a/lib/spack/spack/schema/ci.py b/lib/spack/spack/schema/ci.py index 92edf2f13968bc..9ba65b26820830 100644 --- a/lib/spack/spack/schema/ci.py +++ b/lib/spack/spack/schema/ci.py @@ -141,6 +141,7 @@ } ) +# TODO: Remove in Spack 0.23 ci_properties = { "anyOf": [ { @@ -166,6 +167,7 @@ properties = { "ci": { "oneOf": [ + # TODO: Replace with core-shared-properties in Spack 0.23 ci_properties, # Allow legacy format under `ci` for `config update ci` spack.schema.gitlab_ci.gitlab_ci_properties, diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index 1b9833894e20af..53ed8382520745 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -451,9 +451,7 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True) results = ci.create_buildcache( - None, - buildcache_mirror_url="file:///fake-url-one", - pipeline_mirror_url="file:///fake-url-two", + None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"] ) assert len(results) == 2 @@ -463,7 +461,7 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey assert result2.success assert result2.url == "file:///fake-url-two" - results = ci.create_buildcache(None, buildcache_mirror_url="file:///fake-url-one") + results = ci.create_buildcache(None, destination_mirror_urls=["file:///fake-url-one"]) assert len(results) == 1 assert results[0].success diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index d02e1caa2dec74..84e9e66bf05576 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -2209,3 +2209,50 @@ def test_gitlab_config_scopes( assert all([t in rebuild_tags for t in ["spack", "service"]]) expected_vars = ["CI_JOB_SIZE", "KUBERNETES_CPU_REQUEST", "KUBERNETES_MEMORY_REQUEST"] assert all([v in rebuild_vars for v in expected_vars]) + + +def test_ci_generate_mirror_config( + tmpdir, + mutable_mock_env_path, + install_mockery, + mock_packages, + monkeypatch, + ci_base_environment, + mock_binary_index, +): + """Make sure the correct mirror gets used as the buildcache destination""" + filename = str(tmpdir.join("spack.yaml")) + with open(filename, "w") as f: + f.write( + """\ +spack: + specs: + - archive-files + mirrors: + some-mirror: file:///this/is/a/source/mirror + buildcache-destination: file:///push/binaries/here + ci: + pipeline-gen: + - submapping: + - match: + - archive-files + build-job: + tags: + - donotcare + image: donotcare +""" + ) + + with tmpdir.as_cwd(): + env_cmd("create", "test", "./spack.yaml") + outputfile = str(tmpdir.join(".gitlab-ci.yml")) + + with ev.read("test"): + ci_cmd("generate", "--output-file", outputfile) + with open(outputfile) as of: + pipeline_doc = syaml.load(of.read()) + assert "rebuild-index" in pipeline_doc + reindex_job = pipeline_doc["rebuild-index"] + assert "script" in reindex_job + reindex_step = reindex_job["script"][0] + assert "file:///push/binaries/here" in reindex_step diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index b5e57e3e8429a6..905901ff295e5d 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -3,6 +3,12 @@ stages: [ "generate", "build", "publish" ] variables: SPACK_DISABLE_LOCAL_CONFIG: "1" SPACK_USER_CACHE_PATH: "${CI_PROJECT_DIR}/tmp/_user_cache/" + # PR_MIRROR_FETCH_DOMAIN: "https://binaries-prs.spack.io" + 
PR_MIRROR_FETCH_DOMAIN: "s3://spack-binaries-prs" + PR_MIRROR_PUSH_DOMAIN: "s3://spack-binaries-prs" + # PROTECTED_MIRROR_FETCH_DOMAIN: "https://binaries.spack.io" + PROTECTED_MIRROR_FETCH_DOMAIN: "s3://spack-binaries" + PROTECTED_MIRROR_PUSH_DOMAIN: "s3://spack-binaries" default: image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] } @@ -68,7 +74,9 @@ default: ######################################## .base-job: variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" + PIPELINE_MIRROR_TEMPLATE: "single-src-protected-mirrors.yaml.in" + # TODO: We can remove this when we drop the "deprecated" stack + PUSH_BUILDCACHE_DEPRECATED: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" rules: - if: $CI_COMMIT_REF_NAME == "develop" @@ -76,7 +84,7 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_protected_branch" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_REQUIRE_SIGNING: "True" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} @@ -86,7 +94,7 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_protected_branch" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_PRUNE_UNTOUCHED: "False" SPACK_PRUNE_UP_TO_DATE: "False" SPACK_REQUIRE_SIGNING: "True" @@ -98,8 +106,8 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_copy_only" - SPACK_SOURCE_MIRROR: "s3://spack-binaries/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME}" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" + PIPELINE_MIRROR_TEMPLATE: "copy-only-protected-mirrors.yaml.in" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} OIDC_TOKEN_AUDIENCE: "protected_binary_mirror" @@ -108,9 +116,16 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_pull_request" - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" + # TODO: We can remove this when we drop the "deprecated" stack + PUSH_BUILDCACHE_DEPRECATED: "${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" SPACK_PRUNE_UNTOUCHED: "True" SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH: "1" + # TODO: Change sync script to include target in branch name. Then we could + # TODO: have multiple types of "PR" pipeline here. It would be better if we could + # TODO: keep just this one and use a regex to capture the target branch, but so + # TODO: far gitlab doesn't support that. + PR_TARGET_REF_NAME: "develop" + PIPELINE_MIRROR_TEMPLATE: "multi-src-mirrors.yaml.in" AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY} OIDC_TOKEN_AUDIENCE: "pr_binary_mirror" @@ -126,13 +141,15 @@ default: - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . 
- export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" + - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" + < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" + - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack --config-scope "${SPACK_CI_CONFIG_ROOT}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" ${CI_STACK_CONFIG_SCOPES} ci generate --check-index-only - --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" after_script: @@ -182,7 +199,7 @@ default: - spack env activate --without-view . - spack ci generate --check-index-only - --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" + --buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" after_script: @@ -219,8 +236,7 @@ protected-publish: max: 2 when: ["runner_system_failure", "stuck_or_timeout_failure"] variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_PIPELINE_TYPE: "spack_protected_branch" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} @@ -253,11 +269,6 @@ protected-publish: # you should inlclude your custom definitions at the end of the of the # extends list. 
# -# Also note that if extending .base-job, the mirror url given in your -# spack.yaml should take the form: -# -# s3://spack-binaries/develop/${SPACK_CI_STACK_NAME} -# ######################################## # My Super Cool Pipeline ######################################## diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 5f7e904ba58bb3..9aad850b5df065 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -21,7 +21,8 @@ ci: - k=$CI_GPG_KEY_ROOT/intermediate_ci_signing_key.gpg; [[ -r $k ]] && spack gpg trust $k - k=$CI_GPG_KEY_ROOT/spack_public_key.gpg; [[ -r $k ]] && spack gpg trust $k script:: - - - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - - spack config blame mirrors + - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) - - spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py --prefix /home/software/spack:${CI_PROJECT_DIR} --log install_times.json @@ -40,10 +41,10 @@ ci: image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] } tags: ["aws"] script: - - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp + - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_BUILDCACHE_DESTINATION}/build_cache /tmp - /sign.sh - - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache - - aws s3 cp /tmp/public_keys ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/_pgp --recursive --exclude "*" --include "*.pub" + - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_BUILDCACHE_DESTINATION}/build_cache + - aws s3 cp /tmp/public_keys ${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp --recursive --exclude "*" --include "*.pub" id_tokens: GITLAB_OIDC_TOKEN: aud: "${OIDC_TOKEN_AUDIENCE}" @@ -54,14 +55,14 @@ ci: before_script: - - if [[ $CI_COMMIT_TAG == "v"* ]]; then export SPACK_REPLACE_VERSION=$(echo "$CI_COMMIT_TAG" | sed 's/\(v[[:digit:]]\+\.[[:digit:]]\+\).*/releases\/\1/'); fi - if [[ $CI_COMMIT_TAG == "develop-"* ]]; then export SPACK_REPLACE_VERSION=develop; fi - - export SPACK_BUILDCACHE_SOURCE=${SPACK_SOURCE_MIRROR//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} + - export SPACK_COPY_ONLY_SOURCE=${SPACK_BUILDCACHE_SOURCE//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} script: - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} - - echo Copying environment specs from ${SRC_MIRROR} to ${SPACK_BUILDCACHE_DESTINATION} - - spack buildcache sync "${SPACK_BUILDCACHE_SOURCE}" "${SPACK_BUILDCACHE_DESTINATION}" + - echo Copying environment specs from ${SPACK_COPY_ONLY_SOURCE} to ${SPACK_COPY_ONLY_DESTINATION} + - spack buildcache sync "${SPACK_COPY_ONLY_SOURCE}" "${SPACK_COPY_ONLY_DESTINATION}" - curl -fLsS https://spack.github.io/keys/spack-public-binary-key.pub -o /tmp/spack-public-binary-key.pub - - aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" - - spack buildcache update-index --keys "${SPACK_BUILDCACHE_DESTINATION}" + - aws s3 cp /tmp/spack-public-binary-key.pub 
"${SPACK_COPY_ONLY_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" + - spack buildcache update-index --keys "${SPACK_COPY_ONLY_DESTINATION}" when: "always" retry: max: 2 @@ -89,6 +90,7 @@ ci: GITLAB_OIDC_TOKEN: aud: "${OIDC_TOKEN_AUDIENCE}" + # TODO: Remove this block in Spack 0.23 - cleanup-job: tags: ["service"] variables: diff --git a/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in new file mode 100644 index 00000000000000..39e5c733b236d2 --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in @@ -0,0 +1,11 @@ +mirrors: + buildcache-source: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-destination: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in new file mode 100644 index 00000000000000..0ad46d5fc9014f --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in @@ -0,0 +1,16 @@ +mirrors: + buildcache-source: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${PR_TARGET_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${PR_TARGET_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-destination: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-shared: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in new file mode 100644 index 00000000000000..0a2775a4a27def --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in @@ -0,0 +1,6 @@ +mirrors: + buildcache-destination: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in new file mode 100644 index 00000000000000..a55cd7273750ee --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in @@ -0,0 +1,6 @@ +mirrors: + buildcache-destination: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml index 1c4e2de308eea2..abd8f4d0242df2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml @@ -131,9 +131,6 @@ spack: - - $compiler - - $target - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc-aarch64" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml index 0a898d1a752b37..038761ac1873fa 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml @@ -142,9 +142,6 @@ spack: - - $compiler - - $target - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml index 5ce6d1c8692e2c..85cf7660686d90 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml @@ -30,8 +30,6 @@ spack: - $optimized_configs # - $optimized_libs - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-icelake" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml index 5708338a2b3efe..50ba40992a7bc9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml @@ -30,9 +30,6 @@ spack: - $optimized_configs - $optimized_libs - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_n1" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml index f2df7696106aa9..50ba40992a7bc9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml @@ -30,9 +30,6 @@ spack: - $optimized_configs - $optimized_libs - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_v1" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml index 029dd67351c1f6..85cf7660686d90 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml @@ -30,8 +30,6 @@ spack: - $optimized_configs # - $optimized_libs - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-skylake" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml index 78a3ea785c827d..d154894830c155 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml @@ -21,7 +21,5 @@ spack: - - $default_specs - - $arch - mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" } - cdash: build-group: Build Systems diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index 
9963f4b777c4e9..bf298d606db0ea 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -58,8 +58,6 @@ spack: - ["~paraview +visit"] - [$^visit_specs] - mirrors: {mirror: s3://spack-binaries/develop/data-vis-sdk} - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml index 83e3d9f2905128..413fdf34eb28f9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml @@ -172,7 +172,5 @@ spack: # - variorum # variorum: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/lib64/libpals.so.0: undefined reference to `json_array_append_new@@libjansson.so.4' # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2 - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-cray-rhel" } - cdash: build-group: E4S Cray diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml index dace63659ec54f..c141cd9bf9233a 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml @@ -171,7 +171,5 @@ spack: # - variorum # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-cray-sles" } - cdash: build-group: E4S Cray SLES diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index b4e8114df67f51..db903c15c47a6f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -340,8 +340,6 @@ spack: # - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12 # - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-arm-neoverse_v1" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index ec86c35b33d055..d170b0a272772c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -236,8 +236,6 @@ spack: - py-scipy - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 72e06b060d2f1c..5f8f3d0e628dae 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -251,8 +251,6 @@ spack: # - trilinos +cuda cuda_arch=70 # trilinos: https://github.com/trilinos/Trilinos/issues/11630 # - upcxx +cuda cuda_arch=70 # upcxx: needs NVIDIA driver - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-power" } - ci: pipeline-gen: - build-job: diff --git 
a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index cd9addbef0548c..885dbb538b0476 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -335,8 +335,6 @@ spack: # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-rocm-external" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 65ab32e80d0579..410a379ee06321 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -382,8 +382,6 @@ spack: # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 - mirrors: { "mirror": "s3://spack-binaries/develop/e4s" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml index 69a88597457e6f..263d8e29b30578 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml @@ -49,8 +49,6 @@ spack: # FAILURES # - kokkos +wrapper +cuda cuda_arch=80 ^cuda@12.0.0 # https://github.com/spack/spack/issues/35378 - mirrors: { "mirror": "s3://spack-binaries/develop/gpu-tests" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml index c137b138ee3ccb..6d8a0b7491f9a3 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml @@ -82,8 +82,6 @@ spack: # - r-xgboost - xgboost - mirrors: { "mirror": "s3://spack-binaries/develop/ml-darwin-aarch64-mps" } - ci: pipeline-gen: - build-job-remove: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml index fa7bf02755be5a..71670d5a91568d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml @@ -76,9 +76,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-cpu - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml index 5a24d42f23242b..88291690382784 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml @@ -79,9 +79,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-cuda - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml 
b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml index e49d43db3d4654..620a95715b41e5 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml @@ -82,9 +82,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-rocm - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml index b05b45f76378cf..6453d2a5fe6722 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml @@ -38,8 +38,6 @@ spack: - - $compiler - - $target - mirrors: { "mirror": "s3://spack-binaries/develop/radiuss-aws-aarch64" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml index fd297ede91049a..ca7de563c44fe0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml @@ -28,7 +28,7 @@ spack: - mfem +cuda ^hypre+cuda - raja - raja +cuda - - umpire + - umpire - umpire +cuda - compiler: @@ -44,8 +44,6 @@ spack: - - $compiler - - $target - mirrors: { "mirror": "s3://spack-binaries/develop/radiuss-aws" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml index c80bcf10eed975..ca8e1a990519db 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml @@ -40,9 +40,6 @@ spack: - xbraid - zfp - mirrors: - mirror: "s3://spack-binaries/develop/radiuss" - specs: - matrix: - [$radiuss] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 4b39be884612f8..0bc36ce8e44447 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -49,8 +49,6 @@ spack: - $clang_packages - $gcc_spack_built_packages - mirrors: - mirror: s3://spack-binaries/develop/tutorial ci: pipeline-gen: - build-job: From c7212099fc9a1b6971469351a513d7863de76145 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 19 Oct 2023 19:15:18 +0200 Subject: [PATCH 245/408] libvorbis: drop -force_cpusubtype_ALL flag (#40616) This flag was only relevant when targeting powerpc from apple-clang, which we don't do. The flag is removed from apple-clang@15. Let's drop it unconditionally. 
--- var/spack/repos/builtin/packages/libvorbis/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/libvorbis/package.py b/var/spack/repos/builtin/packages/libvorbis/package.py index 9f4fc58fc99757..cfd0420ca36bd8 100644 --- a/var/spack/repos/builtin/packages/libvorbis/package.py +++ b/var/spack/repos/builtin/packages/libvorbis/package.py @@ -22,5 +22,8 @@ class Libvorbis(AutotoolsPackage): depends_on("pkgconfig", type="build") + def patch(self): + filter_file(r"-force_cpusubtype_ALL", "", "configure", string=True) + # `make check` crashes when run in parallel parallel = False From 039ea29f3606a84ce98c39164b0e40bbc9b10d6a Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Thu, 19 Oct 2023 11:16:42 -0600 Subject: [PATCH 246/408] Automated deployment to update package flux-core 2023-10-19 (#40605) Co-authored-by: github-actions --- var/spack/repos/builtin/packages/flux-core/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 09d5b2e4999da3..bb150b154dc9b8 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.55.0", sha256="2925b8a084e9d1069a96de7689b515ad6f2051ecfb9fbbe4d2643507de7ccd30") version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42") version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7") version("0.52.0", sha256="dca434238405e4cae4686c8143f2cc79919bfd9e26b09c980e1e5f69ffd0c448") From a1ccbcd16bd0d6fe1a08bf8002431e7952c55d54 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 19 Oct 2023 20:33:01 +0200 Subject: [PATCH 247/408] spack checksum: restore ability to select top n (#40531) The ability to select the top N versions got removed in the checksum overhaul, cause initially numbers were used for commands. Now that we settled on characters for commands, let's make numbers pick the top N again. 
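For illustration (not part of the patch itself): with this change, typing a number at the interactive version prompt keeps only the newest N versions and returns right away. A minimal sketch of that behavior follows; the import locations are inferred from the files touched below, and the example URLs mirror the new test.

    # Sketch only: an integer answer at the "action>" prompt selects the newest N versions.
    from spack.stage import interactive_version_filter
    from spack.version import Version

    url_dict = {
        Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
    }
    answers = iter(["2"])  # what a user would type at the prompt
    top_two = interactive_version_filter(url_dict, input=lambda prompt: next(answers))
    assert set(top_two) == {Version("1.1"), Version("1.0")}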
--- lib/spack/spack/stage.py | 23 +++++++++++++++++------ lib/spack/spack/test/cmd/checksum.py | 23 +++++++++++++++++++++++ 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index d53ec5fee8995a..1c7ebdec5c50df 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -921,7 +921,7 @@ def interactive_version_filter( print_header = True - print("commands:") + tty.info(colorize("Enter @*{number} of versions to take, or use a @*{command}:")) commands = ( "@*b{[c]}hecksum", "@*b{[e]}dit", @@ -931,10 +931,10 @@ def interactive_version_filter( "@*b{[r]}estart", "@*b{[q]}uit", ) - colify(list(map(colorize, commands)), indent=2) + colify(list(map(colorize, commands)), indent=4) try: - command = input(colorize("@*g{command>} ")).strip().lower() + command = input(colorize("@*g{action>} ")).strip().lower() except EOFError: print() command = "q" @@ -1039,9 +1039,20 @@ def interactive_version_filter( print() return None else: - tty.warn(f"Ignoring invalid command: {command}") - print_header = False - continue + # Last restort: filter the top N versions + try: + n = int(command) + invalid_command = n < 1 + except ValueError: + invalid_command = True + + if invalid_command: + tty.warn(f"Ignoring invalid command: {command}") + print_header = False + continue + + sorted_and_filtered = sorted_and_filtered[:n] + return {v: url_dict[v] for v in sorted_and_filtered} diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index 8001334e3e2142..b2fc9d5f6ce11c 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -202,6 +202,29 @@ def test_checksum_interactive_new_only(): } +def test_checksum_interactive_top_n(): + """Test integers select top n versions""" + input = input_from_commands("2", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + } + + +def test_checksum_interactive_unrecognized_command(): + """Unrecognized commands should be ignored""" + input = input_from_commands("-1", "0", "hello", "c") + v = {Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz"} + assert interactive_version_filter(v.copy(), input=input) == v + + def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage): pkg_cls = spack.repo.PATH.get_pkg_class("zlib") versions = [str(v) for v in pkg_cls.versions] From f7946c15aa5708c492f0f599072af2a6d5d56b9d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 19 Oct 2023 20:44:05 +0200 Subject: [PATCH 248/408] Improve setup build / run / test environment (#35737) This adds a `SetupContext` class which is responsible for setting package.py module globals, and computing the changes to environment variables for the build, test or run context. The class uses `effective_deptypes` which takes a list of specs (e.g. single item of a spec to build, or a list of environment roots) and a context (build, run, test), and outputs a flat list of specs that affect the environment together with a flag in what way they do so. This list is topologically ordered from root to leaf, so that one can be assured that dependents override variables set by dependencies, not the other way around. 
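For illustration (not part of the patch itself), a rough sketch of how the new pieces are meant to be used. The module, class, and function names are taken from the diff below; the example package name and the use of `Spec(...).concretized()` to obtain a concrete spec are assumptions made only for the sake of the example.

    from spack.build_environment import SetupContext, effective_deptypes
    from spack.context import Context
    from spack.spec import Spec

    spec = Spec("zlib").concretized()  # any concrete spec will do

    # Which specs contribute to the build environment of `spec`, and in what
    # role (UseMode flag), ordered topologically from root to leaf:
    for node, mode in effective_deptypes(spec, context=Context.BUILD):
        print(node.name, mode)

    # Compute the environment modifications for building `spec`; dependents
    # override dependencies rather than the other way around:
    ctx = SetupContext(spec, context=Context.BUILD)
    ctx.set_all_package_py_globals()       # populate package.py module globals
    env_mods = ctx.get_env_modifications()

This is essentially what `setup_package` now does internally for the build and test contexts.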
This is used to replace the logic in `modifications_from_dependencies`, which has several issues: missing calls to `setup_run_environment`, and the order in which operations are applied. Further, it should improve performance a bit in certain cases, since `effective_deptypes` runs in O(v + e) time, whereas `spack env activate` currently can take up to O(v^2 + e) time due to loops over roots. Each edge in the DAG is visited once by calling `effective_deptypes` with `env.concrete_roots()`. By marking and propagating flags through the DAG, this commit also fixes a bug where Spack wouldn't call `setup_run_environment` for runtime dependencies of link dependencies. And this PR ensures that Spack correctly sets up the runtime environment of direct build dependencies. Regarding test dependencies: in a build context they are build-time test deps, whereas in a test context they are install-time test deps. Since there is no way to distinguish build-time from install-time test deps, they are treated as both.

Further changes:

- all `package.py` module globals are guaranteed to be set before any of the `setup_(dependent)_(run|build)_env` functions is called
- traversal order during setup: first the group of externals, then the group of non-externals, with specs in each group traversed topologically (dependencies are set up before dependents)
- modules: only ever call `setup_dependent_run_environment` of *direct* link/run type deps
- the marker in `set_module_variables_for_package` is dropped, since we should call the method once per spec. This allows us to set only a cheap subset of globals on the module: for example it's not necessary to compute the expensive `cmake_args` and the like if the spec under consideration is not the root node to be built.
- `spack load`'s `--only` is deprecated (it has no effect now), and `spack load x` now means: do everything that's required for `x` to work at runtime, which requires runtime deps to be set up -- just like `spack env activate`.
- `spack load` no longer loads build deps (of build deps) ...
- `spack env activate` on partially installed or broken environments: this is all or nothing now. If some spec errors during setup of its runtime env, you'll only get the unconditional variables + a warning that says the runtime changes for specs couldn't be applied.
- Remove traversal in upward direction from `setup_dependent_*` in packages. Upward traversal may iterate to specs that aren't children of the roots (e.g. zlib / python have hundreds of dependents, only a small fraction is reachable from the roots.
Packages should only modify the direct dependent they receive as an argument) --- lib/spack/spack/bootstrap/core.py | 13 +- lib/spack/spack/build_environment.py | 498 ++++++++++-------- lib/spack/spack/cmd/build_env.py | 3 +- lib/spack/spack/cmd/common/env_utility.py | 20 +- lib/spack/spack/cmd/load.py | 16 +- lib/spack/spack/cmd/test_env.py | 3 +- lib/spack/spack/cmd/unload.py | 3 +- lib/spack/spack/context.py | 29 + lib/spack/spack/environment/environment.py | 58 +- lib/spack/spack/modules/common.py | 15 +- lib/spack/spack/test/build_environment.py | 73 ++- lib/spack/spack/test/cmd/env.py | 15 +- lib/spack/spack/test/cmd/load.py | 86 ++- lib/spack/spack/user_environment.py | 75 ++- share/spack/qa/setup-env-test.fish | 7 +- share/spack/qa/setup-env-test.sh | 5 +- .../repos/builtin/packages/gptune/package.py | 1 + .../repos/builtin/packages/llvm/package.py | 8 - .../repos/builtin/packages/perl/package.py | 11 +- .../repos/builtin/packages/python/package.py | 11 +- .../repos/builtin/packages/ruby/package.py | 5 +- .../repos/builtin/packages/tcl/package.py | 26 +- 22 files changed, 565 insertions(+), 416 deletions(-) create mode 100644 lib/spack/spack/context.py diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index 4b7807e47bba23..d7b39b02e0cc38 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -446,16 +446,11 @@ def ensure_executables_in_path_or_raise( current_bootstrapper.last_search["spec"], current_bootstrapper.last_search["command"], ) - env_mods = spack.util.environment.EnvironmentModifications() - for dep in concrete_spec.traverse( - root=True, order="post", deptype=("link", "run") - ): - env_mods.extend( - spack.user_environment.environment_modifications_for_spec( - dep, set_package_py_globals=False - ) + cmd.add_default_envmod( + spack.user_environment.environment_modifications_for_specs( + concrete_spec, set_package_py_globals=False ) - cmd.add_default_envmod(env_mods) + ) return cmd assert exception_handler, ( diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 881fcb5c9cf93d..96c8cb8a4ad71a 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -40,12 +40,15 @@ import sys import traceback import types +from collections import defaultdict +from enum import Flag, auto +from itertools import chain from typing import List, Tuple import llnl.util.tty as tty from llnl.string import plural from llnl.util.filesystem import join_path -from llnl.util.lang import dedupe +from llnl.util.lang import dedupe, stable_partition from llnl.util.symlink import symlink from llnl.util.tty.color import cescape, colorize from llnl.util.tty.log import MultiProcessFd @@ -55,17 +58,21 @@ import spack.build_systems.python import spack.builder import spack.config +import spack.deptypes as dt import spack.main import spack.package_base import spack.paths import spack.platforms import spack.repo import spack.schema.environment +import spack.spec import spack.store import spack.subprocess_context import spack.user_environment import spack.util.path import spack.util.pattern +from spack import traverse +from spack.context import Context from spack.error import NoHeadersError, NoLibrariesError from spack.install_test import spack_install_test_log from spack.installer import InstallError @@ -76,7 +83,6 @@ env_flag, filter_system_paths, get_path, - inspect_path, is_system_path, validate, ) @@ -109,7 +115,6 @@ SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY" 
SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS" - # Platform-specific library suffix. if sys.platform == "darwin": dso_suffix = "dylib" @@ -406,19 +411,13 @@ def set_compiler_environment_variables(pkg, env): def set_wrapper_variables(pkg, env): - """Set environment variables used by the Spack compiler wrapper - (which have the prefix `SPACK_`) and also add the compiler wrappers - to PATH. - - This determines the injected -L/-I/-rpath options; each - of these specifies a search order and this function computes these - options in a manner that is intended to match the DAG traversal order - in `modifications_from_dependencies`: that method uses a post-order - traversal so that `PrependPath` actions from dependencies take lower - precedence; we use a post-order traversal here to match the visitation - order of `modifications_from_dependencies` (so we are visiting the - lowest priority packages first). - """ + """Set environment variables used by the Spack compiler wrapper (which have the prefix + `SPACK_`) and also add the compiler wrappers to PATH. + + This determines the injected -L/-I/-rpath options; each of these specifies a search order and + this function computes these options in a manner that is intended to match the DAG traversal + order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext + is using topo order.""" # Set environment variables if specified for # the given compiler compiler = pkg.compiler @@ -537,45 +536,42 @@ def update_compiler_args_for_dep(dep): env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs)) -def set_module_variables_for_package(pkg): +def set_package_py_globals(pkg, context: Context = Context.BUILD): """Populate the Python module of a package with some useful global names. This makes things easier for package writers. """ - # Put a marker on this module so that it won't execute the body of this - # function again, since it is not needed - marker = "_set_run_already_called" - if getattr(pkg.module, marker, False): - return - module = ModuleChangePropagator(pkg) - jobs = determine_number_of_jobs(parallel=pkg.parallel) - m = module - m.make_jobs = jobs - - # TODO: make these build deps that can be installed if not found. - m.make = MakeExecutable("make", jobs) - m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) - # TODO: johnwparent: add package or builder support to define these build tools - # for now there is no entrypoint for builders to define these on their - # own - if sys.platform == "win32": - m.nmake = Executable("nmake") - m.msbuild = Executable("msbuild") - # analog to configure for win32 - m.cscript = Executable("cscript") - - # Find the configure script in the archive path - # Don't use which for this; we want to find it in the current dir. - m.configure = Executable("./configure") - - # Standard CMake arguments - m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg) - m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) - m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) - - # Put spack compiler paths in module scope. + + if context == Context.BUILD: + jobs = determine_number_of_jobs(parallel=pkg.parallel) + m.make_jobs = jobs + + # TODO: make these build deps that can be installed if not found. 
+ m.make = MakeExecutable("make", jobs) + m.gmake = MakeExecutable("gmake", jobs) + m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) + # TODO: johnwparent: add package or builder support to define these build tools + # for now there is no entrypoint for builders to define these on their + # own + if sys.platform == "win32": + m.nmake = Executable("nmake") + m.msbuild = Executable("msbuild") + # analog to configure for win32 + m.cscript = Executable("cscript") + + # Find the configure script in the archive path + # Don't use which for this; we want to find it in the current dir. + m.configure = Executable("./configure") + + # Standard CMake arguments + m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg) + m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) + m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) + + # Put spack compiler paths in module scope. (Some packages use it + # in setup_run_environment etc, so don't put it context == build) link_dir = spack.paths.build_env_path m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"]) m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"]) @@ -599,9 +595,6 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs): m.static_to_shared_library = static_to_shared_library - # Put a marker on this module so that it won't execute the body of this - # function again, since it is not needed - setattr(m, marker, True) module.propagate_changes_to_mro() @@ -727,12 +720,15 @@ def load_external_modules(pkg): load_module(external_module) -def setup_package(pkg, dirty, context="build"): +def setup_package(pkg, dirty, context: Context = Context.BUILD): """Execute all environment setup routines.""" - if context not in ["build", "test"]: - raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context)) + if context not in (Context.BUILD, Context.TEST): + raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}") - set_module_variables_for_package(pkg) + # First populate the package.py's module with the relevant globals that could be used in any + # of the setup_* functions. + setup_context = SetupContext(pkg.spec, context=context) + setup_context.set_all_package_py_globals() # Keep track of env changes from packages separately, since we want to # issue warnings when packages make "suspicious" modifications. 
@@ -740,13 +736,15 @@ def setup_package(pkg, dirty, context="build"): env_mods = EnvironmentModifications() # setup compilers for build contexts - need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler) + need_compiler = context == Context.BUILD or ( + context == Context.TEST and pkg.test_requires_compiler + ) if need_compiler: set_compiler_environment_variables(pkg, env_mods) set_wrapper_variables(pkg, env_mods) tty.debug("setup_package: grabbing modifications from dependencies") - env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False)) + env_mods.extend(setup_context.get_env_modifications()) tty.debug("setup_package: collected all modifications from dependencies") # architecture specific setup @@ -754,7 +752,7 @@ def setup_package(pkg, dirty, context="build"): target = platform.target(pkg.spec.architecture.target) platform.setup_platform_environment(pkg, env_mods) - if context == "build": + if context == Context.BUILD: tty.debug("setup_package: setup build environment for root") builder = spack.builder.create(pkg) builder.setup_build_environment(env_mods) @@ -765,16 +763,7 @@ def setup_package(pkg, dirty, context="build"): "config to assume that the package is part of the system" " includes and omit it when invoked with '--cflags'." ) - elif context == "test": - tty.debug("setup_package: setup test environment for root") - env_mods.extend( - inspect_path( - pkg.spec.prefix, - spack.user_environment.prefix_inspections(pkg.spec.platform), - exclude=is_system_path, - ) - ) - pkg.setup_run_environment(env_mods) + elif context == Context.TEST: env_mods.prepend_path("PATH", ".") # First apply the clean environment changes @@ -813,158 +802,245 @@ def setup_package(pkg, dirty, context="build"): return env_base -def _make_runnable(pkg, env): - # Helper method which prepends a Package's bin/ prefix to the PATH - # environment variable - prefix = pkg.prefix - - for dirname in ["bin", "bin64"]: - bin_dir = os.path.join(prefix, dirname) - if os.path.isdir(bin_dir): - env.prepend_path("PATH", bin_dir) +class EnvironmentVisitor: + def __init__(self, *roots: spack.spec.Spec, context: Context): + # For the roots (well, marked specs) we follow different edges + # than for their deps, depending on the context. + self.root_hashes = set(s.dag_hash() for s in roots) + + if context == Context.BUILD: + # Drop direct run deps in build context + # We don't really distinguish between install and build time test deps, + # so we include them here as build-time test deps. + self.root_depflag = dt.BUILD | dt.TEST | dt.LINK + elif context == Context.TEST: + # This is more of an extended run environment + self.root_depflag = dt.TEST | dt.RUN | dt.LINK + elif context == Context.RUN: + self.root_depflag = dt.RUN | dt.LINK + + def neighbors(self, item): + spec = item.edge.spec + if spec.dag_hash() in self.root_hashes: + depflag = self.root_depflag + else: + depflag = dt.LINK | dt.RUN + return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag)) -def modifications_from_dependencies( - spec, context, custom_mods_only=True, set_package_py_globals=True -): - """Returns the environment modifications that are required by - the dependencies of a spec and also applies modifications - to this spec's package at module scope, if need be. 
+class UseMode(Flag): + #: Entrypoint spec (a spec to be built; an env root, etc) + ROOT = auto() - Environment modifications include: + #: A spec used at runtime, but no executables in PATH + RUNTIME = auto() - - Updating PATH so that executables can be found - - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective - tools can find Spack-built dependencies - - Running custom package environment modifications + #: A spec used at runtime, with executables in PATH + RUNTIME_EXECUTABLE = auto() - Custom package modifications can conflict with the default PATH changes - we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH - environment variables), so this applies changes in a fixed order: + #: A spec that's a direct build or test dep + BUILDTIME_DIRECT = auto() - - All modifications (custom and default) from external deps first - - All modifications from non-external deps afterwards + #: A spec that should be visible in search paths in a build env. + BUILDTIME = auto() - With that order, `PrependPath` actions from non-external default - environment modifications will take precedence over custom modifications - from external packages. + #: Flag is set when the (node, mode) is finalized + ADDED = auto() - A secondary constraint is that custom and default modifications are - grouped on a per-package basis: combined with the post-order traversal this - means that default modifications of dependents can override custom - modifications of dependencies (again, this would only occur for PATH, - CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH). - Args: - spec (spack.spec.Spec): spec for which we want the modifications - context (str): either 'build' for build-time modifications or 'run' - for run-time modifications - custom_mods_only (bool): if True returns only custom modifications, if False - returns custom and default modifications - set_package_py_globals (bool): whether or not to set the global variables in the - package.py files (this may be problematic when using buildcaches that have - been built on a different but compatible OS) - """ - if context not in ["build", "run", "test"]: - raise ValueError( - "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context) +def effective_deptypes( + *specs: spack.spec.Spec, context: Context = Context.BUILD +) -> List[Tuple[spack.spec.Spec, UseMode]]: + """Given a list of input specs and a context, return a list of tuples of + all specs that contribute to (environment) modifications, together with + a flag specifying in what way they do so. The list is ordered topologically + from root to leaf, meaning that environment modifications should be applied + in reverse so that dependents override dependencies, not the other way around.""" + visitor = traverse.TopoVisitor( + EnvironmentVisitor(*specs, context=context), + key=lambda x: x.dag_hash(), + root=True, + all_edges=True, + ) + traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor) + + # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag. + use_modes = defaultdict(lambda: UseMode(0)) + nodes_with_type = [] + + for edge in visitor.edges: + parent, child, depflag = edge.parent, edge.spec, edge.depflag + + # Mark the starting point + if parent is None: + use_modes[child] = UseMode.ROOT + continue + + parent_mode = use_modes[parent] + + # Nothing to propagate. + if not parent_mode: + continue + + # Dependending on the context, include particular deps from the root. 
+ if UseMode.ROOT & parent_mode: + if context == Context.BUILD: + if (dt.BUILD | dt.TEST) & depflag: + use_modes[child] |= UseMode.BUILDTIME_DIRECT + if dt.LINK & depflag: + use_modes[child] |= UseMode.BUILDTIME + + elif context == Context.TEST: + if (dt.RUN | dt.TEST) & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + elif dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + + elif context == Context.RUN: + if dt.RUN & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + elif dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + + # Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps. + if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode: + if dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + if dt.RUN & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + + # Propagate BUILDTIME through link deps. + if UseMode.BUILDTIME & parent_mode: + if dt.LINK & depflag: + use_modes[child] |= UseMode.BUILDTIME + + # Finalize the spec; the invariant is that all in-edges are processed + # before out-edges, meaning that parent is done. + if not (UseMode.ADDED & parent_mode): + use_modes[parent] |= UseMode.ADDED + nodes_with_type.append((parent, parent_mode)) + + # Attach the leaf nodes, since we only added nodes with out-edges. + for spec, parent_mode in use_modes.items(): + if parent_mode and not (UseMode.ADDED & parent_mode): + nodes_with_type.append((spec, parent_mode)) + + return nodes_with_type + + +class SetupContext: + """This class encapsulates the logic to determine environment modifications, and is used as + well to set globals in modules of package.py.""" + + def __init__(self, *specs: spack.spec.Spec, context: Context) -> None: + """Construct a ModificationsFromDag object. + Args: + specs: single root spec for build/test context, possibly more for run context + context: build, run, or test""" + if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1: + raise ValueError("Cannot setup build environment for multiple specs") + specs_with_type = effective_deptypes(*specs, context=context) + + self.specs = specs + self.context = context + self.external: List[Tuple[spack.spec.Spec, UseMode]] + self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]] + # Reverse so we go from leaf to root + self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type) + + # Split into non-external and external, maintaining topo order per group. + self.external, self.nonexternal = stable_partition( + reversed(specs_with_type), lambda t: t[0].external ) + self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE + self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE + self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT - env = EnvironmentModifications() + if context == Context.RUN or context == Context.TEST: + self.should_be_runnable |= UseMode.ROOT + self.should_setup_run_env |= UseMode.ROOT - # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this - # function; these sets form the building blocks of those collections. 
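A brief aside on the grouping above: SetupContext splits the reversed (spec, mode) list into externals and non-externals with a stable partition, so the relative (topological) order inside each group is preserved. The real helper is assumed to come from Spack's llnl.util.lang utilities; a minimal standalone equivalent, for readers unfamiliar with the idiom, could look like:

# Minimal stand-in for a stable partition: keeps relative order within each group.
from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")


def stable_partition(items: Iterable[T], pred: Callable[[T], bool]) -> Tuple[List[T], List[T]]:
    true_items: List[T] = []
    false_items: List[T] = []
    for item in items:
        (true_items if pred(item) else false_items).append(item)
    return true_items, false_items


# e.g. partition (name, is_external) pairs the way SetupContext splits specs
nodes = [("leaf-ext", True), ("leaf", False), ("mid", False), ("root-ext", True)]
external, nonexternal = stable_partition(nodes, lambda t: t[1])
assert external == [("leaf-ext", True), ("root-ext", True)]
assert nonexternal == [("leaf", False), ("mid", False)]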
- build_deps = set(spec.dependencies(deptype=("build", "test"))) - link_deps = set(spec.traverse(root=False, deptype="link")) - build_link_deps = build_deps | link_deps - build_and_supporting_deps = set() - for build_dep in build_deps: - build_and_supporting_deps.update(build_dep.traverse(deptype="run")) - run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link"))) - test_and_supporting_deps = set() - for test_dep in set(spec.dependencies(deptype="test")): - test_and_supporting_deps.update(test_dep.traverse(deptype="run")) - - # All dependencies that might have environment modifications to apply - custom_mod_deps = set() - if context == "build": - custom_mod_deps.update(build_and_supporting_deps) - # Tests may be performed after build - custom_mod_deps.update(test_and_supporting_deps) - else: - # test/run context - custom_mod_deps.update(run_and_supporting_deps) - if context == "test": - custom_mod_deps.update(test_and_supporting_deps) - custom_mod_deps.update(link_deps) - - # Determine 'exe_deps': the set of packages with binaries we want to use - if context == "build": - exe_deps = build_and_supporting_deps | test_and_supporting_deps - elif context == "run": - exe_deps = set(spec.traverse(deptype="run")) - elif context == "test": - exe_deps = test_and_supporting_deps - - def default_modifications_for_dep(dep): - if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build": - prefix = dep.prefix - - env.prepend_path("CMAKE_PREFIX_PATH", prefix) - - for directory in ("lib", "lib64", "share"): - pcdir = os.path.join(prefix, directory, "pkgconfig") - if os.path.isdir(pcdir): - env.prepend_path("PKG_CONFIG_PATH", pcdir) - - if dep in exe_deps and not is_system_path(dep.prefix): - _make_runnable(dep, env) - - def add_modifications_for_dep(dep): - tty.debug("Adding env modifications for {0}".format(dep.name)) - # Some callers of this function only want the custom modifications. - # For callers that want both custom and default modifications, we want - # to perform the default modifications here (this groups custom - # and default modifications together on a per-package basis). - if not custom_mods_only: - default_modifications_for_dep(dep) - - # Perform custom modifications here (PrependPath actions performed in - # the custom method override the default environment modifications - # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and - # PKG_CONFIG_PATH) - if dep in custom_mod_deps: - dpkg = dep.package - if set_package_py_globals: - set_module_variables_for_package(dpkg) - - current_module = ModuleChangePropagator(spec.package) - dpkg.setup_dependent_package(current_module, spec) - current_module.propagate_changes_to_mro() - - if context == "build": - builder = spack.builder.create(dpkg) - builder.setup_dependent_build_environment(env, spec) - else: - dpkg.setup_dependent_run_environment(env, spec) - tty.debug("Added env modifications for {0}".format(dep.name)) - - # Note that we want to perform environment modifications in a fixed order. - # The Spec.traverse method provides this: i.e. in addition to - # the post-order semantics, it also guarantees a fixed traversal order - # among dependencies which are not constrained by post-order semantics. 
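One more illustrative note on the ordering rationale in the code being removed here (and kept, in spirit, by the new SetupContext): modifications are applied so that later prepends win, which is why externals are handled first and dependents come after their dependencies. A toy sketch with made-up prefixes:

# Toy model only: made-up prefixes, showing why application order matters.
path = []


def prepend_path(entry):
    # Later prepends land earlier in the search path, i.e. they "win".
    path.insert(0, entry)


# Apply leaf-to-root: dependencies first, the root (dependent) last.
for prefix in ["/prefix/zlib/bin", "/prefix/openblas/bin", "/prefix/hypre/bin"]:
    prepend_path(prefix)

assert path == ["/prefix/hypre/bin", "/prefix/openblas/bin", "/prefix/zlib/bin"]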
- for dspec in spec.traverse(root=False, order="post"): - if dspec.external: - add_modifications_for_dep(dspec) - - for dspec in spec.traverse(root=False, order="post"): - # Default env modifications for non-external packages can override - # custom modifications of external packages (this can only occur - # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH) - if not dspec.external: - add_modifications_for_dep(dspec) - - return env + # Everything that calls setup_run_environment and setup_dependent_* needs globals set. + self.should_set_package_py_globals = ( + self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT + ) + # In a build context, the root and direct build deps need build-specific globals set. + self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT + + def set_all_package_py_globals(self): + """Set the globals in modules of package.py files.""" + for dspec, flag in chain(self.external, self.nonexternal): + pkg = dspec.package + + if self.should_set_package_py_globals & flag: + if self.context == Context.BUILD and self.needs_build_context & flag: + set_package_py_globals(pkg, context=Context.BUILD) + else: + # This includes runtime dependencies, also runtime deps of direct build deps. + set_package_py_globals(pkg, context=Context.RUN) + + for spec in dspec.dependents(): + # Note: some specs have dependents that are unreachable from the root, so avoid + # setting globals for those. + if id(spec) not in self.nodes_in_subdag: + continue + dependent_module = ModuleChangePropagator(spec.package) + pkg.setup_dependent_package(dependent_module, spec) + dependent_module.propagate_changes_to_mro() + + def get_env_modifications(self) -> EnvironmentModifications: + """Returns the environment variable modifications for the given input specs and context. + Environment modifications include: + - Updating PATH for packages that are required at runtime + - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective + tools can find Spack-built dependencies (when context=build) + - Running custom package environment modifications (setup_run_environment, + setup_dependent_build_environment, setup_dependent_run_environment) + + The (partial) order imposed on the specs is externals first, then topological + from leaf to root. That way externals cannot contribute search paths that would shadow + Spack's prefixes, and dependents override variables set by dependencies.""" + env = EnvironmentModifications() + for dspec, flag in chain(self.external, self.nonexternal): + tty.debug(f"Adding env modifications for {dspec.name}") + pkg = dspec.package + + if self.should_setup_dependent_build_env & flag: + self._make_buildtime_detectable(dspec, env) + + for spec in self.specs: + builder = spack.builder.create(pkg) + builder.setup_dependent_build_environment(env, spec) + + if self.should_be_runnable & flag: + self._make_runnable(dspec, env) + + if self.should_setup_run_env & flag: + # TODO: remove setup_dependent_run_environment... 
+ for spec in dspec.dependents(deptype=dt.RUN): + if id(spec) in self.nodes_in_subdag: + pkg.setup_dependent_run_environment(env, spec) + pkg.setup_run_environment(env) + return env + + def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications): + if is_system_path(dep.prefix): + return + + env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix) + for d in ("lib", "lib64", "share"): + pcdir = os.path.join(dep.prefix, d, "pkgconfig") + if os.path.isdir(pcdir): + env.prepend_path("PKG_CONFIG_PATH", pcdir) + + def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications): + if is_system_path(dep.prefix): + return + + for d in ("bin", "bin64"): + bin_dir = os.path.join(dep.prefix, d) + if os.path.isdir(bin_dir): + env.prepend_path("PATH", bin_dir) def get_cmake_prefix_path(pkg): @@ -996,7 +1072,7 @@ def get_cmake_prefix_path(pkg): def _setup_pkg_and_run( serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2 ): - context = kwargs.get("context", "build") + context: str = kwargs.get("context", "build") try: # We are in the child process. Python sets sys.stdin to @@ -1012,7 +1088,7 @@ def _setup_pkg_and_run( if not kwargs.get("fake", False): kwargs["unmodified_env"] = os.environ.copy() kwargs["env_modifications"] = setup_package( - pkg, dirty=kwargs.get("dirty", False), context=context + pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context) ) return_value = function(pkg, kwargs) write_pipe.send(return_value) diff --git a/lib/spack/spack/cmd/build_env.py b/lib/spack/spack/cmd/build_env.py index 7da9213c5b0e7b..f5efca6e230484 100644 --- a/lib/spack/spack/cmd/build_env.py +++ b/lib/spack/spack/cmd/build_env.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import spack.cmd.common.env_utility as env_utility +from spack.context import Context description = ( "run a command in a spec's install environment, or dump its environment to screen or file" @@ -14,4 +15,4 @@ def build_env(parser, args): - env_utility.emulate_env_utility("build-env", "build", args) + env_utility.emulate_env_utility("build-env", Context.BUILD, args) diff --git a/lib/spack/spack/cmd/common/env_utility.py b/lib/spack/spack/cmd/common/env_utility.py index 1816a2c574700c..b8a6338d924f83 100644 --- a/lib/spack/spack/cmd/common/env_utility.py +++ b/lib/spack/spack/cmd/common/env_utility.py @@ -7,7 +7,6 @@ import llnl.util.tty as tty -import spack.build_environment as build_environment import spack.cmd import spack.cmd.common.arguments as arguments import spack.deptypes as dt @@ -15,7 +14,8 @@ import spack.paths import spack.spec import spack.store -from spack import traverse +from spack import build_environment, traverse +from spack.context import Context from spack.util.environment import dump_environment, pickle_environment @@ -42,14 +42,14 @@ def setup_parser(subparser): class AreDepsInstalledVisitor: - def __init__(self, context="build"): - if context not in ("build", "test"): - raise ValueError("context can only be build or test") - - if context == "build": + def __init__(self, context: Context = Context.BUILD): + if context == Context.BUILD: + # TODO: run deps shouldn't be required for build env. 
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN - else: + elif context == Context.TEST: self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN + else: + raise ValueError("context can only be Context.BUILD or Context.TEST") self.has_uninstalled_deps = False @@ -76,7 +76,7 @@ def neighbors(self, item): return item.edge.spec.edges_to_dependencies(depflag=depflag) -def emulate_env_utility(cmd_name, context, args): +def emulate_env_utility(cmd_name, context: Context, args): if not args.spec: tty.die("spack %s requires a spec." % cmd_name) @@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context, args): hashes=True, # This shows more than necessary, but we cannot dynamically change deptypes # in Spec.tree(...). - deptypes="all" if context == "build" else ("build", "test", "link", "run"), + deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"), ), ) diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index e68fe48dce7fec..5cdd2909c7ae40 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -5,6 +5,8 @@ import sys +import llnl.util.tty as tty + import spack.cmd import spack.cmd.common.arguments as arguments import spack.cmd.find @@ -108,16 +110,14 @@ def load(parser, args): ) return 1 - with spack.store.STORE.db.read_transaction(): - if "dependencies" in args.things_to_load: - include_roots = "package" in args.things_to_load - specs = [ - dep for spec in specs for dep in spec.traverse(root=include_roots, order="post") - ] + if args.things_to_load != "package,dependencies": + tty.warn( + "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22" + ) - env_mod = spack.util.environment.EnvironmentModifications() + with spack.store.STORE.db.read_transaction(): + env_mod = uenv.environment_modifications_for_specs(*specs) for spec in specs: - env_mod.extend(uenv.environment_modifications_for_spec(spec)) env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) cmds = env_mod.shell_modifications(args.shell) diff --git a/lib/spack/spack/cmd/test_env.py b/lib/spack/spack/cmd/test_env.py index 049df9d5c0494b..070b766248d8dc 100644 --- a/lib/spack/spack/cmd/test_env.py +++ b/lib/spack/spack/cmd/test_env.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import spack.cmd.common.env_utility as env_utility +from spack.context import Context description = ( "run a command in a spec's test environment, or dump its environment to screen or file" @@ -14,4 +15,4 @@ def test_env(parser, args): - env_utility.emulate_env_utility("test-env", "test", args) + env_utility.emulate_env_utility("test-env", Context.TEST, args) diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py index 1fecdc5b33ba1d..7fe634c56de91e 100644 --- a/lib/spack/spack/cmd/unload.py +++ b/lib/spack/spack/cmd/unload.py @@ -88,9 +88,8 @@ def unload(parser, args): ) return 1 - env_mod = spack.util.environment.EnvironmentModifications() + env_mod = uenv.environment_modifications_for_specs(*specs).reversed() for spec in specs: - env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed()) env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) cmds = env_mod.shell_modifications(args.shell) diff --git a/lib/spack/spack/context.py b/lib/spack/spack/context.py new file mode 100644 index 00000000000000..de3311da22b0ff --- /dev/null +++ b/lib/spack/spack/context.py @@ -0,0 +1,29 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""This module provides classes used in user and build environment""" + +from enum import Enum + + +class Context(Enum): + """Enum used to indicate the context in which an environment has to be setup: build, + run or test.""" + + BUILD = 1 + RUN = 2 + TEST = 3 + + def __str__(self): + return ("build", "run", "test")[self.value - 1] + + @classmethod + def from_string(cls, s: str): + if s == "build": + return Context.BUILD + elif s == "run": + return Context.RUN + elif s == "test": + return Context.TEST + raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}") diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index ee48955ac54038..62dda31034d2b7 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1690,41 +1690,18 @@ def check_views(self): "Loading the environment view will require reconcretization." % self.name ) - def _env_modifications_for_view(self, view: ViewDescriptor, reverse: bool = False): - all_mods = spack.util.environment.EnvironmentModifications() - - visited = set() - - errors = [] - for root_spec in self.concrete_roots(): - if root_spec in view and root_spec.installed and root_spec.package: - for spec in root_spec.traverse(deptype="run", root=True): - if spec.name in visited: - # It is expected that only one instance of the package - # can be added to the environment - do not attempt to - # add multiple. - tty.debug( - "Not adding {0} to shell modifications: " - "this package has already been added".format( - spec.format("{name}/{hash:7}") - ) - ) - continue - else: - visited.add(spec.name) - - try: - mods = uenv.environment_modifications_for_spec(spec, view) - except Exception as e: - msg = "couldn't get environment settings for %s" % spec.format( - "{name}@{version} /{hash:7}" - ) - errors.append((msg, str(e))) - continue - - all_mods.extend(mods.reversed() if reverse else mods) - - return all_mods, errors + def _env_modifications_for_view( + self, view: ViewDescriptor, reverse: bool = False + ) -> spack.util.environment.EnvironmentModifications: + try: + mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view) + except Exception as e: + # Failing to setup spec-specific changes shouldn't be a hard error. 
+ tty.warn( + "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e) + ) + return spack.util.environment.EnvironmentModifications() + return mods.reversed() if reverse else mods def add_view_to_env( self, env_mod: spack.util.environment.EnvironmentModifications, view: str @@ -1740,12 +1717,7 @@ def add_view_to_env( return env_mod env_mod.extend(uenv.unconditional_environment_modifications(descriptor)) - - mods, errors = self._env_modifications_for_view(descriptor) - env_mod.extend(mods) - if errors: - for err in errors: - tty.warn(*err) + env_mod.extend(self._env_modifications_for_view(descriptor)) # deduplicate paths from specs mapped to the same location for env_var in env_mod.group_by_name(): @@ -1767,9 +1739,7 @@ def rm_view_from_env( return env_mod env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed()) - - mods, _ = self._env_modifications_for_view(descriptor, reverse=True) - env_mod.extend(mods) + env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True)) return env_mod diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 4b60f52bf43d18..57b7da5ad52ab8 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -56,6 +56,7 @@ import spack.util.file_permissions as fp import spack.util.path import spack.util.spack_yaml as syaml +from spack.context import Context #: config section for this file @@ -717,10 +718,16 @@ def environment_modifications(self): ) # Let the extendee/dependency modify their extensions/dependencies - # before asking for package-specific modifications - env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run")) - # Package specific modifications - spack.build_environment.set_module_variables_for_package(spec.package) + + # The only thing we care about is `setup_dependent_run_environment`, but + # for that to work, globals have to be set on the package modules, and the + # whole chain of setup_dependent_package has to be followed from leaf to spec. + # So: just run it here, but don't collect env mods. + spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals() + + # Then run setup_dependent_run_environment before setup_run_environment. 
+ for dep in spec.dependencies(deptype=("link", "run")): + dep.package.setup_dependent_run_environment(env, spec) spec.package.setup_run_environment(env) # Modifications required from modules.yaml diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index 2eb80fded3dc2d..0893b76a98a2f3 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -17,7 +17,8 @@ import spack.package_base import spack.spec import spack.util.spack_yaml as syaml -from spack.build_environment import _static_to_shared_library, dso_suffix +from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix +from spack.context import Context from spack.paths import build_env_path from spack.util.cpus import determine_number_of_jobs from spack.util.environment import EnvironmentModifications @@ -438,10 +439,10 @@ def test_parallel_false_is_not_propagating(default_mock_concretization): # b (parallel =True) s = default_mock_concretization("a foobar=bar") - spack.build_environment.set_module_variables_for_package(s.package) + spack.build_environment.set_package_py_globals(s.package) assert s["a"].package.module.make_jobs == 1 - spack.build_environment.set_module_variables_for_package(s["b"].package) + spack.build_environment.set_package_py_globals(s["b"].package) assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs( parallel=s["b"].package.parallel ) @@ -575,3 +576,69 @@ def test_setting_attributes(self, default_mock_concretization): if current_module == spack.package_base: break assert current_module.SOME_ATTRIBUTE == 1 + + +def test_effective_deptype_build_environment(default_mock_concretization): + s = default_mock_concretization("dttop") + + # [ ] dttop@1.0 # + # [b ] ^dtbuild1@1.0 # <- direct build dep + # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped + # [bl ] ^dtlink2@1.0 # <- linkable, and runtime dep of build dep + # [ r ] ^dtrun2@1.0 # <- non-linkable, exectuable runtime dep of build dep + # [bl ] ^dtlink1@1.0 # <- direct build dep + # [bl ] ^dtlink3@1.0 # <- linkable, and runtime dep of build dep + # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped + # [bl ] ^dtlink4@1.0 # <- linkable, and runtime dep of build dep + # [ r ] ^dtrun1@1.0 # <- run-only dep is pruned (should it be in PATH?) 
+ # [bl ] ^dtlink5@1.0 # <- children too + # [ r ] ^dtrun3@1.0 # <- children too + # [b ] ^dtbuild3@1.0 # <- children too + + expected_flags = { + "dttop": UseMode.ROOT, + "dtbuild1": UseMode.BUILDTIME_DIRECT, + "dtlink1": UseMode.BUILDTIME_DIRECT | UseMode.BUILDTIME, + "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtrun2": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + "dtlink2": UseMode.RUNTIME, + } + + for spec, effective_type in spack.build_environment.effective_deptypes( + s, context=Context.BUILD + ): + assert effective_type & expected_flags.pop(spec.name) == effective_type + assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes" + + +def test_effective_deptype_run_environment(default_mock_concretization): + s = default_mock_concretization("dttop") + + # [ ] dttop@1.0 # + # [b ] ^dtbuild1@1.0 # <- direct build-only dep is pruned + # [b ] ^dtbuild2@1.0 # <- children too + # [bl ] ^dtlink2@1.0 # <- children too + # [ r ] ^dtrun2@1.0 # <- children too + # [bl ] ^dtlink1@1.0 # <- runtime, not executable + # [bl ] ^dtlink3@1.0 # <- runtime, not executable + # [b ] ^dtbuild2@1.0 # <- indirect build only dep is pruned + # [bl ] ^dtlink4@1.0 # <- runtime, not executable + # [ r ] ^dtrun1@1.0 # <- runtime and executable + # [bl ] ^dtlink5@1.0 # <- runtime, not executable + # [ r ] ^dtrun3@1.0 # <- runtime and executable + # [b ] ^dtbuild3@1.0 # <- indirect build-only dep is pruned + + expected_flags = { + "dttop": UseMode.ROOT, + "dtlink1": UseMode.RUNTIME, + "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtrun1": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + "dtlink5": UseMode.RUNTIME, + "dtrun3": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + } + + for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN): + assert effective_type & expected_flags.pop(spec.name) == effective_type + assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes" diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 4845d122060ea6..7d0eb37951b862 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -168,7 +168,7 @@ def test_env_remove(capfd): foo = ev.read("foo") with foo: - with pytest.raises(spack.main.SpackCommandError): + with pytest.raises(SpackCommandError): with capfd.disabled(): env("remove", "-y", "foo") assert "foo" in env("list") @@ -283,7 +283,7 @@ def setup_error(pkg, env): _, err = capfd.readouterr() assert "cmake-client had issues!" 
in err - assert "Warning: couldn't get environment settings" in err + assert "Warning: couldn't load runtime environment" in err def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch): @@ -500,11 +500,14 @@ def test_env_activate_broken_view( # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and fails gracefully with spack.repo.use_repositories(mock_custom_repository): - with pytest.raises(SpackCommandError): - env("activate", "--sh", "test") + wrong_repo = env("activate", "--sh", "test") + assert "Warning: couldn't load runtime environment" in wrong_repo + assert "Unknown namespace: builtin.mock" in wrong_repo # test replacing repo fixes it - env("activate", "--sh", "test") + normal_repo = env("activate", "--sh", "test") + assert "Warning: couldn't load runtime environment" not in normal_repo + assert "Unknown namespace: builtin.mock" not in normal_repo def test_to_lockfile_dict(): @@ -1044,7 +1047,7 @@ def test_env_commands_die_with_no_env_arg(): env("remove") # these have an optional env arg and raise errors via tty.die - with pytest.raises(spack.main.SpackCommandError): + with pytest.raises(SpackCommandError): env("loads") # This should NOT raise an error with no environment diff --git a/lib/spack/spack/test/cmd/load.py b/lib/spack/spack/test/cmd/load.py index 1aa220b570eed8..26fa374a05d34d 100644 --- a/lib/spack/spack/test/cmd/load.py +++ b/lib/spack/spack/test/cmd/load.py @@ -9,6 +9,7 @@ import spack.spec import spack.user_environment as uenv +import spack.util.environment from spack.main import SpackCommand load = SpackCommand("load") @@ -27,74 +28,63 @@ def test_manpath_trailing_colon( manpath search path via a trailing colon""" install("mpileaks") - sh_out = load("--sh", "--only", "package", "mpileaks") + sh_out = load("--sh", "mpileaks") lines = sh_out.split("\n") assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines) os.environ["MANPATH"] = "/tmp/man:" - sh_out = load("--sh", "--only", "package", "mpileaks") + sh_out = load("--sh", "mpileaks") lines = sh_out.split("\n") assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines) -def test_load(install_mockery, mock_fetch, mock_archive, mock_packages): - """Test that the commands generated by load add the specified prefix - inspections. Also test that Spack records loaded specs by hash in the - user environment. - - CMAKE_PREFIX_PATH is the only prefix inspection guaranteed for fake - packages, since it keys on the prefix instead of a subdir.""" - install_out = install("mpileaks", output=str, fail_on_error=False) - print("spack install mpileaks") - print(install_out) +def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages, working_env): + """Test that `spack load` applies prefix inspections of its required runtime deps in + topo-order""" + install("mpileaks") mpileaks_spec = spack.spec.Spec("mpileaks").concretized() - sh_out = load("--sh", "--only", "package", "mpileaks") - csh_out = load("--csh", "--only", "package", "mpileaks") + # Ensure our reference variable is cleed. 
+ os.environ["CMAKE_PREFIX_PATH"] = "/hello:/world" + + sh_out = load("--sh", "mpileaks") + csh_out = load("--csh", "mpileaks") + + def extract_cmake_prefix_path(output, prefix): + return next(cmd for cmd in output.split(";") if cmd.startswith(prefix))[ + len(prefix) : + ].split(":") - # Test prefix inspections - sh_out_test = "export CMAKE_PREFIX_PATH=%s" % mpileaks_spec.prefix - csh_out_test = "setenv CMAKE_PREFIX_PATH %s" % mpileaks_spec.prefix - assert sh_out_test in sh_out - assert csh_out_test in csh_out + # Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG. + prefix_to_pkg = lambda prefix: next( + s.name for s in mpileaks_spec.traverse() if s.prefix == prefix + ) - # Test hashes recorded properly - hash_test_replacements = (uenv.spack_loaded_hashes_var, mpileaks_spec.dag_hash()) - sh_hash_test = "export %s=%s" % hash_test_replacements - csh_hash_test = "setenv %s %s" % hash_test_replacements - assert sh_hash_test in sh_out - assert csh_hash_test in csh_out + paths_sh = extract_cmake_prefix_path(sh_out, prefix="export CMAKE_PREFIX_PATH=") + paths_csh = extract_cmake_prefix_path(csh_out, prefix="setenv CMAKE_PREFIX_PATH ") + # Shouldn't be a difference between loading csh / sh, so check they're the same. + assert paths_sh == paths_csh -def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages): - """Test that the '-r' option to the load command prepends dependency prefix - inspections in post-order""" - install("mpileaks") - mpileaks_spec = spack.spec.Spec("mpileaks").concretized() + # We should've prepended new paths, and keep old ones. + assert paths_sh[-2:] == ["/hello", "/world"] - sh_out = load("--sh", "mpileaks") - csh_out = load("--csh", "mpileaks") + # All but the last two paths are added by spack load; lookup what packages they're from. + pkgs = [prefix_to_pkg(p) for p in paths_sh[:-2]] - # Test prefix inspections - prefix_test_replacement = ":".join( - reversed([s.prefix for s in mpileaks_spec.traverse(order="post")]) + # Do we have all the runtime packages? + assert set(pkgs) == set( + s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True) ) - sh_prefix_test = "export CMAKE_PREFIX_PATH=%s" % prefix_test_replacement - csh_prefix_test = "setenv CMAKE_PREFIX_PATH %s" % prefix_test_replacement - assert sh_prefix_test in sh_out - assert csh_prefix_test in csh_out + # Finally, do we list them in topo order? + for i, pkg in enumerate(pkgs): + set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i]) - # Test spack records loaded hashes properly - hash_test_replacement = ( - uenv.spack_loaded_hashes_var, - ":".join(reversed([s.dag_hash() for s in mpileaks_spec.traverse(order="post")])), - ) - sh_hash_test = "export %s=%s" % hash_test_replacement - csh_hash_test = "setenv %s %s" % hash_test_replacement - assert sh_hash_test in sh_out - assert csh_hash_test in csh_out + # Lastly, do we keep track that mpileaks was loaded? 
+ assert f"export {uenv.spack_loaded_hashes_var}={mpileaks_spec.dag_hash()}" in sh_out + assert f"setenv {uenv.spack_loaded_hashes_var} {mpileaks_spec.dag_hash()}" in csh_out def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages): diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 0be11c046cdf07..5d1561a8eaedb7 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -4,11 +4,18 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import sys +from contextlib import contextmanager +from typing import Callable + +from llnl.util.lang import nullcontext import spack.build_environment import spack.config +import spack.spec import spack.util.environment as environment import spack.util.prefix as prefix +from spack import traverse +from spack.context import Context #: Environment variable name Spack uses to track individually loaded packages spack_loaded_hashes_var = "SPACK_LOADED_HASHES" @@ -62,40 +69,58 @@ def unconditional_environment_modifications(view): return env -def environment_modifications_for_spec(spec, view=None, set_package_py_globals=True): +@contextmanager +def projected_prefix(*specs: spack.spec.Spec, projection: Callable[[spack.spec.Spec], str]): + """Temporarily replace every Spec's prefix with projection(s)""" + prefixes = dict() + for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()): + if s.external: + continue + prefixes[s.dag_hash()] = s.prefix + s.prefix = prefix.Prefix(projection(s)) + + yield + + for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()): + s.prefix = prefixes.get(s.dag_hash(), s.prefix) + + +def environment_modifications_for_specs( + *specs: spack.spec.Spec, view=None, set_package_py_globals: bool = True +): """List of environment (shell) modifications to be processed for spec. This list is specific to the location of the spec or its projection in the view. 
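A short aside on projected_prefix above: it is the familiar "temporarily swap an attribute, restore it afterwards" pattern, here used to point each spec's prefix at its view projection while inspections run. A generic standalone version of the pattern (simplified, with a hypothetical FakeSpec and made-up paths, not Spack's actual implementation) could look like:

# Generic sketch of the swap-and-restore pattern; names and paths are illustrative.
from contextlib import contextmanager


@contextmanager
def swapped_attr(obj, name, value):
    old = getattr(obj, name)
    setattr(obj, name, value)
    try:
        yield obj
    finally:
        setattr(obj, name, old)  # restore even if the body raises


class FakeSpec:
    prefix = "/spack/opt/zlib-1.3"


s = FakeSpec()
with swapped_attr(s, "prefix", "/views/default"):
    assert s.prefix == "/views/default"   # projection in effect inside the block
assert s.prefix == "/spack/opt/zlib-1.3"  # original prefix restored afterwards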
Args: - spec (spack.spec.Spec): spec for which to list the environment modifications + specs: spec(s) for which to list the environment modifications view: view associated with the spec passed as first argument - set_package_py_globals (bool): whether or not to set the global variables in the + set_package_py_globals: whether or not to set the global variables in the package.py files (this may be problematic when using buildcaches that have been built on a different but compatible OS) """ - spec = spec.copy() - if view and not spec.external: - spec.prefix = prefix.Prefix(view.get_projection_for_spec(spec)) - - # generic environment modifications determined by inspecting the spec - # prefix - env = environment.inspect_path( - spec.prefix, prefix_inspections(spec.platform), exclude=environment.is_system_path - ) - - # Let the extendee/dependency modify their extensions/dependents - # before asking for package-specific modifications - env.extend( - spack.build_environment.modifications_from_dependencies( - spec, context="run", set_package_py_globals=set_package_py_globals - ) - ) - - if set_package_py_globals: - spack.build_environment.set_module_variables_for_package(spec.package) - - spec.package.setup_run_environment(env) + env = environment.EnvironmentModifications() + topo_ordered = traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo") + + if view: + maybe_projected = projected_prefix(*specs, projection=view.get_projection_for_spec) + else: + maybe_projected = nullcontext() + + with maybe_projected: + # Static environment changes (prefix inspections) + for s in reversed(list(topo_ordered)): + static = environment.inspect_path( + s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path + ) + env.extend(static) + + # Dynamic environment changes (setup_run_environment etc) + setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN) + if set_package_py_globals: + setup_context.set_all_package_py_globals() + dynamic = setup_context.get_env_modifications() + env.extend(dynamic) return env diff --git a/share/spack/qa/setup-env-test.fish b/share/spack/qa/setup-env-test.fish index 86563b4b08ce3a..6474917b70766b 100755 --- a/share/spack/qa/setup-env-test.fish +++ b/share/spack/qa/setup-env-test.fish @@ -335,15 +335,14 @@ set _b_bin $_b_loc"/bin" set _a_loc (spack -m location -i shell-a) set _a_bin $_a_loc"/bin" -spt_contains "set -gx PATH $_b_bin" spack -m load --only package --fish shell-b +spt_contains "set -gx PATH $_b_bin" spack -m load --fish shell-b spt_succeeds spack -m load shell-b set LIST_CONTENT (spack -m load shell-b; spack load --list) spt_contains "shell-b@" echo $LIST_CONTENT spt_does_not_contain "shell-a@" echo $LIST_CONTENT # test a variable MacOS clears and one it doesn't for recursive loads -spt_contains "set -gx PATH $_a_bin:$_b_bin" spack -m load --fish shell-a -spt_succeeds spack -m load --only dependencies shell-a -spt_succeeds spack -m load --only package shell-a + +spt_succeeds spack -m load shell-a spt_fails spack -m load d spt_contains "usage: spack load " spack -m load -h spt_contains "usage: spack load " spack -m load -h d diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index 58feca69ea0487..4172a40155590b 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -104,7 +104,7 @@ contains "usage: spack module " spack -m module --help contains "usage: spack module " spack -m module title 'Testing `spack load`' -contains "export 
PATH=$(spack -m location -i shell-b)/bin" spack -m load --only package --sh shell-b +contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b succeeds spack -m load shell-b LIST_CONTENT=`spack -m load shell-b; spack load --list` contains "shell-b@" echo $LIST_CONTENT @@ -113,8 +113,7 @@ fails spack -m load -l # test a variable MacOS clears and one it doesn't for recursive loads contains "export PATH=$(spack -m location -i shell-a)/bin" spack -m load --sh shell-a contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b -succeeds spack -m load --only dependencies shell-a -succeeds spack -m load --only package shell-a +succeeds spack -m load shell-a fails spack -m load d contains "usage: spack load " spack -m load -h contains "usage: spack load " spack -m load -h d diff --git a/var/spack/repos/builtin/packages/gptune/package.py b/var/spack/repos/builtin/packages/gptune/package.py index 2affba20effb04..c0c321c9a4a0d7 100644 --- a/var/spack/repos/builtin/packages/gptune/package.py +++ b/var/spack/repos/builtin/packages/gptune/package.py @@ -52,6 +52,7 @@ class Gptune(CMakePackage): depends_on("py-pyaml", type=("build", "run")) depends_on("py-statsmodels@0.13.0:", type=("build", "run")) depends_on("py-mpi4py@3.0.3:", type=("build", "run")) + depends_on("python", type=("build", "run")) depends_on("pygmo", type=("build", "run")) depends_on("openturns", type=("build", "run")) depends_on("py-pymoo", type=("build", "run"), when="@3.0.0:") diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index b7a78c6fdd9fda..7e110a248ecf0a 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -752,14 +752,6 @@ def setup_build_environment(self, env): os.symlink(bin, sym) env.prepend_path("PATH", self.stage.path) - def setup_run_environment(self, env): - if "+clang" in self.spec: - env.set("CC", join_path(self.spec.prefix.bin, "clang")) - env.set("CXX", join_path(self.spec.prefix.bin, "clang++")) - if "+flang" in self.spec: - env.set("FC", join_path(self.spec.prefix.bin, "flang")) - env.set("F77", join_path(self.spec.prefix.bin, "flang")) - root_cmakelists_dir = "llvm" def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index bffaf875a2fa8d..7a4e15c1f7ca90 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -401,14 +401,13 @@ def install_cpanm(self): maker() maker("install") - def _setup_dependent_env(self, env, dependent_spec, deptype): + def _setup_dependent_env(self, env, dependent_spec): """Set PATH and PERL5LIB to include the extension and any other perl extensions it depends on, assuming they were installed with INSTALL_BASE defined.""" perl_lib_dirs = [] - for d in dependent_spec.traverse(deptype=deptype): - if d.package.extends(self.spec): - perl_lib_dirs.append(d.prefix.lib.perl5) + if dependent_spec.package.extends(self.spec): + perl_lib_dirs.append(dependent_spec.prefix.lib.perl5) if perl_lib_dirs: perl_lib_path = ":".join(perl_lib_dirs) env.prepend_path("PERL5LIB", perl_lib_path) @@ -416,10 +415,10 @@ def _setup_dependent_env(self, env, dependent_spec, deptype): env.append_path("PATH", self.prefix.bin) def setup_dependent_build_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test")) + 
self._setup_dependent_env(env, dependent_spec) def setup_dependent_run_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptype=("run",)) + self._setup_dependent_env(env, dependent_spec) def setup_dependent_package(self, module, dependent_spec): """Called before perl modules' install() methods. diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 59306d8f2377ca..83dbfb382fd94d 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -1244,12 +1244,11 @@ def setup_dependent_run_environment(self, env, dependent_spec): """Set PYTHONPATH to include the site-packages directory for the extension and any other python extensions it depends on. """ - for d in dependent_spec.traverse(deptype=("run"), root=True): - if d.package.extends(self.spec): - # Packages may be installed in platform-specific or platform-independent - # site-packages directories - for directory in {self.platlib, self.purelib}: - env.prepend_path("PYTHONPATH", os.path.join(d.prefix, directory)) + if dependent_spec.package.extends(self.spec): + # Packages may be installed in platform-specific or platform-independent + # site-packages directories + for directory in {self.platlib, self.purelib}: + env.prepend_path("PYTHONPATH", os.path.join(dependent_spec.prefix, directory)) def setup_dependent_package(self, module, dependent_spec): """Called before python modules' install() methods.""" diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py index 60cbc76cd87fa1..694c2551e1b372 100644 --- a/var/spack/repos/builtin/packages/ruby/package.py +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -83,9 +83,8 @@ def url_for_version(self, version): return url.format(version.up_to(2), version) def setup_dependent_run_environment(self, env, dependent_spec): - for d in dependent_spec.traverse(deptype=("run"), root=True): - if d.package.extends(self.spec): - env.prepend_path("GEM_PATH", d.prefix) + if dependent_spec.package.extends(self.spec): + env.prepend_path("GEM_PATH", dependent_spec.prefix) def setup_dependent_package(self, module, dependent_spec): """Called before ruby modules' install() methods. 
Sets GEM_HOME diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index 33bff65e252138..c0082dc52cc1f7 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -151,13 +151,12 @@ def setup_dependent_build_environment(self, env, dependent_spec): # https://core.tcl-lang.org/tk/tktview/447bd3e4abe17452d19a80e6840dcc8a2603fcbc env.prepend_path("TCLLIBPATH", self.spec["tcl"].libs.directories[0], separator=" ") - for d in dependent_spec.traverse(deptype=("build", "run", "test")): - if d.package.extends(self.spec): - # Tcl libraries may be installed in lib or lib64, see #19546 - for lib in ["lib", "lib64"]: - tcllibpath = join_path(d.prefix, lib) - if os.path.exists(tcllibpath): - env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") + if dependent_spec.package.extends(self.spec): + # Tcl libraries may be installed in lib or lib64, see #19546 + for lib in ["lib", "lib64"]: + tcllibpath = join_path(dependent_spec.prefix, lib) + if os.path.exists(tcllibpath): + env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") def setup_dependent_run_environment(self, env, dependent_spec): """Set TCLLIBPATH to include the tcl-shipped directory for @@ -167,10 +166,9 @@ def setup_dependent_run_environment(self, env, dependent_spec): * https://wiki.tcl-lang.org/page/TCLLIBPATH """ - for d in dependent_spec.traverse(deptype=("build", "run", "test")): - if d.package.extends(self.spec): - # Tcl libraries may be installed in lib or lib64, see #19546 - for lib in ["lib", "lib64"]: - tcllibpath = join_path(d.prefix, lib) - if os.path.exists(tcllibpath): - env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") + if dependent_spec.package.extends(self.spec): + # Tcl libraries may be installed in lib or lib64, see #19546 + for lib in ["lib", "lib64"]: + tcllibpath = join_path(dependent_spec.prefix, lib) + if os.path.exists(tcllibpath): + env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") From 077cddc2b6e60305072af5674a6eb4f6ba02dc1e Mon Sep 17 00:00:00 2001 From: Cody Balos Date: Thu, 19 Oct 2023 12:08:24 -0700 Subject: [PATCH 249/408] add nvechip to sundials components when mfem+rocm (#40512) --- var/spack/repos/builtin/packages/mfem/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index b4c66447a16139..5fac0860ea1040 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -1198,6 +1198,8 @@ def sundials_components(self): sun_comps += ",nvecparhyp,nvecparallel" if "+cuda" in spec and "+cuda" in spec["sundials"]: sun_comps += ",nveccuda" + if "+rocm" in spec and "+rocm" in spec["sundials"]: + sun_comps += ",nvechip" return sun_comps @property From cadab557a9ac12a1661e398e7b43542ed2bf1b0d Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 19 Oct 2023 14:08:43 -0500 Subject: [PATCH 250/408] py-rasterio: add v1.3.9 (#40621) --- var/spack/repos/builtin/packages/py-rasterio/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-rasterio/package.py b/var/spack/repos/builtin/packages/py-rasterio/package.py index e837c282c1263f..83db363c9814c3 100644 --- a/var/spack/repos/builtin/packages/py-rasterio/package.py +++ b/var/spack/repos/builtin/packages/py-rasterio/package.py @@ -20,6 +20,7 @@ class PyRasterio(PythonPackage): maintainers("adamjstewart") version("master", branch="master") + version("1.3.9", sha256="fc6d0d290492fa1a5068711cfebb21cc936968891b7ed9da0690c8a7388885c5") version("1.3.8", sha256="ffdd18e78efdf8ad5861065fd812a66dd34264293317ff6540a078ea891cdef8") version("1.3.7", sha256="abfdcb8f10210b8fad939f40d545d6c47e9e3b5cf4a43773ca8dd11c58204304") version("1.3.6", sha256="c8b90eb10e16102d1ab0334a7436185f295de1c07f0d197e206d1c005fc33905") @@ -37,6 +38,7 @@ class PyRasterio(PythonPackage): version("1.0a12", sha256="47d460326e04c64590ff56952271a184a6307f814efc34fb319c12e690585f3c") # From pyproject.toml + depends_on("py-setuptools@67.8:", when="@1.3.9:", type="build") depends_on("py-cython@0.29.29:", when="@1.3.3:", type="build") depends_on("py-cython@0.29.24:0.29", when="@1.3.0:1.3.2", type="build") From 565a2735c2653eb84fcce3a8eada16845c05d11e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 23:02:51 +0200 Subject: [PATCH 251/408] build(deps): bump urllib3 from 2.0.6 to 2.0.7 in /lib/spack/docs (#40583) --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 8b6c32750401e2..67c09000fdbf75 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -5,7 +5,7 @@ sphinx-rtd-theme==1.3.0 python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 -urllib3==2.0.6 +urllib3==2.0.7 pytest==7.4.2 isort==5.12.0 black==23.9.1 From c5ad77ff68138e0b38b14b1eb663230c58e8087f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 23:03:23 +0200 Subject: [PATCH 252/408] build(deps): bump mypy from 1.6.0 to 1.6.1 in /.github/workflows/style (#40602) --- .github/workflows/style/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index 125c3559afe877..079c4557f64c72 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -2,6 +2,6 @@ black==23.9.1 clingo==5.6.2 flake8==6.1.0 isort==5.12.0 -mypy==1.6.0 +mypy==1.6.1 types-six==1.16.21.9 vermin==1.5.2 From 01616608c1cd2691cd2d0bf189325c59602c0d1c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 23:03:48 +0200 Subject: [PATCH 253/408] build(deps): bump mypy from 1.6.0 to 1.6.1 in /lib/spack/docs (#40603) --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 67c09000fdbf75..8c7b4e88cc47a1 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -10,4 +10,4 @@ pytest==7.4.2 isort==5.12.0 black==23.9.1 flake8==6.1.0 -mypy==1.6.0 +mypy==1.6.1 From 
b77a73e4439b33e5bdffe48bb778ce5684a3f495 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Oct 2023 23:04:40 +0200 Subject: [PATCH 254/408] build(deps): bump actions/checkout from 4.1.0 to 4.1.1 (#40584) --- .github/workflows/audit.yaml | 2 +- .github/workflows/bootstrap.yml | 22 +++++++++++----------- .github/workflows/build-containers.yml | 2 +- .github/workflows/ci.yaml | 2 +- .github/workflows/nightly-win-builds.yml | 2 +- .github/workflows/unit_tests.yaml | 10 +++++----- .github/workflows/valid-style.yml | 6 +++--- .github/workflows/windows_python.yml | 6 +++--- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index f5eddeca128646..749e6f526aa4b1 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -22,7 +22,7 @@ jobs: matrix: operating_system: ["ubuntu-latest", "macos-latest"] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: ${{inputs.python_version}} diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index ab98e83f4ccebf..db64ca94d5e6a0 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -24,7 +24,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison bison-devel libstdc++-static - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -62,7 +62,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ cmake bison - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -99,7 +99,7 @@ jobs: bzip2 curl file g++ gcc gfortran git gnupg2 gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -133,7 +133,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup repo @@ -158,7 +158,7 @@ jobs: run: | brew install cmake bison@2.7 tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap clingo run: | source share/spack/setup-env.sh @@ -179,7 +179,7 @@ jobs: run: | brew install tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap clingo run: | set -ex @@ -204,7 +204,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup repo @@ -247,7 +247,7 @@ jobs: bzip2 curl file g++ gcc patchelf gfortran git gzip \ make patch unzip xz-utils python3 python3-dev tree - name: 
Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -284,7 +284,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ gawk - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -317,7 +317,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh @@ -335,7 +335,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 5d6ba6adf565e1..807bf6c858d25d 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -56,7 +56,7 @@ jobs: if: github.repository == 'spack/spack' steps: - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Set Container Tag Normal (Nightly) run: | diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index af0d8dd8f98f10..047109ca76e70e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,7 +35,7 @@ jobs: core: ${{ steps.filter.outputs.core }} packages: ${{ steps.filter.outputs.packages }} steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 if: ${{ github.event_name == 'push' }} with: fetch-depth: 0 diff --git a/.github/workflows/nightly-win-builds.yml b/.github/workflows/nightly-win-builds.yml index 682ecc4b8384ab..511316a2a35504 100644 --- a/.github/workflows/nightly-win-builds.yml +++ b/.github/workflows/nightly-win-builds.yml @@ -14,7 +14,7 @@ jobs: build-paraview-deps: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 4e1d909f025848..7f7f3808b018c0 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -51,7 +51,7 @@ jobs: on_develop: false steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 @@ -98,7 +98,7 @@ jobs: shell: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 @@ -137,7 +137,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ 
make patch tcl unzip which xz - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Setup repo and non-root user run: | git --version @@ -156,7 +156,7 @@ jobs: clingo-cffi: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 @@ -191,7 +191,7 @@ jobs: matrix: python-version: ["3.11"] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index 2f93c627685e05..5b9f33913eadf6 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -18,7 +18,7 @@ jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: '3.11' @@ -35,7 +35,7 @@ jobs: style: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 @@ -69,7 +69,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ make patch tcl unzip which xz - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Setup repo and non-root user run: | git --version diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 9a99a6ef976516..137c00a9bdbc70 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -15,7 +15,7 @@ jobs: unit-tests: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 @@ -39,7 +39,7 @@ jobs: unit-tests-cmd: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 @@ -63,7 +63,7 @@ jobs: build-abseil: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 From 518c1d6e81d6cb45edb5fdd1ff6390d71ffb5bd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 20 Oct 2023 00:00:24 +0200 Subject: [PATCH 255/408] perl: change permissions in order to apply patch on version 5.38.0 (#40609) Co-authored-by: Harmen Stoppels --- .../repos/builtin/packages/perl/package.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git 
a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 7a4e15c1f7ca90..299ae19436eae9 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -32,6 +32,8 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package url = "http://www.cpan.org/src/5.0/perl-5.34.0.tar.gz" tags = ["windows"] + maintainers("LydDeb") + executables = [r"^perl(-?\d+.*)?$"] # see https://www.cpan.org/src/README.html for @@ -258,13 +260,23 @@ def determine_variants(cls, exes, version): # aren't writeable so make pp.c user writeable # before patching. This should probably walk the # source and make everything writeable in the future. + # The patch "zlib-ng.patch" also fail. So, apply chmod + # to Makefile.PL and Zlib.xs too. def do_stage(self, mirror_only=False): # Do Spack's regular stage super().do_stage(mirror_only) - # Add write permissions on file to be patched - filename = join_path(self.stage.source_path, "pp.c") - perm = os.stat(filename).st_mode - os.chmod(filename, perm | 0o200) + # Add write permissions on files to be patched + files_to_chmod = [ + join_path(self.stage.source_path, "pp.c"), + join_path(self.stage.source_path, "cpan/Compress-Raw-Zlib/Makefile.PL"), + join_path(self.stage.source_path, "cpan/Compress-Raw-Zlib/Zlib.xs"), + ] + for filename in files_to_chmod: + try: + perm = os.stat(filename).st_mode + os.chmod(filename, perm | 0o200) + except IOError: + continue def nmake_arguments(self): args = [] From 26c37790caa6f9a007ccce7883482e0f77c561c3 Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Thu, 19 Oct 2023 20:02:25 -0400 Subject: [PATCH 256/408] vtk-m: update to latest release (#40624) --- var/spack/repos/builtin/packages/vtk-m/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index bec8dc963af12d..ce58cc1d6f0ad7 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -29,7 +29,7 @@ class VtkM(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("release", branch="release") - version("2.1.0-rc1", sha256="337df672ac5c2e0b442571a1380aa98ae70a155c93488c32198d055cb893417a") + version("2.1.0-rc2", sha256="94631fff9f668f40c9c797f03cf32a0d22d57111e309b1e8133c2a3f292b4af1") version( "2.0.0", sha256="32643cf3564fa77f8e2a2a5456a574b6b2355bb68918eb62ccde493993ade1a3", From 664f7075c24919a272d72d117cda82011558ba20 Mon Sep 17 00:00:00 2001 From: wspear Date: Thu, 19 Oct 2023 22:24:17 -0700 Subject: [PATCH 257/408] TAU: Respect ~fortran for +mpi (#40617) --- var/spack/repos/builtin/packages/tau/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index d34b65d848573d..b61ab5753ca64c 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -261,8 +261,9 @@ def install(self, spec, prefix): if "+mpi" in spec: env["CC"] = spec["mpi"].mpicc env["CXX"] = spec["mpi"].mpicxx - env["F77"] = spec["mpi"].mpif77 - env["FC"] = spec["mpi"].mpifc + if "+fortran" in spec: + env["F77"] = spec["mpi"].mpif77 + env["FC"] = spec["mpi"].mpifc options.append("-mpiinc=%s" % spec["mpi"].prefix.include) options.append("-mpilib=%s" % spec["mpi"].prefix.lib) From 
c87d2af628689f7976ea8838871e49e1b3848eb8 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 20 Oct 2023 01:29:38 -0500 Subject: [PATCH 258/408] Drop support for external PythonX.Y (#40628) On some systems, multiple pythonx.y are placed in the same prefix as pythonx (where only one of them is associated with that pythonx). Spack external detection for Python was willing to register all of these as external versions. Moreover, the `package.py` for Python was able to distinguish these. This can cause an issue for some build systems, which will just look for python3 for example, so if that python3 is actually python3.6, and the build system needs 3.7 (which spack may have found in the same prefix, and offered as a suitable external), it will fail when invoking python3. To avoid that issue, we simply avoid treating pythonx.y as external candidates. In the above case, Spack would only detect a Python 3.6 external, and the build would be forced to use a Spack-built Python 3.7 (which we consider a good thing). --- var/spack/repos/builtin/packages/python/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 83dbfb382fd94d..8253ef0f9a6d92 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -326,7 +326,7 @@ class Python(Package): # An in-source build with --enable-optimizations fails for python@3.X build_directory = "spack-build" - executables = [r"^python[\d.]*[mw]?$"] + executables = [r"^python\d?$"] @classmethod def determine_version(cls, exe): From dac8d8be2955ece7c47dc22e3186d01ddd24b350 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 20 Oct 2023 09:51:49 +0200 Subject: [PATCH 259/408] schema/compilers.py: fix validation of 2+ entries (#40627) Fix the following syntax which validates only the first array entry: ```python "compilers": { "type": "array", "items": [ { "type": ... } ] } ``` to ```python "compilers": { "type": "array", "items": { "type": ... } } ``` which validates the entire array. Oops... 
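
A minimal sketch of the difference, assuming the standalone `jsonschema` package with Draft 7 semantics (the list form of `items` is positional "tuple" validation, so entries past the first are unconstrained by default; the object form applies to every entry):

```python
# Sketch, assuming the standalone jsonschema package (Draft 7 semantics).
import jsonschema

tuple_form = {"type": "array", "items": [{"type": "object"}]}   # old schema: positional
uniform_form = {"type": "array", "items": {"type": "object"}}   # fixed schema: every entry

data = [{"compiler": {}}, "not-an-object"]  # second entry should be rejected

print(jsonschema.Draft7Validator(tuple_form).is_valid(data))    # True  (only index 0 checked)
print(jsonschema.Draft7Validator(uniform_form).is_valid(data))  # False (every entry checked)
```
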
--- lib/spack/spack/schema/compilers.py | 106 ++++++++++++++-------------- 1 file changed, 52 insertions(+), 54 deletions(-) diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py index 6caaf9cc2385b6..924fee7a21ff76 100644 --- a/lib/spack/spack/schema/compilers.py +++ b/lib/spack/spack/schema/compilers.py @@ -14,63 +14,61 @@ properties = { "compilers": { "type": "array", - "items": [ - { - "type": "object", - "additionalProperties": False, - "properties": { - "compiler": { - "type": "object", - "additionalProperties": False, - "required": ["paths", "spec", "modules", "operating_system"], - "properties": { - "paths": { - "type": "object", - "required": ["cc", "cxx", "f77", "fc"], - "additionalProperties": False, - "properties": { - "cc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "f77": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "fc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - }, + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "compiler": { + "type": "object", + "additionalProperties": False, + "required": ["paths", "spec", "modules", "operating_system"], + "properties": { + "paths": { + "type": "object", + "required": ["cc", "cxx", "f77", "fc"], + "additionalProperties": False, + "properties": { + "cc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "f77": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "fc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, }, - "flags": { - "type": "object", - "additionalProperties": False, - "properties": { - "cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - }, - }, - "spec": {"type": "string"}, - "operating_system": {"type": "string"}, - "target": {"type": "string"}, - "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "modules": { - "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}] - }, - "implicit_rpaths": { - "anyOf": [ - {"type": "array", "items": {"type": "string"}}, - {"type": "boolean"}, - ] - }, - "environment": spack.schema.environment.definition, - "extra_rpaths": { - "type": "array", - "default": [], - "items": {"type": "string"}, + }, + "flags": { + "type": "object", + "additionalProperties": False, + "properties": { + "cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]}, }, }, - } - }, - } - ], + "spec": {"type": "string"}, + "operating_system": {"type": "string"}, + "target": {"type": "string"}, + "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "modules": { + "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}] + }, + "implicit_rpaths": { + "anyOf": [ + {"type": "array", "items": {"type": "string"}}, + {"type": "boolean"}, + ] + }, + "environment": spack.schema.environment.definition, + "extra_rpaths": { + 
"type": "array", + "default": [], + "items": {"type": "string"}, + }, + }, + } + }, + }, } } From 88a6a713ee4c920eec0e930eaf88d9c6a45f059d Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Fri, 20 Oct 2023 06:53:41 -0500 Subject: [PATCH 260/408] Add package py-macs3 and dependencies (#40498) * py-cykhash: adding new package py-cykhash * py-hmmlearn: adding new package py-hmmlearn * py-macs3: adding new package py-macs3 * py-macs3: adding python version restriction and other changes. --- .../builtin/packages/py-cykhash/package.py | 20 +++++++++++++ .../builtin/packages/py-hmmlearn/package.py | 28 +++++++++++++++++++ .../builtin/packages/py-macs3/package.py | 26 +++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-cykhash/package.py create mode 100644 var/spack/repos/builtin/packages/py-hmmlearn/package.py create mode 100644 var/spack/repos/builtin/packages/py-macs3/package.py diff --git a/var/spack/repos/builtin/packages/py-cykhash/package.py b/var/spack/repos/builtin/packages/py-cykhash/package.py new file mode 100644 index 00000000000000..85d67fb9799f9e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cykhash/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCykhash(PythonPackage): + """Cython wrapper for khash-sets/maps, efficient implementation of isin and unique.""" + + homepage = "https://github.com/realead/cykhash" + pypi = "cykhash/cykhash-2.0.1.tar.gz" + + maintainers("snehring") + + version("2.0.1", sha256="b4794bc9f549114d8cf1d856d9f64e08ff5f246bf043cf369fdb414e9ceb97f7") + + depends_on("py-setuptools", type="build") + depends_on("py-cython@0.28:", type="build") diff --git a/var/spack/repos/builtin/packages/py-hmmlearn/package.py b/var/spack/repos/builtin/packages/py-hmmlearn/package.py new file mode 100644 index 00000000000000..16bfb20ccf3f66 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-hmmlearn/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyHmmlearn(PythonPackage): + """hmmlearn is a set of algorithms for unsupervised learning and + inference of Hidden Markov Models.""" + + homepage = "https://github.com/hmmlearn/hmmlearn" + pypi = "hmmlearn/hmmlearn-0.3.0.tar.gz" + + maintainers("snehring") + + version("0.3.0", sha256="d13a91ea3695df881465e3d36132d7eef4e84d483f4ba538a4b46e24b5ea100f") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm@3.3:", type="build") + depends_on("py-pybind11@2.6:", type="build") + + depends_on("py-numpy@1.10:", type=("build", "run")) + depends_on("py-scikit-learn@0.16:", type=("build", "run")) + depends_on("py-scipy@0.19:", type=("build", "run")) + + conflicts("py-scikit-learn@=0.22.0", msg="Not compatible with scikit-learn@0.22.0") diff --git a/var/spack/repos/builtin/packages/py-macs3/package.py b/var/spack/repos/builtin/packages/py-macs3/package.py new file mode 100644 index 00000000000000..be94b9c290cd7c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-macs3/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMacs3(PythonPackage): + """MACS: Model-based Analysis for ChIP-Seq""" + + homepage = "https://github.com/macs3-project/MACS/" + pypi = "MACS3/MACS3-3.0.0b3.tar.gz" + + maintainers("snehring") + + version("3.0.0b3", sha256="caa794d4cfcd7368447eae15878505315dac44c21546e8fecebb3561e9cee362") + + depends_on("python@3.9:", type=("build", "run")) + + depends_on("py-setuptools@60.0:", type="build") + depends_on("py-cython@0.29:0", type=("build", "run")) + + depends_on("py-numpy@1.19:", type=("build", "run")) + depends_on("py-cykhash@2", type=("build", "run")) + depends_on("py-hmmlearn@0.3:", type=("build", "run")) From d67f425c73568768a98672cae40908b52fff29c3 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 20 Oct 2023 13:55:41 +0200 Subject: [PATCH 261/408] py-bidscoin: add v4.1.1 and py-argparse-manpage: add new package (#40414) * py-bidscoin: add 4.1.1 * Fix style * Fix restrictions for dependencies --- .../packages/py-argparse-manpage/package.py | 24 +++++++++++++++++++ .../builtin/packages/py-bidscoin/package.py | 15 ++++++++---- 2 files changed, 35 insertions(+), 4 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-argparse-manpage/package.py diff --git a/var/spack/repos/builtin/packages/py-argparse-manpage/package.py b/var/spack/repos/builtin/packages/py-argparse-manpage/package.py new file mode 100644 index 00000000000000..74108bfbdfbaaf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-argparse-manpage/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyArgparseManpage(PythonPackage): + """Build manual page from python's ArgumentParser object.""" + + homepage = "https://github.com/praiskup/argparse-manpage" + pypi = "argparse-manpage/argparse-manpage-4.5.tar.gz" + + version("4.5", sha256="213c061878a10bf0e40f6a293382f6e82409e5110d0683b16ebf87f903d604db") + + variant("setuptools", default=False, description="Enable the setuptools.builds_meta backend") + + depends_on("py-setuptools", type="build") + depends_on("py-packaging", type="build") + + depends_on("py-tomli", when="^python@:3.10", type=("build", "run")) + + depends_on("py-setuptools", when="+setuptools", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bidscoin/package.py b/var/spack/repos/builtin/packages/py-bidscoin/package.py index 3c840f69716f9c..a5b7dd83024578 100644 --- a/var/spack/repos/builtin/packages/py-bidscoin/package.py +++ b/var/spack/repos/builtin/packages/py-bidscoin/package.py @@ -13,20 +13,23 @@ class PyBidscoin(PythonPackage): homepage = "https://github.com/Donders-Institute/bidscoin" pypi = "bidscoin/bidscoin-3.7.4.tar.gz" + version("4.1.1", sha256="28730e9202d3c44d77c0bbdea9565e00adfdd23e85a6f3f121c1bfce1a7b462b") version("4.0.0", sha256="3b0c26f2e250e06b6f526cdbee09517e1f339da8035c0a316609b4463d75824d") version("3.7.4", sha256="efa32238fb7b75e533e7f5cc318ad5a703716d291985435d43f1de4f18402517") depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@62.2:", when="@4.1:", type="build") depends_on("py-setuptools@61:", when="@4:", type="build") depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") + depends_on("py-argparse-manpage+setuptools", when="@4.1:", type="build") depends_on("py-pandas", type=("build", "run")) depends_on("py-matplotlib", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) depends_on("py-pydicom@2:", type=("build", "run")) - depends_on("py-pyqt5@5.12.1:", type=("build", "run")) + depends_on("py-pyqt6", when="@4.1:", type=("build", "run")) depends_on("py-ruamel-yaml@0.15.35:", type=("build", "run")) + depends_on("py-tomli@1.1:", when="@4.1: ^python@:3.10", type=("build", "run")) depends_on("py-coloredlogs", type=("build", "run")) depends_on("py-tqdm@4.60:", when="@4:", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) @@ -34,6 +37,10 @@ class PyBidscoin(PythonPackage): depends_on("py-python-dateutil", type=("build", "run")) depends_on("py-nibabel", type=("build", "run")) depends_on("py-bids-validator", when="@4:", type=("build", "run")) - depends_on("py-pydeface", when="@4:", type=("build", "run")) - depends_on("py-pytest", when="@4:", type=("build", "run")) depends_on("dcm2niix", type=("build", "run")) + + # Historical dependencies + depends_on("py-pytest-runner", when="@:3", type="build") + depends_on("py-pyqt5@5.12.1:", when="@:4.0", type=("build", "run")) + depends_on("py-pydeface", when="@4.0", type=("build", "run")) + depends_on("py-pytest", when="@4.0", type=("build", "run")) From f01cde4a0091178f5a7fb61ebb86fd5671e2d8b6 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 20 Oct 2023 13:56:48 +0200 Subject: [PATCH 262/408] py-dcm2bids: add v3.1.0 (#40447) * py-dcm2bids: add 3.1.0 * Fix python restriction --- .../repos/builtin/packages/py-dcm2bids/package.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git 
a/var/spack/repos/builtin/packages/py-dcm2bids/package.py b/var/spack/repos/builtin/packages/py-dcm2bids/package.py index 2aa34b5eab1914..fe3deb15629ada 100644 --- a/var/spack/repos/builtin/packages/py-dcm2bids/package.py +++ b/var/spack/repos/builtin/packages/py-dcm2bids/package.py @@ -13,11 +13,15 @@ class PyDcm2bids(PythonPackage): homepage = "https://github.com/unfmontreal/Dcm2Bids" pypi = "dcm2bids/dcm2bids-2.1.9.tar.gz" + version("3.1.0", sha256="53a8a177d556df897e19d72bd517fdae0245927a8938bb9fbbd51f9f33f54f84") version("2.1.9", sha256="d962bd0a7f1ed200ecb699e8ddb29ff58f09ab2f850a7f37511b79c62189f715") - depends_on("python@3.7:", type=("build", "run")) + depends_on("python@3.8:", when="@3:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-setuptools-scm", type="build") - depends_on("py-future@0.17.1:", type=("build", "run")) + depends_on("py-packaging@23.1:", when="@3:", type=("build", "run")) depends_on("dcm2niix", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools-scm", when="@2", type="build") + depends_on("py-future@0.17.1:", when="@2", type=("build", "run")) From 308b6d5d908268de1c963ba8be11afce2d802265 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Fri, 20 Oct 2023 13:59:26 +0200 Subject: [PATCH 263/408] py-statsmodels: add 0.14.0 (#39156) * py-statsmodels: add 0.14.0 * Fix style * Update var/spack/repos/builtin/packages/py-statsmodels/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-statsmodels/package.py Co-authored-by: Adam J. Stewart * Remove python limits * Remove comment --------- Co-authored-by: Adam J. Stewart --- .../packages/py-statsmodels/package.py | 55 +++++++++++-------- 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-statsmodels/package.py b/var/spack/repos/builtin/packages/py-statsmodels/package.py index 36968986a33abc..2fe227de4532cf 100644 --- a/var/spack/repos/builtin/packages/py-statsmodels/package.py +++ b/var/spack/repos/builtin/packages/py-statsmodels/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import glob import os from spack.package import * @@ -13,7 +14,9 @@ class PyStatsmodels(PythonPackage): homepage = "https://www.statsmodels.org" pypi = "statsmodels/statsmodels-0.8.0.tar.gz" + git = "https://github.com/statsmodels/statsmodels.git" + version("0.14.0", sha256="6875c7d689e966d948f15eb816ab5616f4928706b180cf470fd5907ab6f647a4") version("0.13.5", sha256="593526acae1c0fda0ea6c48439f67c3943094c542fe769f8b90fe9e6c6cc4871") version("0.13.2", sha256="77dc292c9939c036a476f1770f9d08976b05437daa229928da73231147cde7d4") version("0.13.1", sha256="006ec8d896d238873af8178d5475203844f2c391194ed8d42ddac37f5ff77a69") @@ -23,36 +26,42 @@ class PyStatsmodels(PythonPackage): version("0.10.2", sha256="9cd2194c6642a8754e85f9a6e6912cdf996bebf6ff715d3cc67f65dadfd37cc9") version("0.10.1", sha256="320659a80f916c2edf9dfbe83512d9004bb562b72eedb7d9374562038697fa10") - depends_on("python@2.7:2.8,3.4:", when="@0.10.1:", type=("build", "link", "run")) - depends_on("python@3.6:", when="@0.12.1:", type=("build", "link", "run")) + depends_on("python@3.8:", when="@0.14:", type=("build", "link", "run")) + depends_on("python", type=("build", "link", "run")) - # according to https://www.statsmodels.org/dev/install.html earlier versions might work. 
- depends_on("py-setuptools", type="build") - depends_on("py-setuptools@59.2.0:", type="build", when="@0.13.5:") + depends_on("py-setuptools@59.2:", when="@0.13.3:", type="build") + depends_on("py-setuptools@0.6c5:", type="build") - # https://github.com/statsmodels/statsmodels/blob/01b19d7d111b29c183f620ff0a949ef6391ff8ee/pyproject.toml - depends_on("py-cython@0", type="build") - depends_on("py-cython@0.29.14:", type="build", when="@0.12.0:") - depends_on("py-cython@0.29.22:", type="build", when="@0.13.0:") - depends_on("py-cython@0.29.32:", type="build", when="@0.13.5:") + # pyproject.toml + depends_on("py-cython@0.29.26:2", when="@0.14:", type="build") + depends_on("py-cython@0.29.32:2", when="@0.13.5:0.13", type="build") + depends_on("py-cython@0.29.22:2", when="@0.13:", type="build") + depends_on("py-cython@0.29.14:2", when="@0.12:", type="build") + depends_on("py-cython@0.29:2", type="build") + depends_on("py-setuptools-scm+toml@7.0", when="@0.13.3:", type="build") # patsy@0.5.1 works around a Python change # https://github.com/statsmodels/statsmodels/issues/5343 and # https://github.com/pydata/patsy/pull/131 - depends_on("py-numpy@1.11.0:", type=("build", "link", "run"), when="@0.10.1:") - depends_on("py-numpy@1.15.0:", type=("build", "link", "run"), when="@0.12.1:") - depends_on("py-numpy@1.17.0:", type=("build", "link", "run"), when="@0.13.0:") - depends_on("py-pandas@0.19:", type=("build", "run"), when="@0.10.1:") - depends_on("py-pandas@0.23:", type=("build", "run"), when="@0.12.0:") - depends_on("py-pandas@0.25:", type=("build", "run"), when="@0.13.0:") - depends_on("py-patsy@0.4.0:", type=("build", "run"), when="@0.10.1:") - depends_on("py-patsy@0.5.1:", type=("build", "run"), when="@0.12.0:") - depends_on("py-patsy@0.5.2:", type=("build", "run"), when="@0.13.0:") - depends_on("py-scipy@0.18:", type=("build", "run"), when="@0.10.1:") - depends_on("py-scipy@1.2:", type=("build", "run"), when="@0.12.0:") - depends_on("py-scipy@1.3:", type=("build", "run"), when="@0.13.0:") - depends_on("py-packaging@21.3:", type=("build", "run"), when="@0.13.2:") + # requirements.txt + depends_on("py-numpy@1.18:", when="@0.14:", type=("build", "link", "run")) + depends_on("py-numpy@1.17:", when="@0.13:", type=("build", "link", "run")) + depends_on("py-numpy@1.15:", when="@0.12.1:", type=("build", "link", "run")) + depends_on("py-numpy@1.11:", when="@0.10.1:", type=("build", "link", "run")) + depends_on("py-scipy@1.4:", when="@0.13.5:", type=("build", "run")) + conflicts("^py-scipy@1.9.2") + depends_on("py-scipy@1.3:", when="@0.13:", type=("build", "run")) + depends_on("py-scipy@1.2:", when="@0.12:", type=("build", "run")) + depends_on("py-scipy@0.18:", when="@0.10.1:", type=("build", "run")) + depends_on("py-pandas@1:", when="@0.14:", type=("build", "run")) + depends_on("py-pandas@0.25:", when="@0.13:", type=("build", "run")) + depends_on("py-pandas@0.23:", when="@0.12:", type=("build", "run")) + depends_on("py-pandas@0.19:", when="@0.10.1:", type=("build", "run")) + depends_on("py-patsy@0.5.2:", when="@0.13:", type=("build", "run")) + depends_on("py-patsy@0.5.1:", when="@0.12:", type=("build", "run")) + depends_on("py-patsy@0.4:", when="@0.10.1:", type=("build", "run")) + depends_on("py-packaging@21.3:", when="@0.13.2:", type=("build", "run")) depends_on("py-pytest", type="test") From 311cb7f24fe236a5aeea4b3d7d5b1d929f6b5dc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 20 Oct 2023 14:00:46 +0200 Subject: [PATCH 264/408] [add] py-css-parser: new package 
(#40550) Co-authored-by: LydDeb --- .../builtin/packages/py-css-parser/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-css-parser/package.py diff --git a/var/spack/repos/builtin/packages/py-css-parser/package.py b/var/spack/repos/builtin/packages/py-css-parser/package.py new file mode 100644 index 00000000000000..7ed99c56ca6e20 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-css-parser/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCssParser(PythonPackage): + """A CSS Cascading Style Sheets library for Python.""" + + homepage = "https://github.com/ebook-utils/css-parser" + pypi = "css-parser/css-parser-1.0.9.tar.gz" + + maintainers("LydDeb") + + version("1.0.9", sha256="196db822cef22745af6a58d180cf8206949ced58b48f5f3ee98f1de1627495bb") + + depends_on("py-setuptools", type="build") From 002534fe0c1738df34a74c73ce22748de7016e71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 20 Oct 2023 14:01:38 +0200 Subject: [PATCH 265/408] [add] py-cssutils: new package (#40551) Co-authored-by: LydDeb --- .../builtin/packages/py-cssutils/package.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-cssutils/package.py diff --git a/var/spack/repos/builtin/packages/py-cssutils/package.py b/var/spack/repos/builtin/packages/py-cssutils/package.py new file mode 100644 index 00000000000000..3a6772edc5f134 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cssutils/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCssutils(PythonPackage): + """A CSS Cascading Style Sheets library for Python.""" + + homepage = "https://github.com/jaraco/cssutils" + pypi = "cssutils/cssutils-2.7.1.tar.gz" + + maintainers("LydDeb") + + version("2.7.1", sha256="340ecfd9835d21df8f98500f0dfcea0aee41cb4e19ecbc2cf94f0a6d36d7cb6c") + + depends_on("py-setuptools@56:", type="build") + depends_on("py-setuptools-scm@3.4.1:+toml", type="build") + depends_on("py-importlib-metadata", type=("build", "run"), when="^python@:3.7") From 4a9793f12cdf6dc4435829e032c53b1d8688b0cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 20 Oct 2023 14:03:48 +0200 Subject: [PATCH 266/408] [add] py-fraction: new package (#40554) Co-authored-by: LydDeb --- .../builtin/packages/py-fraction/package.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-fraction/package.py diff --git a/var/spack/repos/builtin/packages/py-fraction/package.py b/var/spack/repos/builtin/packages/py-fraction/package.py new file mode 100644 index 00000000000000..919d14cadf0c19 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-fraction/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyFraction(PythonPackage): + """ + Fraction carries out all the fraction operations including addition, subtraction, multiplicati + on, division, reciprocation. + """ + + homepage = "https://github.com/bradley101/fraction" + pypi = "Fraction/Fraction-2.2.0.tar.gz" + + maintainers("LydDeb") + + version("2.2.0", sha256="2c1179f20c8b749622935fe04db1c7f2987f011f2376bdad84c2a39c8e3d0fdb") + + depends_on("py-setuptools", type="build") From 5061ce1b307d3ab64a94c9948731e7000d231cd6 Mon Sep 17 00:00:00 2001 From: Claire Guilbaud <34576189+Bidibulke@users.noreply.github.com> Date: Fri, 20 Oct 2023 14:18:41 +0200 Subject: [PATCH 267/408] add recipes for sphinx-book-theme and its dependencies if unknown (#40312) * add recipes for sphinx-book-theme and its dependencies if unknown * fix version and mission https * fix based on reviewers remarks --- .../py-accessible-pygments/package.py | 19 +++++++++++++ .../py-pydata-sphinx-theme/package.py | 28 +++++++++++++++++++ .../packages/py-sphinx-book-theme/package.py | 22 +++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-accessible-pygments/package.py create mode 100644 var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py create mode 100644 var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py diff --git a/var/spack/repos/builtin/packages/py-accessible-pygments/package.py b/var/spack/repos/builtin/packages/py-accessible-pygments/package.py new file mode 100644 index 00000000000000..e2254161c79dda --- /dev/null +++ b/var/spack/repos/builtin/packages/py-accessible-pygments/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyAccessiblePygments(PythonPackage): + """This package includes a collection of accessible themes for pygments based on + different sources.""" + + homepage = "https://github.com/Quansight-Labs/accessible-pygments" + pypi = "accessible-pygments/accessible-pygments-0.0.4.tar.gz" + + version("0.0.4", sha256="e7b57a9b15958e9601c7e9eb07a440c813283545a20973f2574a5f453d0e953e") + + depends_on("py-pygments@1.5:", type=("build", "run")) + depends_on("py-setuptools", type=("build")) diff --git a/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py b/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py new file mode 100644 index 00000000000000..d1dfd45dab65b7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPydataSphinxTheme(PythonPackage): + """A clean, three-column, Bootstrap-based Sphinx theme by and for the PyData community.""" + + homepage = "https://pydata-sphinx-theme.readthedocs.io/en/stable" + pypi = "pydata_sphinx_theme/pydata_sphinx_theme-0.14.1.tar.gz" + + version("0.14.1", sha256="d8d4ac81252c16a002e835d21f0fea6d04cf3608e95045c816e8cc823e79b053") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-sphinx-theme-builder", type="build") + + depends_on("py-sphinx@5:", type=("build", "run")) + depends_on("py-beautifulsoup4", type=("build", "run")) + depends_on("py-docutils@:0.16,0.17.1:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-babel", type=("build", "run")) + depends_on("py-pygments@2.7:", type=("build", "run")) + depends_on("py-accessible-pygments", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py b/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py new file mode 100644 index 00000000000000..d34efeb4aefc3f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PySphinxBookTheme(PythonPackage): + """Lightweight Sphinx theme designed to mimic the look-and-feel of an interactive book.""" + + homepage = "https://sphinx-book-theme.readthedocs.io/en/latest" + pypi = "sphinx_book_theme/sphinx_book_theme-1.0.1.tar.gz" + + version("1.0.1", sha256="927b399a6906be067e49c11ef1a87472f1b1964075c9eea30fb82c64b20aedee") + + depends_on("python@3.7:", type=("build", "run")) + + depends_on("py-sphinx-theme-builder@0.2.0a7:", type="build") + + depends_on("py-sphinx@4:6", type=("build", "run")) + depends_on("py-pydata-sphinx-theme@0.13.3:", type=("build", "run")) From b9e872e5fa68c1ca32131cda8d9f2fc8a6ba6d7c Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 20 Oct 2023 07:23:54 -0500 Subject: [PATCH 268/408] TensorFlow/Keras/TensorBoard: add v2.14.0 (#40297) Co-authored-by: adamjstewart --- .../builtin/packages/py-keras/package.py | 3 +- .../builtin/packages/py-ml-dtypes/package.py | 25 +++ .../packages/py-tensorboard/package.py | 22 ++- .../builtin/packages/py-tensorflow/package.py | 171 ++++++------------ 4 files changed, 96 insertions(+), 125 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-ml-dtypes/package.py diff --git a/var/spack/repos/builtin/packages/py-keras/package.py b/var/spack/repos/builtin/packages/py-keras/package.py index 5604adf859792e..c6f65dc957cb07 100644 --- a/var/spack/repos/builtin/packages/py-keras/package.py +++ b/var/spack/repos/builtin/packages/py-keras/package.py @@ -21,6 +21,7 @@ class PyKeras(PythonPackage): git = "https://github.com/keras-team/keras.git" url = "https://github.com/keras-team/keras/archive/refs/tags/v2.7.0.tar.gz" + version("2.14.0", sha256="a845d446b6ae626f61dde5ab2fa952530b6c17b4f9ed03e9362bd20172d00cca") version("2.13.1", sha256="b3591493cce75a69adef7b192cec6be222e76e2386d132cd4e34aa190b0ecbd5") version("2.12.0", sha256="6336cebb6b2b0a91f7efd3ff3a9db3a94f2abccf07a40323138afb80826aec62") version("2.11.0", sha256="e7a7c4199ac76ea750d145c1d84ae1b932e68b9bca34e83596bd66b2fc2ad79e") @@ -61,7 +62,7 @@ class PyKeras(PythonPackage): depends_on("py-pydot", type=("build", "run")) depends_on("py-scipy", type=("build", "run")) depends_on("py-six", type=("build", "run")) - for minor_ver in range(6, 14): + for minor_ver in range(6, 15): depends_on( "py-tensorflow@2.{}".format(minor_ver), type=("build", "run"), diff --git a/var/spack/repos/builtin/packages/py-ml-dtypes/package.py b/var/spack/repos/builtin/packages/py-ml-dtypes/package.py new file mode 100644 index 00000000000000..192069e21973d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ml-dtypes/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMlDtypes(PythonPackage): + """A stand-alone implementation of several NumPy dtype extensions + used in machine learning libraries.""" + + homepage = "https://github.com/jax-ml/ml_dtypes" + pypi = "ml_dtypes/ml_dtypes-0.3.1.tar.gz" + git = "https://github.com/jax-ml/ml_dtypes.git" + submodules = True + + version("0.3.1", tag="v0.3.1", commit="bbeedd470ecac727c42e97648c0f27bfc312af30") + version("0.2.0", tag="v0.2.0", commit="5b9fc9ad978757654843f4a8d899715dbea30e88") + + depends_on("python@3.9:", when="@0.3:", type=("build", "link", "run")) + depends_on("py-numpy@1.21:", type=("build", "link", "run")) + # Build dependencies are overconstrained, older versions work just fine + depends_on("py-pybind11", type=("build", "link")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-tensorboard/package.py b/var/spack/repos/builtin/packages/py-tensorboard/package.py index f1cd059f69794d..f1a9b03bc16ee9 100644 --- a/var/spack/repos/builtin/packages/py-tensorboard/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboard/package.py @@ -7,9 +7,8 @@ class PyTensorboard(PythonPackage): - """TensorBoard is a suite of web applications for - inspecting and understanding your TensorFlow runs and - graphs.""" + """TensorBoard is a suite of web applications for inspecting and understanding + your TensorFlow runs and graphs.""" homepage = "https://github.com/tensorflow/tensorboard" url = "https://files.pythonhosted.org/packages/py3/t/tensorboard/tensorboard-2.9.1-py3-none-any.whl" @@ -17,6 +16,16 @@ class PyTensorboard(PythonPackage): maintainers("aweits") + version( + "2.14.1", + sha256="3db108fb58f023b6439880e177743c5f1e703e9eeb5fb7d597871f949f85fd58", + expand=False, + ) + version( + "2.14.0", + sha256="3667f9745d99280836ad673022362c840f60ed8fefd5a3e30bf071f5a8fd0017", + expand=False, + ) version( "2.13.0", sha256="ab69961ebddbddc83f5fa2ff9233572bdad5b883778c35e4fe94bf1798bd8481", @@ -118,6 +127,7 @@ class PyTensorboard(PythonPackage): expand=False, ) + depends_on("python@3.9:", type=("build", "run"), when="@2.14:") depends_on("python@3.8:", type=("build", "run"), when="@2.12:") depends_on("py-absl-py@0.4:", type=("build", "run")) depends_on("py-grpcio@1.48.2:", type=("build", "run"), when="@2.12:") @@ -135,10 +145,10 @@ class PyTensorboard(PythonPackage): depends_on("py-protobuf@3.6.0:3.19", type=("build", "run"), when="@:2.8") depends_on("py-requests@2.21.0:2", type=("build", "run")) depends_on("py-setuptools@41.0.0:", type=("build", "run")) + depends_on("py-six@1.10.0:", type=("build", "run"), when="@:2.4,2.14:") depends_on("py-tensorboard-data-server@0.7", type=("build", "run"), when="@2.12:") depends_on("py-tensorboard-data-server@0.6", type=("build", "run"), when="@2.5:2.11") - depends_on("py-tensorboard-plugin-wit@1.6.0:", type=("build", "run")) + depends_on("py-tensorboard-plugin-wit@1.6.0:", type=("build", "run"), when="@:2.13") depends_on("py-werkzeug@1.0.1:", type=("build", "run"), when="@2.9:") depends_on("py-werkzeug@0.11.15:", type=("build", "run")) - depends_on("py-wheel@0.26:", type="build") - depends_on("py-six@1.10.0:", type=("build", "run"), when="@:2.4") + depends_on("py-wheel@0.26:", type="build", when="@:2.13") diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index cd90b532c93336..c85f078689fe7d 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py 
+++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -10,18 +10,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): - """An Open Source Machine Learning Framework for Everyone. - - TensorFlow is an end-to-end open source platform for machine learning. It has a - comprehensive, flexible ecosystem of tools, libraries, and community resources that - lets researchers push the state-of-the-art in ML and developers easily build and - deploy ML-powered applications. - - TensorFlow was originally developed by researchers and engineers working on the - Google Brain team within Google's Machine Intelligence Research organization to - conduct machine learning and deep neural networks research. The system is general - enough to be applicable in a wide variety of other domains, as well. - """ + """TensorFlow is an open source machine learning framework for everyone.""" homepage = "https://www.tensorflow.org" url = "https://github.com/tensorflow/tensorflow/archive/v2.3.1.tar.gz" @@ -29,6 +18,8 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): maintainers("adamjstewart", "aweits") import_modules = ["tensorflow"] + version("2.14.0", sha256="ce357fd0728f0d1b0831d1653f475591662ec5bca736a94ff789e6b1944df19f") + version("2.13.1", sha256="89c07aebd4f41fbe0d08cc88aef00305542134f2f16d3b62918dc3c1182f33e2") version("2.13.0", sha256="e58c939079588623e6fa1d054aec2f90f95018266e0a970fd353a5244f5173dc") version("2.12.1", sha256="6bc4600cc0b88e9e40f1800096f5bddbbd3b6e5527a030dea631b87f2ae46b5b") version("2.12.0", sha256="c030cb1905bff1d2446615992aad8d8d85cbe90c4fb625cee458c63bf466bc8e") @@ -157,16 +148,16 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): extends("python") # Python support based on wheel availability - depends_on("python@3.8:3.11", when="@2.12:", type=("build", "run")) - depends_on("python@3.7:3.10", when="@2.8:2.11", type=("build", "run")) - depends_on("python@3.7:3.9", when="@2.7", type=("build", "run")) - depends_on("python@3.6:3.9", when="@2.5:2.6", type=("build", "run")) - depends_on("python@3.6:3.8", when="@2.4", type=("build", "run")) - depends_on("python@3.5:3.8", when="@2.2:2.3", type=("build", "run")) - depends_on("python@2.7,3.5:3.7", when="@:2.1", type=("build", "run")) + depends_on("python@3.9:3.11", when="@2.14:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@2.12:2.13", type=("build", "run")) + depends_on("python@:3.10", when="@2.8:2.11", type=("build", "run")) + depends_on("python@:3.9", when="@2.5:2.7", type=("build", "run")) + depends_on("python@:3.8", when="@2.2:2.4", type=("build", "run")) + depends_on("python@:3.7", when="@:2.1", type=("build", "run")) # See .bazelversion - depends_on("bazel@5.3.0", type="build", when="@2.11:") + depends_on("bazel@6.1.0", type="build", when="@2.14:") + depends_on("bazel@5.3.0", type="build", when="@2.11:2.13") depends_on("bazel@5.1.1", type="build", when="@2.10") # See _TF_MIN_BAZEL_VERSION and _TF_MAX_BAZEL_VERSION in configure.py depends_on("bazel@4.2.2:5.99.0", type="build", when="@2.9") @@ -203,13 +194,15 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-astunparse@1.6:", type=("build", "run"), when="@2.7:") depends_on("py-astunparse@1.6.3:1.6", type=("build", "run"), when="@2.4:2.6") depends_on("py-astunparse@1.6.3", type=("build", "run"), when="@2.2:2.3") - depends_on("py-flatbuffers@23.1.21:", type=("build", "run"), when="@2.13:") + depends_on("py-flatbuffers@23.5.26:", type=("build", "run"), 
when="@2.14:") + depends_on("py-flatbuffers@23.1.21:", type=("build", "run"), when="@2.13") depends_on("py-flatbuffers@2:", type=("build", "run"), when="@2.10:2.12") depends_on("py-flatbuffers@1.12:1", type=("build", "run"), when="@2.9") depends_on("py-flatbuffers@1.12:", type=("build", "run"), when="@2.8") depends_on("py-flatbuffers@1.12:2", type=("build", "run"), when="@2.7") depends_on("py-flatbuffers@1.12", type=("build", "run"), when="@2.4:2.6") - depends_on("py-gast@0.2.1:0.4.0", type=("build", "run"), when="@2.9:") + depends_on("py-gast@0.2.1:0.4,0.5.3:", type=("build", "run"), when="@2.14:") + depends_on("py-gast@0.2.1:0.4.0", type=("build", "run"), when="@2.9:2.13") depends_on("py-gast@0.2.1:", type=("build", "run"), when="@2.8") depends_on("py-gast@0.2.1:0.4", type=("build", "run"), when="@2.7") depends_on("py-gast@0.4.0", type=("build", "run"), when="@2.5:2.6") @@ -232,6 +225,8 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("hdf5~mpi", type="build", when="@1.15.5,2.0.4,2.1.3:~mpi") depends_on("py-libclang@13:", type=("build", "run"), when="@2.9:") depends_on("py-libclang@9.0.1:", type=("build", "run"), when="@2.7:2.8") + depends_on("py-ml-dtypes@0.2.0", type=("build", "run"), when="@2.14:") + depends_on("py-numpy@1.23.5:", type=("build", "run"), when="@2.14:") depends_on("py-numpy@1.22:1.24.3", type=("build", "run"), when="@2.13:") depends_on("py-numpy@1.22:1.23", type=("build", "run"), when="@2.12") depends_on("py-numpy@1.20:", type=("build", "run"), when="@2.8:2.11") @@ -269,32 +264,17 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-six@1.10:", type=("build", "run"), when="@:2.0") depends_on("py-termcolor@1.1:", type=("build", "run"), when="@1.6:2.3,2.7:") depends_on("py-termcolor@1.1", type=("build", "run"), when="@2.4:2.6") - depends_on("py-typing-extensions@3.6.6:4.5", type=("build", "run"), when="@2.13:") - depends_on("py-typing-extensions@3.6.6:", type=("build", "run"), when="@2.7:2.12") + depends_on("py-typing-extensions@3.6.6:", type=("build", "run"), when="@2.7:2.12,2.14:") + depends_on("py-typing-extensions@3.6.6:4.5", type=("build", "run"), when="@2.13") depends_on("py-typing-extensions@3.7.4:3.7", type=("build", "run"), when="@2.4:2.6") - depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.13:") - depends_on("py-wrapt@1.11:1.14", type=("build", "run"), when="@2.12") - depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.7:2.11") + depends_on("py-wrapt@1.11:1.14", type=("build", "run"), when="@2.12,2.14:") + depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.7:2.11,2.13") depends_on("py-wrapt@1.12.1:1.12", type=("build", "run"), when="@2.4:2.6") depends_on("py-wrapt@1.11.1:", type=("build", "run"), when="@1.12.1,1.14:2.3") + # TODO: add packages for these dependencies # depends_on('py-tensorflow-io-gcs-filesystem@0.23.1:', type=('build', 'run'), when='@2.8:') # depends_on('py-tensorflow-io-gcs-filesystem@0.21:', type=('build', 'run'), when='@2.7') - with when("+rocm"): - depends_on("hip") - depends_on("rocrand") - depends_on("rocblas") - depends_on("rocfft") - depends_on("hipfft") - depends_on("rccl", when="+nccl") - depends_on("hipsparse") - depends_on("hipcub") - depends_on("rocsolver") - depends_on("rocprim") - depends_on("miopen-hip") - depends_on("llvm-amdgpu") - depends_on("hsa-rocr-dev") - depends_on("rocminfo") if sys.byteorder == "little": # Only builds correctly on little-endian machines @@ -304,7 +284,7 @@ class PyTensorflow(Package, CudaPackage, 
ROCmPackage, PythonExtension): depends_on("py-grpcio@1.32", type=("build", "run"), when="@2.4") depends_on("py-grpcio@1.8.6:", type=("build", "run"), when="@1.6:2.3") - for minor_ver in range(5, 14): + for minor_ver in range(5, 15): depends_on( "py-tensorboard@2.{}".format(minor_ver), type=("build", "run"), @@ -356,6 +336,22 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): # depends_on('android-ndk@10:18', when='+android') # depends_on('android-sdk', when='+android') + with when("+rocm"): + depends_on("hip") + depends_on("rocrand") + depends_on("rocblas") + depends_on("rocfft") + depends_on("hipfft") + depends_on("rccl", when="+nccl") + depends_on("hipsparse") + depends_on("hipcub") + depends_on("rocsolver") + depends_on("rocprim") + depends_on("miopen-hip") + depends_on("llvm-amdgpu") + depends_on("hsa-rocr-dev") + depends_on("rocminfo") + # Check configure and configure.py to see when these variants are supported conflicts("+mkl", when="@:1.0") conflicts("+mkl", when="platform=darwin", msg="Darwin is not yet supported") @@ -375,7 +371,13 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts("+gdr", when="@:1.3") conflicts("+verbs", when="@:1.1") conflicts("+ngraph", when="@:1.10") + conflicts("+opencl", when="platform=windows") conflicts("+computecpp", when="~opencl") + conflicts( + "+cuda", + when="+rocm", + msg="CUDA / ROCm are mututally exclusive. At most 1 GPU platform can be configured", + ) conflicts("+cuda", when="platform=darwin", msg="There is no GPU support for macOS") conflicts( "cuda_arch=none", @@ -422,6 +424,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts( "+nccl", when="platform=cray", msg="Currently NCCL is only supported on Linux platform" ) + conflicts("+mpi", when="platform=windows") conflicts("+mpi", when="@:1.2") conflicts("+android", when="@:1.4") conflicts("+ios", when="@:1.12.0,1.12.2:1.13") @@ -515,6 +518,7 @@ def setup_build_environment(self, env): # Please input the desired Python library path to use env.set("PYTHON_LIB_PATH", python_platlib) + env.set("TF_PYTHON_VERSION", spec["python"].version.up_to(2)) # Ensure swig is in PATH or set SWIG_PATH env.set("SWIG_PATH", spec["swig"].prefix.bin.swig) @@ -696,6 +700,12 @@ def setup_build_environment(self, env): else: env.set("TF_NEED_CUDA", "0") + # Do you want to use Clang to build TensorFlow? + if "%clang" in spec: + env.set("TF_NEED_CLANG", "1") + else: + env.set("TF_NEED_CLANG", "0") + # Do you wish to download a fresh release of clang? 
(Experimental) env.set("TF_DOWNLOAD_CLANG", "0") @@ -789,14 +799,6 @@ def post_configure_fixes(self): "tensorflow/workspace.bzl", ) - # starting with tensorflow 1.3, tensorboard becomes a dependency - # -> remove from list of required packages - filter_file( - r"'tensorflow-tensorboard", - r"#'tensorflow-tensorboard", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.5.0: ~gcp"): # google cloud support seems to be installed on default, leading # to boringssl error manually set the flag to false to avoid @@ -808,15 +810,6 @@ def post_configure_fixes(self): ".tf_configure.bazelrc", ) - if spec.satisfies("@1.6.0:2.1"): - # tensorboard name changed - # there are no corresponding versions of these in spack - filter_file( - r"(^\s*)'tensorboard (>=|~=)", - r"\1#'tensorboard \2", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.8.0: ~opencl"): # 1.8.0 and 1.9.0 aborts with numpy import error during python_api # generation somehow the wrong PYTHONPATH is used... @@ -826,64 +819,6 @@ def post_configure_fixes(self): f.write("build --distinct_host_configuration=false\n") f.write('build --action_env PYTHONPATH="{0}"\n'.format(env["PYTHONPATH"])) - if spec.satisfies("@1.13.1:"): - # tensorflow_estimator is an API for tensorflow - # tensorflow-estimator imports tensorflow during build, so - # tensorflow has to be set up first - filter_file( - r"(^\s*)'tensorflow_estimator (>=|~=)", - r"\1#'tensorflow_estimator \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.5"): - filter_file( - r"(^\s*)'keras-nightly (>=|~=)", - r"\1#'keras-nightly \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.6:"): - filter_file( - r"(^\s*)'keras (>=|~=)", r"\1#'keras \2", "tensorflow/tools/pip_package/setup.py" - ) - - if spec.satisfies("@2.6"): - filter_file( - r"(^\s*)'clang (>=|~=)", r"\1#'clang \2", "tensorflow/tools/pip_package/setup.py" - ) - - # TODO: add support for tensorflow-io-gcs-filesystem - if spec.satisfies("@2.7:"): - filter_file( - r"(^\s*)'tensorflow-io-gcs-filesystem (>=|~=)", - r"\1#'tensorflow-io-gcs-filesystem \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.0.0:"): - # now it depends on the nightly versions... - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tb-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'tb-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tensorflow-estimator-2.0-preview", - r"pass #REQUIRED_PACKAGES[i] = 'tensorflow-estimator-2.0-preview", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tf-estimator-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'tf-estimator-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'keras-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'keras-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.13.1 +nccl"): filter_file( r"^build --action_env NCCL_INSTALL_PATH=.*", From 54054acd44340555d5b975df05aa9edf6a7bf270 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 20 Oct 2023 14:37:07 +0200 Subject: [PATCH 269/408] ASP-based solver: minimize weights over edges (#40632) With the introduction of multiple build dependencies from the same package in the DAG, we need to minimize a few weights accounting for edges rather than nodes. 
If we don't do that we might have multiple "optimal" solutions that differ only in how the same nodes are connected together. This commit ensures optimal versions are picked per parent in case of multiple choices for a dependency. --- lib/spack/spack/solver/concretize.lp | 11 ++++++ lib/spack/spack/test/concretize.py | 37 +++++++++++++++++++ .../packages/py-floating/package.py | 26 +++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 var/spack/repos/duplicates.test/packages/py-floating/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 622547800f44e5..7630ec1c78abe7 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1539,6 +1539,17 @@ opt_criterion(5, "non-preferred targets"). build_priority(PackageNode, Priority) }. +% Choose more recent versions for nodes +opt_criterion(1, "edge wiring"). +#minimize{ 0@201: #true }. +#minimize{ 0@1: #true }. +#minimize{ + Weight@1,ParentNode,PackageNode + : version_weight(PackageNode, Weight), + not attr("root", PackageNode), + depends_on(ParentNode, PackageNode) +}. + %----------- % Notes %----------- diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index d54ee6ff755aa4..2f3380827b3139 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -2282,6 +2282,43 @@ def test_pure_build_virtual_dependency(self, strategy): s = Spec("virtual-build").concretized() assert s["pkgconfig"].name == "pkg-config" + @pytest.mark.regression("40595") + def test_no_multiple_solutions_with_different_edges_same_nodes(self): + r"""Tests that the root node, which has a dependency on py-setuptools without constraint, + doesn't randomly pick one of the two setuptools (@=59, @=60) needed by its dependency. + + o py-floating@1.25.0/3baitsp + |\ + | |\ + | | |\ + | o | | py-shapely@1.25.0/4hep6my + |/| | | + | |\| | + | | |/ + | |/| + | | o py-setuptools@60/cwhbthc + | |/ + |/| + | o py-numpy@1.25.0/5q5fx4d + |/| + | |\ + | o | py-setuptools@59/jvsa7sd + |/ / + o | python@3.11.2/pdmjekv + o | gmake@3.0/jv7k2bl + / + o gmake@4.1/uo6ot3d + """ + spec_str = "py-floating" + + root = spack.spec.Spec(spec_str).concretized() + assert root["py-shapely"].satisfies("^py-setuptools@=60") + assert root["py-numpy"].satisfies("^py-setuptools@=59") + + edges = root.edges_to_dependencies("py-setuptools") + assert len(edges) == 1 + assert edges[0].spec.satisfies("@=60") + @pytest.mark.parametrize( "v_str,v_opts,checksummed", diff --git a/var/spack/repos/duplicates.test/packages/py-floating/package.py b/var/spack/repos/duplicates.test/packages/py-floating/package.py new file mode 100644 index 00000000000000..2921b617bd76ad --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/py-floating/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyFloating(Package): + """An extension that depends on: + - py-setuptools without further constraints + - py-shapely, which depends on py-setuptools@=60 + - py-numpy, which depends on py-setuptools@=59 + + We need to ensure that by default the root node gets the best version + of setuptools it could. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.25.0", md5="0123456789abcdef0123456789abcdef") + + extends("python") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-shapely", type=("build", "run")) + depends_on("py-setuptools", type="build") From 3b6cb607a1f3b5e696fb19b7fb89158b35661497 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Fri, 20 Oct 2023 14:37:15 +0200 Subject: [PATCH 270/408] py-corner: new package (#40546) * [add] py-corner: new package * py-corner: remove py-wheel dependence with respect to reviewing commentary --------- Co-authored-by: LydDeb --- .../builtin/packages/py-corner/package.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-corner/package.py diff --git a/var/spack/repos/builtin/packages/py-corner/package.py b/var/spack/repos/builtin/packages/py-corner/package.py new file mode 100644 index 00000000000000..81ae512123dd39 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-corner/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCorner(PythonPackage): + """Make some beautiful corner plots.""" + + homepage = "https://corner.readthedocs.io" + pypi = "corner/corner-2.2.2.tar.gz" + + maintainers("LydDeb") + + version("2.2.2", sha256="4bc79f3b6778c270103f0926e64ef2606c48c3b6f92daf5382fc4babf5d608d1") + + depends_on("python@3.9:", type=("build", "run")) + depends_on("py-setuptools@62.0:", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-matplotlib@2.1:", type=("build", "run")) From b5d41eb70f286416cd5615e59e599dcfab77f685 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 20 Oct 2023 16:20:20 +0200 Subject: [PATCH 271/408] gromacs +cp2k: build in CI (#40494) * gromacs +cp2k: build in CI * libxsmm: x86 only * attempt to fix dbcsr + new mpich * use c11 standard * gromacs: does not depend on dbcsr * cp2k: build with cmake in CI, s.t. 
dbcsr is a separate package * cp2k: cmake patches for config files and C/C++ std * cp2k: remove unnecessary constraints due to patch --- .../stacks/e4s-neoverse_v1/spack.yaml | 1 + .../stacks/e4s-power/spack.yaml | 1 + .../cloud_pipelines/stacks/e4s/spack.yaml | 1 + .../packages/cp2k/cmake-fixes-2023.2.patch | 600 ++++++++++++++++++ .../repos/builtin/packages/cp2k/package.py | 15 +- .../repos/builtin/packages/dbcsr/package.py | 8 + .../repos/builtin/packages/gromacs/package.py | 1 - .../repos/builtin/packages/libxsmm/package.py | 3 + 8 files changed, 623 insertions(+), 7 deletions(-) create mode 100644 var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index db903c15c47a6f..d42e5f1fcade20 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -101,6 +101,7 @@ spack: - gmp - gotcha - gptune ~mpispawn + - gromacs +cp2k ^cp2k build_system=cmake - h5bench - hdf5-vol-async - hdf5-vol-cache diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 5f8f3d0e628dae..10bf4bc57d99f7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -101,6 +101,7 @@ spack: - gmp - gotcha - gptune + - gromacs +cp2k ^cp2k build_system=cmake - h5bench - hdf5-vol-async - hdf5-vol-cache diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 410a379ee06321..86eab1d4074d3a 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -107,6 +107,7 @@ spack: - gmp - gotcha - gptune ~mpispawn + - gromacs +cp2k ^cp2k build_system=cmake - h5bench - hdf5-vol-async - hdf5-vol-cache diff --git a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch new file mode 100644 index 00000000000000..2961a4ceee8d45 --- /dev/null +++ b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch @@ -0,0 +1,600 @@ +From 1897cbf3e467dc765f733b09af041fe8f25fa906 Mon Sep 17 00:00:00 2001 +From: Mathieu Taillefumier +Date: Thu, 19 Oct 2023 12:21:50 +0200 +Subject: [PATCH] [cmake] fix for building gromacs and cp2k with cmake and spack + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 3f81c7b524..1b6c6a0636 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -49,7 +49,8 @@ if(NOT DEFINED CMAKE_CUDA_STANDARD) + endif() + + # set language and standard +-set(CMAKE_CXX_STANDARD 11) ++set(CMAKE_CXX_STANDARD 14) ++set(CMAKE_C_STANDARD 11) + + find_package(PkgConfig) + +@@ -115,8 +116,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT + "Enable FFTW openmp support" ON "CP2K_USE_FFTW3" OFF) + cmake_dependent_option(CP2K_ENABLE_FFTW3_THREADS_SUPPORT + "Enable FFTW THREADS support" OFF "CP2K_USE_FFTW3" OFF) +-cmake_dependent_option(CP2K_ENABLE_F08_MPI "Enable MPI Fortran 2008 interface" +- OFF "CP2K_USE_MPI" OFF) ++cmake_dependent_option(CP2K_USE_MPI_F08 "Enable MPI Fortran 2008 interface" OFF ++ "CP2K_USE_MPI" OFF) + + cmake_dependent_option( + DBCSR_USE_ACCEL +@@ -748,7 +749,7 @@ add_subdirectory(src) + include(GNUInstallDirs) + + get_target_property(CP2K_LIBS 
cp2k_link_libs INTERFACE_LINK_LIBRARIES) +-configure_file(cmake/cp2k.pc.in cp2k.pc @ONLY) ++configure_file(cmake/libcp2k.pc.in libcp2k.pc @ONLY) + + message( + STATUS "--------------------------------------------------------------------") +@@ -1039,6 +1040,10 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" + "${PROJECT_BINARY_DIR}/cp2kConfigVersion.cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") + ++install(FILES "${PROJECT_BINARY_DIR}/libcp2k.pc" ++ DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") ++ ++ + install( + DIRECTORY "${PROJECT_SOURCE_DIR}/cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k" +diff --git a/cmake/FindBlas.cmake b/cmake/FindBlas.cmake +index 6e5fb78240..335cbd964a 100644 +--- a/cmake/FindBlas.cmake ++++ b/cmake/FindBlas.cmake +@@ -15,104 +15,108 @@ if(NOT + OR CMAKE_Fortran_COMPILER_LOADED)) + message(FATAL_ERROR "FindBLAS requires Fortran, C, or C++ to be enabled.") + endif() ++if(NOT CP2K_CONFIG_PACKAGE) ++ set(CP2K_BLAS_VENDOR_LIST ++ "auto" ++ "MKL" ++ "OpenBLAS" ++ "SCI" ++ "GenericBLAS" ++ "Armpl" ++ "FlexiBLAS" ++ "Atlas" ++ "NVHPCBlas" ++ "CUSTOM") ++ ++ set(__BLAS_VENDOR_LIST ${CP2K_BLAS_VENDOR_LIST}) ++ list(REMOVE_ITEM __BLAS_VENDOR_LIST "auto") ++ list(REMOVE_ITEM __BLAS_VENDOR_LIST "CUSTOM") ++ ++ # set(CP2K_BLAS_VENDOR "auto" CACHE STRING "Blas library for computations on ++ # host") ++ set_property(CACHE CP2K_BLAS_VENDOR PROPERTY STRINGS ${CP2K_BLAS_VENDOR_LIST}) ++ ++ if(NOT ${CP2K_BLAS_VENDOR} IN_LIST CP2K_BLAS_VENDOR_LIST) ++ message(FATAL_ERROR "Invalid Host BLAS backend") ++ endif() + +-set(CP2K_BLAS_VENDOR_LIST +- "auto" +- "MKL" +- "OpenBLAS" +- "SCI" +- "GenericBLAS" +- "Armpl" +- "FlexiBLAS" +- "Atlas" +- "NVHPCBlas" +- "CUSTOM") +- +-set(__BLAS_VENDOR_LIST ${CP2K_BLAS_VENDOR_LIST}) +-list(REMOVE_ITEM __BLAS_VENDOR_LIST "auto") +-list(REMOVE_ITEM __BLAS_VENDOR_LIST "CUSTOM") +- +-# set(CP2K_BLAS_VENDOR "auto" CACHE STRING "Blas library for computations on +-# host") +-set_property(CACHE CP2K_BLAS_VENDOR PROPERTY STRINGS ${CP2K_BLAS_VENDOR_LIST}) +- +-if(NOT ${CP2K_BLAS_VENDOR} IN_LIST CP2K_BLAS_VENDOR_LIST) +- message(FATAL_ERROR "Invalid Host BLAS backend") +-endif() +- +-set(CP2K_BLAS_THREAD_LIST "sequential" "thread" "gnu-thread" "intel-thread" +- "tbb-thread" "openmp") +- +-set(CP2K_BLAS_THREADING +- "sequential" +- CACHE STRING "threaded blas library") +-set_property(CACHE CP2K_BLAS_THREADING PROPERTY STRINGS +- ${CP2K_BLAS_THREAD_LIST}) +- +-if(NOT ${CP2K_BLAS_THREADING} IN_LIST CP2K_BLAS_THREAD_LIST) +- message(FATAL_ERROR "Invalid threaded BLAS backend") +-endif() ++ set(CP2K_BLAS_THREAD_LIST "sequential" "thread" "gnu-thread" "intel-thread" ++ "tbb-thread" "openmp") + +-set(CP2K_BLAS_INTERFACE_BITS_LIST "32bits" "64bits") +-set(CP2K_BLAS_INTERFACE +- "32bits" +- CACHE STRING +- "32 bits integers are used for indices, matrices and vectors sizes") +-set_property(CACHE CP2K_BLAS_INTERFACE +- PROPERTY STRINGS ${CP2K_BLAS_INTERFACE_BITS_LIST}) +- +-if(NOT ${CP2K_BLAS_INTERFACE} IN_LIST CP2K_BLAS_INTERFACE_BITS_LIST) +- message( +- FATAL_ERROR +- "Invalid parameters. 
Blas and lapack can exist in two flavors 32 or 64 bits interfaces (relevant mostly for mkl)" +- ) +-endif() ++ set(CP2K_BLAS_THREADING ++ "sequential" ++ CACHE STRING "threaded blas library") ++ set_property(CACHE CP2K_BLAS_THREADING PROPERTY STRINGS ++ ${CP2K_BLAS_THREAD_LIST}) + +-set(CP2K_BLAS_FOUND FALSE) ++ if(NOT ${CP2K_BLAS_THREADING} IN_LIST CP2K_BLAS_THREAD_LIST) ++ message(FATAL_ERROR "Invalid threaded BLAS backend") ++ endif() + +-# first check for a specific implementation if requested ++ set(CP2K_BLAS_INTERFACE_BITS_LIST "32bits" "64bits") ++ set(CP2K_BLAS_INTERFACE ++ "32bits" ++ CACHE STRING ++ "32 bits integers are used for indices, matrices and vectors sizes") ++ set_property(CACHE CP2K_BLAS_INTERFACE ++ PROPERTY STRINGS ${CP2K_BLAS_INTERFACE_BITS_LIST}) + +-if(NOT CP2K_BLAS_VENDOR MATCHES "auto|CUSTOM") +- find_package(${CP2K_BLAS_VENDOR} REQUIRED) +- if(TARGET CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas) +- get_target_property( +- CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property( +- CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas +- INTERFACE_LINK_LIBRARIES) +- set(CP2K_BLAS_FOUND TRUE) +- endif() +-else() +- if(CP2K_BLAS_VENDOR MATCHES "CUSTOM" AND NOT DEFINED CP2K_BLAS_LINK_LIBRARIES) ++ if(NOT ${CP2K_BLAS_INTERFACE} IN_LIST CP2K_BLAS_INTERFACE_BITS_LIST) + message( + FATAL_ERROR +- "Setting CP2K_BLAS_VENDOR=CUSTOM imply setting CP2K_BLAS_LINK_LIBRARIES\n and CP2K_LAPACK_LINK_LIBRARIES to the right libraries. See the README_cmake.md for more details" ++ "Invalid parameters. Blas and lapack can exist in two flavors 32 or 64 bits interfaces (relevant mostly for mkl)" + ) + endif() + +- if(DEFINED CP2K_BLAS_LINK_LIBRARIES) +- set(CP2K_BLAS_FOUND TRUE) ++ set(CP2K_BLAS_FOUND FALSE) ++ ++ # first check for a specific implementation if requested ++ ++ if(NOT CP2K_BLAS_VENDOR MATCHES "auto|CUSTOM") ++ find_package(${CP2K_BLAS_VENDOR} REQUIRED) ++ if(TARGET CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas) ++ get_target_property( ++ CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property( ++ CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas ++ INTERFACE_LINK_LIBRARIES) ++ set(CP2K_BLAS_FOUND TRUE) ++ endif() + else() +- # search for any blas implementation and exit immediately if one is found. +- # we could also give a full list of found implementation and let the user +- # choose which implementation to use +- foreach(_libs ${__BLAS_VENDOR_LIST}) +- # I exclude the first item of the list +- find_package(${_libs}) +- if(TARGET CP2K::BLAS::${_libs}::blas) +- get_target_property(CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${_libs}::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property(CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${_libs}::blas +- INTERFACE_LINK_LIBRARIES) +- set(CP2K_BLAS_VENDOR "${_libs}") +- set(CP2K_BLAS_FOUND TRUE) +- break() +- endif() +- endforeach() ++ if(CP2K_BLAS_VENDOR MATCHES "CUSTOM" AND NOT DEFINED ++ CP2K_BLAS_LINK_LIBRARIES) ++ message( ++ FATAL_ERROR ++ "Setting CP2K_BLAS_VENDOR=CUSTOM imply setting CP2K_BLAS_LINK_LIBRARIES\n and CP2K_LAPACK_LINK_LIBRARIES to the right libraries. See the README_cmake.md for more details" ++ ) ++ endif() ++ ++ if(DEFINED CP2K_BLAS_LINK_LIBRARIES) ++ set(CP2K_BLAS_FOUND TRUE) ++ else() ++ # search for any blas implementation and exit immediately if one is found. 
++ # we could also give a full list of found implementation and let the user ++ # choose which implementation to use ++ foreach(_libs ${__BLAS_VENDOR_LIST}) ++ # I exclude the first item of the list ++ find_package(${_libs}) ++ if(TARGET CP2K::BLAS::${_libs}::blas) ++ get_target_property(CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${_libs}::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property( ++ CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${_libs}::blas ++ INTERFACE_LINK_LIBRARIES) ++ set(CP2K_BLAS_VENDOR "${_libs}") ++ set(CP2K_BLAS_FOUND TRUE) ++ break() ++ endif() ++ endforeach() ++ endif() + endif() ++else() ++ set(CP2K_BLAS_FOUND ON) + endif() +- + # we exclude the CP2K_BLAS_INCLUDE_DIRS from the list of mandatory variables as + # having the fortran interface is usually enough. C, C++ and others languages + # might require this information though +diff --git a/cmake/FindLapack.cmake b/cmake/FindLapack.cmake +index 966e0d78d3..77a1e04258 100644 +--- a/cmake/FindLapack.cmake ++++ b/cmake/FindLapack.cmake +@@ -20,33 +20,34 @@ include(FindPackageHandleStandardArgs) + find_package(PkgConfig) + find_package(Blas REQUIRED) + +-if(CP2K_BLAS_FOUND) +- # LAPACK in the Intel MKL 10+ library? +- if(CP2K_BLAS_VENDOR MATCHES "MKL|OpenBLAS|Armpl|SCI|FlexiBLAS|NVHPC") +- # we just need to create the interface that's all +- set(CP2K_LAPACK_FOUND TRUE) +- get_target_property(CP2K_LAPACK_INCLUDE_DIRS CP2K::BLAS::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property(CP2K_LAPACK_LINK_LIBRARIES CP2K::BLAS::blas +- INTERFACE_LINK_LIBRARIES) +- else() +- # we might get lucky to find a pkgconfig package for lapack (fedora provides +- # one for instance) +- if(PKG_CONFIG_FOUND) +- pkg_check_modules(CP2K_LAPACK lapack) +- endif() ++if(NOT CP2K_CONFIG_PACKAGE) ++ if(CP2K_BLAS_FOUND) ++ # LAPACK in the Intel MKL 10+ library? ++ if(CP2K_BLAS_VENDOR MATCHES "MKL|OpenBLAS|Armpl|SCI|FlexiBLAS|NVHPC") ++ # we just need to create the interface that's all ++ set(CP2K_LAPACK_FOUND TRUE) ++ get_target_property(CP2K_LAPACK_INCLUDE_DIRS CP2K::BLAS::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property(CP2K_LAPACK_LINK_LIBRARIES CP2K::BLAS::blas ++ INTERFACE_LINK_LIBRARIES) ++ else() ++ # we might get lucky to find a pkgconfig package for lapack (fedora ++ # provides one for instance) ++ if(PKG_CONFIG_FOUND) ++ pkg_check_modules(CP2K_LAPACK lapack) ++ endif() + +- if(NOT CP2K_LAPACK_FOUND) +- find_library( +- CP2K_LAPACK_LINK_LIBRARIES +- NAMES "lapack" "lapack64" +- PATH_SUFFIXES "openblas" "openblas64" "openblas-pthread" +- "openblas-openmp" "lib" "lib64" +- NO_DEFAULT_PATH) ++ if(NOT CP2K_LAPACK_FOUND) ++ find_library( ++ CP2K_LAPACK_LINK_LIBRARIES ++ NAMES "lapack" "lapack64" ++ PATH_SUFFIXES "openblas" "openblas64" "openblas-pthread" ++ "openblas-openmp" "lib" "lib64" ++ NO_DEFAULT_PATH) ++ endif() + endif() + endif() + endif() +- + # check if found + find_package_handle_standard_args(Lapack + REQUIRED_VARS CP2K_LAPACK_LINK_LIBRARIES) +diff --git a/cmake/cp2k.pc.in b/cmake/cp2k.pc.in +deleted file mode 100644 +index 5b4a095660..0000000000 +--- a/cmake/cp2k.pc.in ++++ /dev/null +@@ -1,19 +0,0 @@ +-# this template is filled-in by CMake `configure_file(... 
@ONLY)` +-# the `@....@` are filled in by CMake configure_file(), +-# from variables set in your CMakeLists.txt or by CMake itself +-# +-# Good tutoral for understanding .pc files: +-# https://people.freedesktop.org/~dbn/pkg-config-guide.html +- +-prefix="@CMAKE_INSTALL_PREFIX@" +-exec_prefix="${prefix}" +-libdir="${prefix}/lib" +-includedir="${prefix}/include" +- +-Name: @PROJECT_NAME@ +-Description: @CMAKE_PROJECT_DESCRIPTION@ +-URL: @CMAKE_PROJECT_HOMEPAGE_URL@ +-Version: @PROJECT_VERSION@ +-Cflags: -I"${includedir}" +-Libs: -L"${libdir}" -lcp2k -lcp2k_dbm -lcp2k_grid -lcp2k_offload +-#Libs.private: -L"${libdir}" @CP2K_LIBS@ +\ No newline at end of file +diff --git a/cmake/cp2kConfig.cmake.in b/cmake/cp2kConfig.cmake.in +index a3acd47442..a9e0eb5a58 100644 +--- a/cmake/cp2kConfig.cmake.in ++++ b/cmake/cp2kConfig.cmake.in +@@ -5,112 +5,120 @@ + #! SPDX-License-Identifier: GPL-2.0-or-later ! + #!-------------------------------------------------------------------------------------------------! + +- + cmake_minimum_required(VERSION 3.22) ++include(CMakeFindDependencyMacro) ++ ++if(NOT TARGET cp2k::cp2k) ++ set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/modules" ++ ${CMAKE_MODULE_PATH}) ++ ++ # store CXX compiler id. Used in MKL package. ++ set(CP2K_CXX_COMPILER_ID @CMAKE_CXX_COMPILER_ID@) ++ if(NOT ${CMAKE_CXX_COMPILER_ID}) ++ set(CMAKE_CXX_COMPILER_ID ${CP2K_CXX_COMPILER_ID}) ++ endif() ++ ++ set(CP2K_BLAS_VENDOR @CP2K_BLAS_VENDOR@) ++ set(CP2K_SCALAPACK_VENDOR @CP2K_SCALAPACK_VENDOR@) ++ set(CP2K_BLAS_LINK_LIBRARIES @CP2K_BLAS_LINK_LIBRARIES@) ++ set(CP2K_LAPACK_LINK_LIBRARIES @CP2K_LAPACK_LINK_LIBRARIES@) ++ set(CP2K_SCALAPACK_LINK_LIBRARIES @CP2K_SCALAPACK_LINK_LIBRARIES@) ++ ++ set(CP2K_CONFIG_PACKAGE ON) ++ find_dependency(Lapack REQUIRED) ++ ++ # define lapack and blas TARGETS ++ ++ if(@CP2K_USE_MPI@) ++ find_dependency(SCALAPACK REQUIRED) ++ endif() ++ unset(CP2K_CONFIG_PACKAGE) ++ ++ set(cp2k_VERSION @cp2k_VERSION@) ++ ++ find_dependency(DBCSR 2.5 REQUIRED) ++ ++ if(@CP2K_USE_LIBXSMM@) ++ find_dependency(LibXSMM REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_HIP@) ++ # Find hip ++ find_dependency(hipfft REQUIRED IMPORTED CONFIG) ++ find_dependency(hipblas REQUIRED IMPORTED CONFIG) ++ endif() ++ ++ if(@CP2K_USE_CUDA@) ++ find_dependency(CUDAToolkit REQUIRED) ++ endif() ++ if(@CP2K_USE_ELPA@) ++ find_dependency(Elpa REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_LIBXC@) ++ find_dependency(LibXC 6 REQUIRED EXACT) ++ endif() ++ ++ if(@CP2K_USE_COSMA@) ++ find_dependency(cosma REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_MPI@) ++ find_dependency(MPI REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_FFTW3@) ++ find_dependency(Fftw REQUIRED) ++ endif() ++ # QUIP ++ if(@CP2K_USE_QUIP@) ++ find_dependency(Quip REQUIRED) ++ endif() + +-# store CXX compiler id. Used in MKL package. 
+-set(SIRIUS_CXX_COMPILER_ID @CMAKE_CXX_COMPILER_ID@) +-if(NOT ${CMAKE_CXX_COMPILER_ID}) +- set(CMAKE_CXX_COMPILER_ID ${SIRIUS_CXX_COMPILER_ID}) +-endif() +- +-set(CP2K_BLAS_VENDOR @CP2K_BLAS_VENDOR@) +-set(CP2K_SCALAPACK_VENDOR @CP2K_SCALAPACK_VENDOR@) +- +-if (@CP2K_BLAS_VENDOR@ MATCHES "CUSTOM") +- set(CP2K_BLAS_LINK_LIBRARIES @CP2K_BLAS_LINK_LIBRARIES@) +- set(CP2K_LAPACK_LINK_LIBRARIES @CP2K_LAPACK_LINK_LIBRARIES@) +-endif() +- +-if (@CP2K_SCALAPACK_VENDOR@ MATCHES "CUSTOM") +- set(CP2K_SCALAPACK_LINK_LIBRARIES @CP2K_SCALAPACK_LINK_LIBRARIES@) +-endif() +- +-find_package(Lapack REQUIRED) +-find_package(DBCSR 2.4 REQUIRED) +- +-if(@CP2K_USE_LIBXSMM@ +- find_package(LibXSMM REQUIRED) +-endif() +- +-if (@@CP2K_USE_HIP@) +- # Find hip +- find_package(hipfft REQUIRED IMPORTED CONFIG) +- find_package(hipblas REQUIRED IMPORTED CONFIG) +-endif() +- +-if (@@CP2K_USE_CUDA@) +- find_package(CUDAToolkit REQUIRED) +-endif() +-if(@CP2K_USE_ELPA@) +- find_package(Elpa REQUIRED) +-endif() +- +-if(@CP2K_USE_LIBXC@) +- find_package(LibXC 6 REQUIRED EXACT) +-endif() +- +-if(@CP2K_USE_COSMA@) +- find_package(cosma REQUIRED) +-endif() ++ # libint + +-if (@@CP2K_USE_MPI@) +- find_package(MPI REQUIRED) +- find_package(SCALAPACK REQUIRED) +-endif() ++ if(@CP2K_USE_LIBINT2@) ++ find_dependency(Libint2 REQUIRED) ++ endif() + +-if(@CP2K_USE_FFTW3@) +- find_package(Fftw REQUIRED) +-endif() +- # QUIP +-if(@CP2K_USE_QUIP@) +- find_package(Quip REQUIRED) +-endif() ++ # spglib + +-# libint ++ if(@CP2K_USE_SPGLIB@) ++ find_dependency(LibSPG REQUIRED) ++ endif() + +-if(@CP2K_USE_LIBINT2@) +- find_package(Libint2 REQUIRED) +-endif() ++ if(@CP2K_USE_SPLA@) ++ find_dependency(SPLA REQUIRED) ++ endif() + +-# spglib ++ if(@CP2K_USE_SIRIUS@) ++ find_dependency(sirius REQUIRED) ++ endif() + +-if(@CP2K_USE_SPGLIB@) +- find_package(LibSPG REQUIRED) +-endif() ++ if(@CP2K_USE_SUPERLU@) ++ find_dependency(SuperLU REQUIRED) ++ endif() + +-if(@CP2K_USE_SPLA@) +- find_package(SPLA REQUIRED) +-endif() ++ if(@CP2K_USE_METIS@) ++ find_dependency(Metis) ++ endif() + +-if(@CP2K_USE_SIRIUS@) +- find_package(sirius REQUIRED) +-endif() +- +-if(@CP2K_USE_SUPERLU@) +- find_package(SuperLU REQUIRED) +-endif() ++ if(@CP2K_USE_PEXSI@) ++ # PEXSI 1.2 uses cmake as build system ++ find_dependency(PEXSI REQUIRED) ++ endif() + +-if(@CP2K_USE_PARMETIS@) +- find_package(Metis) +-endif() ++ if(@CP2K_USE_PLUMED@) ++ find_dependency(Plumed REQUIRED) ++ endif() + +-if(@CP2K_USE_PTSCOTCH@) +- find_package(Ptscotch REQUIRED) +-endif() ++ if(@CP2K_USE_LIBTORCH@) ++ find_dependency(Torch REQUIRED) ++ endif() + +-if(@CP2K_USE_PEXSI@) +- # PEXSI 1.2 uses cmake as build system +- find_package(PEXSI REQUIRED) +-endif() ++ include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") + +-if(@CP2K_USE_PLUMED@) +- find_package(Plumed REQUIRED) +-endif() ++ # Clean-up module path. 
++ list(REMOVE_ITEM CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/modules") + +-if(@CP2K_USE_LIBTORCH@) +- find_package(Torch REQUIRED) + endif() +- +-# Include SIRIUS target +-include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") +diff --git a/cmake/libcp2k.pc.in b/cmake/libcp2k.pc.in +new file mode 100644 +index 0000000000..618af55e28 +--- /dev/null ++++ b/cmake/libcp2k.pc.in +@@ -0,0 +1,11 @@ ++prefix="@CMAKE_INSTALL_PREFIX@" ++exec_prefix="${prefix}" ++libdir="${prefix}/@CMAKE_INSTALL_LIBDIR@" ++includedir="${prefix}/@CMAKE_INSTALL_INCLUDEDIR@" ++ ++Name: @PROJECT_NAME@ ++Description: @CMAKE_PROJECT_DESCRIPTION@ ++URL: @CMAKE_PROJECT_HOMEPAGE_URL@ ++Version: @PROJECT_VERSION@ ++Cflags: -I"${includedir}/cp2k" -I"${includedir}/cp2k/@CMAKE_Fortran_COMPILER_ID@-@CMAKE_Fortran_COMPILER_VERSION@" ++Libs: -L"${libdir}" -lcp2k +diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt +index dbc955885e..e003d4f88d 100644 +--- a/src/CMakeLists.txt ++++ b/src/CMakeLists.txt +@@ -1555,7 +1555,7 @@ target_compile_definitions( + cp2k + PUBLIC $<$:__parallel> + $<$:__SCALAPACK> +- $<$:__MPI_08> ++ $<$:__MPI_08> + __COMPILE_DATE=\"${CP2K_TIMESTAMP}\" + __COMPILE_HOST=\"${CP2K_HOST_NAME}\" + __COMPILE_REVISION=\"${CP2K_GIT_HASH}\" +@@ -1774,12 +1774,12 @@ install( + EXPORT cp2k_targets + FILE cp2kTargets.cmake + NAMESPACE cp2k:: +- DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") ++ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + +-install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k") ++install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") + + install( + DIRECTORY "${PROJECT_BINARY_DIR}/src/mod_files" +- DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k" ++ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" + FILES_MATCHING + PATTERN "*.mod") diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 94dfbfec400418..27deecf78472aa 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -276,12 +276,10 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("wannier90", when="@3.0+mpi") with when("build_system=cmake"): - depends_on("dbcsr") - depends_on("dbcsr@2.6:", when="@2023.2:") + depends_on("dbcsr@2.6:") depends_on("dbcsr+openmp", when="+openmp") depends_on("dbcsr+cuda", when="+cuda") depends_on("dbcsr+rocm", when="+rocm") - conflicts("+mpi_f08", when="@:2023.2") # CP2K needs compiler specific compilation flags, e.g. optflags conflicts("%apple-clang") @@ -352,6 +350,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): # These patches backport 2023.x fixes to previous versions patch("backport_avoid_null_2022.x.patch", when="@2022.1:2022.2 %aocc@:4.0") patch("backport_avoid_null_9.1.patch", when="@9.1 %aocc@:4.0") + patch("cmake-fixes-2023.2.patch", when="@2023.2 build_system=cmake") # Patch for an undefined constant due to incompatible changes in ELPA @when("@9.1:2022.2 +elpa") @@ -427,9 +426,13 @@ def edit(self, spec, prefix): ldflags = [] libs = [] - # CP2K Makefile doesn't set C standard, but the source code uses - # C99-style for-loops with inline definition of iterating variable. 
- cflags.append(self.compiler.c99_flag) + # CP2K Makefile doesn't set C standard + if spec.satisfies("@2023.2:"): + # Use of DBL_DECIMAL_DIG + cflags.append(self.compiler.c11_flag) + else: + # C99-style for-loops with inline definition of iterating variable. + cflags.append(self.compiler.c99_flag) if "%intel" in spec: cflags.append("-fp-model precise") diff --git a/var/spack/repos/builtin/packages/dbcsr/package.py b/var/spack/repos/builtin/packages/dbcsr/package.py index 57ff0b5a402f50..2a3251304f44c2 100644 --- a/var/spack/repos/builtin/packages/dbcsr/package.py +++ b/var/spack/repos/builtin/packages/dbcsr/package.py @@ -98,6 +98,14 @@ class Dbcsr(CMakePackage, CudaPackage, ROCmPackage): conflicts("smm=blas", when="+opencl") + with when("+mpi"): + # When using mpich 4.1 or higher, mpi_f08 has to be used, otherwise: + # Error: Type mismatch in argument 'baseptr' at (1); passed TYPE(c_ptr) + # to INTEGER(8) + conflicts("^mpich@4.1:", when="@:2.5") + conflicts("~mpi_f08", when="^mpich@4.1:") + depends_on("mpich+fortran", when="^mpich") + generator("ninja") depends_on("ninja@1.10:", type="build") diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index a4a024eb1cb618..052832e0036308 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -261,7 +261,6 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("hwloc", when="+hwloc@2019:") depends_on("cp2k@8.1:", when="+cp2k") - depends_on("dbcsr", when="+cp2k") depends_on("nvhpc", when="+cufftmp") diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 4e853c41cf31f8..4de81dace46dd2 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -82,6 +82,9 @@ class Libxsmm(MakefilePackage): # (). 
depends_on("binutils+ld+gas@2.33:", type="build", when="@:1.17") + # Intel Architecture or compatible CPU required + requires("target=x86_64:") + @property def libs(self): result = find_libraries(["libxsmm", "libxsmmf"], root=self.prefix, recursive=True) From 8bfcdc4fc0a665c69200e254980076c542e674b0 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 20 Oct 2023 17:09:19 +0200 Subject: [PATCH 272/408] concretize separately: show concretization time per spec as they concretize when verbose (#40634) --- lib/spack/spack/environment/environment.py | 51 ++++++----- lib/spack/spack/test/cmd/dev_build.py | 3 +- lib/spack/spack/util/parallel.py | 87 ++++--------------- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 4 +- 4 files changed, 47 insertions(+), 98 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 62dda31034d2b7..51ea453c39ef3c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1480,11 +1480,12 @@ def _concretize_separately(self, tests=False): self._add_concrete_spec(s, concrete, new=False) # Concretize any new user specs that we haven't concretized yet - arguments, root_specs = [], [] + args, root_specs, i = [], [], 0 for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints): if uspec not in old_concretized_user_specs: root_specs.append(uspec) - arguments.append((uspec_constraints, tests)) + args.append((i, uspec_constraints, tests)) + i += 1 # Ensure we don't try to bootstrap clingo in parallel if spack.config.get("config:concretizer", "clingo") == "clingo": @@ -1503,34 +1504,36 @@ def _concretize_separately(self, tests=False): _ = spack.compilers.get_compiler_config() # Early return if there is nothing to do - if len(arguments) == 0: + if len(args) == 0: return [] # Solve the environment in parallel on Linux start = time.time() - max_processes = min( - len(arguments), # Number of specs - spack.util.cpus.determine_number_of_jobs(parallel=True), - ) + num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True)) - # TODO: revisit this print as soon as darwin is parallel too + # TODO: support parallel concretization on macOS and Windows msg = "Starting concretization" - if sys.platform != "darwin": - pool_size = spack.util.parallel.num_processes(max_processes=max_processes) - if pool_size > 1: - msg = msg + " pool with {0} processes".format(pool_size) + if sys.platform not in ("darwin", "win32") and num_procs > 1: + msg += f" pool with {num_procs} processes" tty.msg(msg) - concretized_root_specs = spack.util.parallel.parallel_map( - _concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug() - ) + batch = [] + for i, concrete, duration in spack.util.parallel.imap_unordered( + _concretize_task, args, processes=num_procs, debug=tty.is_debug() + ): + batch.append((i, concrete)) + tty.verbose(f"[{duration:7.2f}s] {root_specs[i]}") + sys.stdout.flush() + + # Add specs in original order + batch.sort(key=lambda x: x[0]) + by_hash = {} # for attaching information on test dependencies + for root, (_, concrete) in zip(root_specs, batch): + self._add_concrete_spec(root, concrete) + by_hash[concrete.dag_hash()] = concrete finish = time.time() - tty.msg("Environment concretized in %.2f seconds." 
% (finish - start)) - by_hash = {} - for abstract, concrete in zip(root_specs, concretized_root_specs): - self._add_concrete_spec(abstract, concrete) - by_hash[concrete.dag_hash()] = concrete + tty.msg(f"Environment concretized in {finish - start:.2f} seconds") # Unify the specs objects, so we get correct references to all parents self._read_lockfile_dict(self._to_lockfile_dict()) @@ -2392,10 +2395,12 @@ def _concretize_from_constraints(spec_constraints, tests=False): invalid_constraints.extend(inv_variant_constraints) -def _concretize_task(packed_arguments): - spec_constraints, tests = packed_arguments +def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: + index, spec_constraints, tests = packed_arguments with tty.SuppressOutput(msg_enabled=False): - return _concretize_from_constraints(spec_constraints, tests) + start = time.time() + spec = _concretize_from_constraints(spec_constraints, tests) + return index, spec, time.time() - start def make_repo_path(root): diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index 71ab195b649c8e..c5a7b5c3bb801a 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -11,6 +11,7 @@ import spack.build_environment import spack.environment as ev +import spack.error import spack.spec import spack.store from spack.main import SpackCommand @@ -237,7 +238,7 @@ def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_en env("create", "test", "./spack.yaml") with ev.read("test"): - with pytest.raises(RuntimeError): + with pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError)): install() diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py index 06e9ed52256828..683835641ae17e 100644 --- a/lib/spack/spack/util/parallel.py +++ b/lib/spack/spack/util/parallel.py @@ -2,14 +2,11 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import contextlib import multiprocessing import os import sys import traceback -from .cpus import cpus_available - class ErrorFromWorker: """Wrapper class to report an error from a worker process""" @@ -56,79 +53,25 @@ def __call__(self, *args, **kwargs): return value -def raise_if_errors(*results, **kwargs): - """Analyze results from worker Processes to search for ErrorFromWorker - objects. If found print all of them and raise an exception. - - Args: - *results: results from worker processes - debug: if True show complete stacktraces - - Raise: - RuntimeError: if ErrorFromWorker objects are in the results - """ - debug = kwargs.get("debug", False) # This can be a keyword only arg in Python 3 - errors = [x for x in results if isinstance(x, ErrorFromWorker)] - if not errors: - return - - msg = "\n".join([error.stacktrace if debug else str(error) for error in errors]) - - error_fmt = "{0}" - if len(errors) > 1 and not debug: - error_fmt = "errors occurred during concretization of the environment:\n{0}" - - raise RuntimeError(error_fmt.format(msg)) - - -@contextlib.contextmanager -def pool(*args, **kwargs): - """Context manager to start and terminate a pool of processes, similar to the - default one provided in Python 3.X - - Arguments are forwarded to the multiprocessing.Pool.__init__ method. - """ - try: - p = multiprocessing.Pool(*args, **kwargs) - yield p - finally: - p.terminate() - p.join() - - -def num_processes(max_processes=None): - """Return the number of processes in a pool. 
- - Currently the function return the minimum between the maximum number - of processes and the cpus available. - - When a maximum number of processes is not specified return the cpus available. - - Args: - max_processes (int or None): maximum number of processes allowed - """ - max_processes or cpus_available() - return min(cpus_available(), max_processes) - - -def parallel_map(func, arguments, max_processes=None, debug=False): - """Map a task object to the list of arguments, return the list of results. +def imap_unordered(f, list_of_args, *, processes: int, debug=False): + """Wrapper around multiprocessing.Pool.imap_unordered. Args: - func (Task): user defined task object - arguments (list): list of arguments for the task - max_processes (int or None): maximum number of processes allowed - debug (bool): if False, raise an exception containing just the error messages + f: function to apply + list_of_args: list of tuples of args for the task + processes: maximum number of processes allowed + debug: if False, raise an exception containing just the error messages from workers, if True an exception with complete stacktraces Raises: RuntimeError: if any error occurred in the worker processes """ - task_wrapper = Task(func) - if sys.platform != "darwin" and sys.platform != "win32": - with pool(processes=num_processes(max_processes=max_processes)) as p: - results = p.map(task_wrapper, arguments) - else: - results = list(map(task_wrapper, arguments)) - raise_if_errors(*results, debug=debug) - return results + if sys.platform in ("darwin", "win32") or len(list_of_args) == 1: + yield from map(f, list_of_args) + return + + with multiprocessing.Pool(processes) as p: + for result in p.imap_unordered(Task(f), list_of_args): + if isinstance(result, ErrorFromWorker): + raise RuntimeError(result.stacktrace if debug else str(result)) + yield result diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 905901ff295e5d..245bb51933ccf0 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -144,7 +144,7 @@ default: - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - - spack + - spack -v --config-scope "${SPACK_CI_CONFIG_ROOT}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" @@ -197,7 +197,7 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . - - spack + - spack -v ci generate --check-index-only --buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" From 608f1b5688030dd66e92aa05b2d1f9b7924dba47 Mon Sep 17 00:00:00 2001 From: "Garth N. 
Wells" Date: Fri, 20 Oct 2023 18:04:02 +0100 Subject: [PATCH 273/408] py-fenics-ffcx: update to v0.7 (#40569) --- .../packages/py-fenics-ffcx/package.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py index 45c11e7952f596..2cd0584a662527 100644 --- a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py @@ -15,24 +15,41 @@ class PyFenicsFfcx(PythonPackage): maintainers("chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="7f3c3ca91d63ce7831d37799cc19d0551bdcd275bdfa4c099711679533dd1c71") version("0.6.0", sha256="076fad61d406afffd41019ae1abf6da3f76406c035c772abad2156127667980e") version( "0.5.0.post0", sha256="039908c9998b51ba53e5deb3a97016062c262f0a4285218644304f7d3cd35882" ) version("0.4.2", sha256="3be6eef064d6ef907245db5b6cc15d4e603762e68b76e53e099935ca91ef1ee4") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools@58:", type=("build", "run")) + depends_on("python@3.8:", when="@0.7:", type=("build", "run")) + depends_on("py-setuptools@62:", when="@0.7:", type="build") + depends_on("py-setuptools@58:", when="@0.4.2:0.6", type="build") + # CFFI is required at runtime for JIT support depends_on("py-cffi", type=("build", "run")) + # py-numpy>=1.21 required because FFCx uses NumPy typing (version + # requirement not properly set in the FFCx pyproject.toml file) depends_on("py-numpy@1.21:", type=("build", "run")) depends_on("py-fenics-ufl@main", type=("build", "run"), when="@main") - depends_on("py-fenics-ufl@2023.1", type=("build", "run"), when="@0.6") + depends_on("py-fenics-ufl@2023.3.0:", type=("build", "run"), when="@0.8") + depends_on("py-fenics-ufl@2023.2.0", type=("build", "run"), when="@0.7") depends_on("py-fenics-ufl@2022.2.0", type=("build", "run"), when="@0.5.0:0.5") depends_on("py-fenics-ufl@2022.1.0", type=("build", "run"), when="@0.4.2") depends_on("py-fenics-basix@main", type=("build", "run"), when="@main") + depends_on("py-fenics-basix@0.7", type=("build", "run"), when="@0.7") depends_on("py-fenics-basix@0.6.0:0.6", type=("build", "run"), when="@0.6.0:0.6") depends_on("py-fenics-basix@0.5.1:0.5", type=("build", "run"), when="@0.5.0:0.5") depends_on("py-fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2") + + depends_on("py-pytest@6:", type="test") + depends_on("py-sympy", type="test") + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_build(self): + with working_dir("test"): + pytest = which("pytest") + pytest("--ignore=test_cmdline.py") From 8093c6b04db6ef409c723b853db98a036e0f444e Mon Sep 17 00:00:00 2001 From: Andrey Alekseenko Date: Sat, 21 Oct 2023 01:28:45 +0200 Subject: [PATCH 274/408] gromacs: add 2022.6, 2023.2, 2023.3 versions (#38906) * gromacs: add 2022.6, 2023.2 versions * gromacs: add version 2023.3 --- var/spack/repos/builtin/packages/gromacs/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 052832e0036308..e280234a0e45fa 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -30,8 +30,11 @@ class Gromacs(CMakePackage, CudaPackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("2023.3", 
sha256="4ec8f8d0c7af76b13f8fd16db8e2c120e749de439ae9554d9f653f812d78d1cb") + version("2023.2", sha256="bce1480727e4b2bb900413b75d99a3266f3507877da4f5b2d491df798f9fcdae") version("2023.1", sha256="eef2bb4a6cb6314cf9da47f26df2a0d27af4bf7b3099723d43601073ab0a42f4") version("2023", sha256="ac92c6da72fbbcca414fd8a8d979e56ecf17c4c1cdabed2da5cfb4e7277b7ba8") + version("2022.6", sha256="75d277138475679dd3e334e384a71516570cde767310476687f2a5b72333ea41") version("2022.5", sha256="083cc3c424bb93ffe86c12f952e3e5b4e6c9f6520de5338761f24b75e018c223") version("2022.4", sha256="c511be602ff29402065b50906841def98752639b92a95f1b0a1060d9b5e27297") version("2022.3", sha256="14cfb130ddaf8f759a3af643c04f5a0d0d32b09bc3448b16afa5b617f5e35dae") From c6a9d56daeae6e8ce25be67845255cb1ae11f3bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 21 Oct 2023 02:09:13 +0200 Subject: [PATCH 275/408] [add] py-dict2css: new package (#40552) Co-authored-by: LydDeb --- .../builtin/packages/py-dict2css/package.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-dict2css/package.py diff --git a/var/spack/repos/builtin/packages/py-dict2css/package.py b/var/spack/repos/builtin/packages/py-dict2css/package.py new file mode 100644 index 00000000000000..7e962e56d75e0a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dict2css/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDict2css(PythonPackage): + """A μ-library for constructing cascading style sheets from Python dictionaries.""" + + homepage = "https://github.com/sphinx-toolbox/dict2css" + pypi = "dict2css/dict2css-0.3.0.tar.gz" + + maintainers("LydDeb") + + version("0.3.0", sha256="1e8b1bf580dca2083198f88a60ec88c878a8829d760dfe45483ef80fe2905117") + + depends_on("py-whey", type="build") + depends_on("py-cssutils@2.2.0:", type=("build", "run")) + depends_on("py-domdf-python-tools@2.2.0:", type=("build", "run")) From 19fd8a8ebee9702a4a881a0a119be48889025820 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Sat, 21 Oct 2023 00:38:03 -0600 Subject: [PATCH 276/408] py-kombu: pick older version of py-setuptools (#40642) --- var/spack/repos/builtin/packages/py-kombu/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 9c732796cf30a6..23fc35f315f405 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -23,7 +23,7 @@ class PyKombu(PythonPackage): variant("redis", default=False, description="Use redis transport") - depends_on("py-setuptools", type="build") + depends_on("py-setuptools@:55", type="build") depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run")) depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run")) depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) From 7e8ef2cda5448d1f2205fb559f48efa79caceebb Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 21 Oct 2023 13:38:30 +0200 Subject: [PATCH 277/408] py-kombu: fix setuptools bound (#40646) --- var/spack/repos/builtin/packages/py-kombu/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 23fc35f315f405..6f13c380ffb840 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -23,7 +23,10 @@ class PyKombu(PythonPackage): variant("redis", default=False, description="Use redis transport") - depends_on("py-setuptools@:55", type="build") + depends_on("py-setuptools", type="build") + # "pytz>dev" in tests_require: setuptools parser changed in v60 and errors. + depends_on("py-setuptools@:59", when="@4.6:5.2", type="build") + depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run")) depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run")) depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) From 3ed2194926c155e8e5f9516d5e845ea502920ff5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Sat, 21 Oct 2023 15:26:36 +0200 Subject: [PATCH 278/408] intel-tbb: patch patch for Apple's patch (#40640) While e.g. GNU patch 2.7.6 (as provided by homebrew) would apply the previous version of this patch without problems, Apple's patch 2.0-12u11-Apple fails to find out which file to patch. Adding two lines to the patch fixes that. Renamed the patch in order to not require a `spack clean -m`. --- .../intel-tbb/{gcc_13-2021.patch => gcc_13-2021-v2.patch} | 2 ++ var/spack/repos/builtin/packages/intel-tbb/package.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) rename var/spack/repos/builtin/packages/intel-tbb/{gcc_13-2021.patch => gcc_13-2021-v2.patch} (92%) diff --git a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch similarity index 92% rename from var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch rename to var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch index e1e1b1116bf6f8..d1e87cd7c4d5ac 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch +++ b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch @@ -16,6 +16,8 @@ Signed-off-by: Sam James diff --git a/test/common/utils_assert.h b/test/common/utils_assert.h index 1df8ae72acc49fe38dac4d9bed4e9f4f26affcf5..0123ab881e124a800a5ebf8507050148038747d5 100644 +--- a/test/common/utils_assert.h ++++ b/test/common/utils_assert.h @@ -20,6 +20,8 @@ #include "config.h" #include "utils_report.h" diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 45761c7a06fcc3..14da30b2d430fe 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -125,7 +125,7 @@ class IntelTbb(CMakePackage, MakefilePackage): patch("gcc_generic-pedantic-4.4.patch", level=1, when="@:2019.0") # Patch and conflicts for GCC 13 support (#1031). - patch("gcc_13-2021.patch", when="@2021.1:") + patch("gcc_13-2021-v2.patch", when="@2021.1:") conflicts("%gcc@13", when="@:2021.3") # Patch cmakeConfig.cmake.in to find the libraries where we install them. 
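
A note on the directives being edited above and below: Spack packages declare patches either as files that live next to package.py in the repository or as URLs pinned by a sha256 checksum. The sketch below is purely illustrative. The class name, file name, URL, and checksums are placeholders invented for this example and are not part of any commit in this series, but the patch() and version() directives are the same ones used throughout. It also records the practical point behind the intel-tbb change just above: renaming an edited repository-local patch file lets Spack pick up the new contents without clearing the misc cache via `spack clean -m`.

from spack.package import *


class Example(AutotoolsPackage):
    """Hypothetical package sketching the two ways patches are declared."""

    homepage = "https://www.example.com"
    url = "https://www.example.com/example-1.0.tar.gz"

    # Placeholder checksum, not a real artifact.
    version("1.0", sha256="0" * 64)

    # A patch file shipped in the package directory. Editing such a file in
    # place can leave a stale cached entry behind, so bumping the file name
    # (as intel-tbb does above with gcc_13-2021-v2.patch) avoids having to
    # run `spack clean -m`.
    patch("fix-build-v2.patch", when="@1.0")

    # A patch fetched from a URL must pin its checksum, so the URL contents
    # need to be byte-stable; the placeholder sha256 here stands in for the
    # real digest of the downloaded diff.
    patch(
        "https://example.com/project/-/commit/abcd1234.diff",
        sha256="0" * 64,
        when="@1.0:",
    )
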
From 28e60a696ffec485c21c5dbd18ab12712feede6f Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Sun, 22 Oct 2023 01:52:44 -0700 Subject: [PATCH 279/408] Docs: Add version range example to conditional dependencies (#40630) * Docs: Add version range example to conditional dependencies * Add when context manager example --- lib/spack/docs/packaging_guide.rst | 45 ++++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ae6be5b4a6eb3e..157236ebfcc12e 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2557,9 +2557,10 @@ Conditional dependencies ^^^^^^^^^^^^^^^^^^^^^^^^ You may have a package that only requires a dependency under certain -conditions. For example, you may have a package that has optional MPI support, -- MPI is only a dependency when you want to enable MPI support for the -package. In that case, you could say something like: +conditions. For example, you may have a package with optional MPI support. +You would then provide a variant to reflect that the feature is optional +and specify the MPI dependency only applies when MPI support is enabled. +In that case, you could say something like: .. code-block:: python @@ -2567,13 +2568,39 @@ package. In that case, you could say something like: depends_on("mpi", when="+mpi") -``when`` can include constraints on the variant, version, compiler, etc. and -the :mod:`syntax` is the same as for Specs written on the command -line. -If a dependency/feature of a package isn't typically used, you can save time -by making it conditional (since Spack will not build the dependency unless it -is required for the Spec). +Suppose the above package also has, since version 3, optional `Trilinos` +support and you want them both to build either with or without MPI. Further +suppose you require a version of `Trilinos` no older than 12.6. In that case, +the `trilinos` variant and dependency directives would be: + +.. code-block:: python + + variant("trilinos", default=False, description="Enable Trilinos support") + + depends_on("trilinos@12.6:", when="@3: +trilinos") + depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi") + + +Alternatively, you could use the `when` context manager to equivalently specify +the `trilinos` variant dependencies as follows: + +.. code-block:: python + + with when("@3: +trilinos"): + depends_on("trilinos@12.6:") + depends_on("trilinos +mpi", when="+mpi") + + +The argument to ``when`` in either case can include any Spec constraints that +are supported on the command line using the same :ref:`syntax `. + +.. note:: + + If a dependency isn't typically used, you can save time by making it + conditional since Spack will not build the dependency unless it is + required for the Spec. + .. 
_dependency_dependency_patching: From 982f53b42a39a85be32a5dc2a478e687b643b0bf Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Sun, 22 Oct 2023 16:07:31 +0200 Subject: [PATCH 280/408] py-kiwisolver: add a new version (#40653) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/py-kiwisolver/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-kiwisolver/package.py b/var/spack/repos/builtin/packages/py-kiwisolver/package.py index 803646240a34cb..08ad89b0e4d407 100644 --- a/var/spack/repos/builtin/packages/py-kiwisolver/package.py +++ b/var/spack/repos/builtin/packages/py-kiwisolver/package.py @@ -12,6 +12,7 @@ class PyKiwisolver(PythonPackage): homepage = "https://github.com/nucleic/kiwi" pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz" + version("1.4.5", sha256="e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec") version("1.4.4", sha256="d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955") version("1.3.2", sha256="fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c") version("1.3.1", sha256="950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248") From c541003c18b25f439b4c12c0571552e6471d231b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 22 Oct 2023 19:17:48 +0200 Subject: [PATCH 281/408] zlib-ng: add v2.1.4 (#40647) --- var/spack/repos/builtin/packages/zlib-ng/package.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/zlib-ng/package.py b/var/spack/repos/builtin/packages/zlib-ng/package.py index d069545dc1ec78..8444736856a3c2 100644 --- a/var/spack/repos/builtin/packages/zlib-ng/package.py +++ b/var/spack/repos/builtin/packages/zlib-ng/package.py @@ -16,8 +16,17 @@ class ZlibNg(AutotoolsPackage, CMakePackage): maintainers("haampie") - version("2.1.3", sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a") - version("2.1.2", sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33") + version("2.1.4", sha256="a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a") + version( + "2.1.3", + sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a", + deprecated=True, + ) + version( + "2.1.2", + sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33", + deprecated=True, + ) version("2.0.7", sha256="6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200") version("2.0.0", sha256="86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8") From c02cbb4b6e6f488e2fc687b3c4bda5c5fd8d5179 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Sun, 22 Oct 2023 19:18:16 +0200 Subject: [PATCH 282/408] glib: add patch with a fix for PTRACE_0_EXITKILL (#40655) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/glib/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 7ccdf2fd2ad0c7..1dd0ad9ea2295c 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -173,6 +173,13 @@ class Glib(MesonPackage, AutotoolsPackage): patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72") patch("meson-gettext-2.70.patch", when="@2.70") + # Don't use PTRACE_O_EXITKILL if it's not defined + patch( + 
"https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.diff", + sha256="2c25d7b3bf581b3ec992d7af997fa6c769174d49b9350e0320c33f5e048cba99", + when="@2.78.0", + ) + def url_for_version(self, version): """Handle glib's version-based custom URLs.""" url = "https://download.gnome.org/sources/glib" From 15b38ceff37574fce909cc2798ec5daf7579e5b3 Mon Sep 17 00:00:00 2001 From: Bill Williams Date: Sun, 22 Oct 2023 22:11:19 +0200 Subject: [PATCH 283/408] Add Score-P 8.3 and dependencies (#40478) Includes Score-P 8.3 and Cubew/cubelib 4.8.2. --- var/spack/repos/builtin/packages/cubelib/package.py | 2 ++ var/spack/repos/builtin/packages/cubew/package.py | 2 ++ var/spack/repos/builtin/packages/scorep/package.py | 8 ++++++-- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 713c301f2ff246..919a001fedaa4f 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -11,7 +11,9 @@ class Cubelib(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubelib-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4") version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68") version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f") version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938") diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py index 6674a7cf662697..bcab0920fd1833 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -11,7 +11,9 @@ class Cubew(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubew-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2") version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3") version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d") version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5") diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index ba9ac487e521e1..316173a73e297d 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -16,6 +16,8 @@ class Scorep(AutotoolsPackage): url = "https://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-7.1/scorep-7.1.tar.gz" maintainers("wrwilliams") + version("8.3", sha256="76c914e6319221c059234597a3bc53da788ed679179ac99c147284dcefb1574a") + # version 8.2 was immediately superseded before it hit Spack version("8.1", sha256="3a40b481fce610871ddf6bdfb88a6d06b9e5eb38c6080faac6d5e44990060a37") version("8.0", sha256="4c0f34f20999f92ebe6ca1ff706d0846b8ce6cd537ffbedb49dfaef0faa66311") version("7.1", sha256="98dea497982001fb82da3429ca55669b2917a0858c71abe2cfe7cd113381f1f7") @@ -93,8 +95,10 @@ def url_for_version(self, version): # SCOREP 8 depends_on("binutils", type="link", 
when="@8:") depends_on("otf2@3:", when="@8:") - depends_on("cubew@4.8:", when="@8:") - depends_on("cubelib@4.8:", when="@8:") + depends_on("cubew@4.8.2:", when="@8.3:") + depends_on("cubelib@4.8.2:", when="@8.3:") + depends_on("cubew@4.8:", when="@8:8.2") + depends_on("cubelib@4.8:", when="@8:8.2") # fall through to Score-P 7's OPARI2, no new release # SCOREP 7 depends_on("otf2@2.3:2.3.99", when="@7.0:7") From 0d746f32c3ce74f994d5ec64125b0812a375c910 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:58:24 +0200 Subject: [PATCH 284/408] libxml2: fix GitLab patch (#40658) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/libxml2/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index f0de744c590f41..ca92e6994a682b 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -74,8 +74,8 @@ def url_for_version(self, version): # Use NAN/INFINITY if available to avoid SIGFPE # See https://gitlab.gnome.org/GNOME/libxml2/-/merge_requests/186 patch( - "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.patch", - sha256="3e06d42596b105839648070a5921157fe284b932289ffdbfa304ddc3457e5637", + "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.diff", + sha256="5dc43fed02b443d2563a502a52caafe39477c06fc30b70f786d5ed3eb5aea88d", when="@2.9.11:2.9.14", ) build_system(conditional("nmake", when="platform=windows"), "autotools", default="autotools") From 1b0d3c3a792c172c7aeb8b81030d31994f350fac Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:58:47 +0200 Subject: [PATCH 285/408] vtk: fix GitLab patch (#40659) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/vtk/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index 122e29408eb652..d73bb332594ea4 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -195,8 +195,8 @@ class Vtk(CMakePackage): ) patch( - "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.patch", - sha256="65175731c080961f85d779d613ac1f6bce89783745e54e864edec7637b03b18a", + "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.diff", + sha256="c446a90459b108082db5b28d9aeda99d030e636325e01929beba062cafb16b76", when="@9.1", ) From b0afe7ff443faac67d402532f0015e016c6ebb7e Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:10 +0200 Subject: [PATCH 286/408] garfieldpp: fix GitLab patch (#40660) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. 
--- var/spack/repos/builtin/packages/garfieldpp/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/garfieldpp/package.py b/var/spack/repos/builtin/packages/garfieldpp/package.py index 40671403cc7eb4..0bbdda3e3d9d8a 100644 --- a/var/spack/repos/builtin/packages/garfieldpp/package.py +++ b/var/spack/repos/builtin/packages/garfieldpp/package.py @@ -18,8 +18,8 @@ class Garfieldpp(CMakePackage): maintainers("mirguest") patch( - "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.patch", - sha256="440bc8129c55168e6c45d39e4344911d48ddb13fd3f9ee05974b2ede46a23b93", + "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.diff", + sha256="ea3b91d67011abe41e72c7b55578d14b77bd2ef5e7f344077091934b24f38f0d", when="@4.0", ) From bbc067885d18ced21d18ed98c1e38d7d954b5fdd Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:38 +0200 Subject: [PATCH 287/408] gobject-introspection: fix GitLab patch (#40661) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- .../repos/builtin/packages/gobject-introspection/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py index 4f46e4ef7029a0..c7bfb372b473a9 100644 --- a/var/spack/repos/builtin/packages/gobject-introspection/package.py +++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py @@ -74,8 +74,8 @@ class GobjectIntrospection(MesonPackage, AutotoolsPackage): # https://gitlab.gnome.org/GNOME/gobject-introspection/-/issues/325 patch( "https://gitlab.gnome.org/GNOME/gobject-introspection/-/commit/" - "1f9284228092b2a7200e8a78bc0ea6702231c6db.patch", - sha256="7700828b638c85255c87fcc317ea7e9572ff443f65c86648796528885e5b4cea", + "1f9284228092b2a7200e8a78bc0ea6702231c6db.diff", + sha256="dcb9e7c956dff49c3a73535829382e8662fa6bd13bdfb416e8eac47b2604fa0a", when="@:1.63.1", ) From a1cd07478341f24ec6fa34c399ff709b15362bbb Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:58 +0200 Subject: [PATCH 288/408] knem: fix GitLab patch (#40662) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. 
--- var/spack/repos/builtin/packages/knem/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py index 2b229c93d28755..3e1bcd925c8b3b 100644 --- a/var/spack/repos/builtin/packages/knem/package.py +++ b/var/spack/repos/builtin/packages/knem/package.py @@ -32,8 +32,8 @@ class Knem(AutotoolsPackage): variant("hwloc", default=True, description="Enable hwloc in the user-space tools") patch( - "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.patch", - sha256="78885a02d6f031a793db6a7190549f8d64c8606b353051d65f8e3f802b801902", + "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.diff", + sha256="a422277f02247bde680d4a3c8ccb8c05498a79109ba1ade4a037bedd6efe3c79", when="@1.1.4", ) From 7028e92a0a9fc0bbaa39804686573c291382843e Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 09:00:22 +0200 Subject: [PATCH 289/408] libtheora: fix GitLab patch (#40657) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/libtheora/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libtheora/package.py b/var/spack/repos/builtin/packages/libtheora/package.py index 6386da3497d85f..7c454a52504f25 100644 --- a/var/spack/repos/builtin/packages/libtheora/package.py +++ b/var/spack/repos/builtin/packages/libtheora/package.py @@ -46,8 +46,8 @@ class Libtheora(AutotoolsPackage, MSBuildPackage): patch("exit-prior-to-running-configure.patch", when="@1.1.1") patch("fix_encoding.patch", when="@1.1:") patch( - "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.patch", - sha256="8b1f256fa6bfb4ce1355c5be1104e8cfe695c8484d8ea19db06c006880a02298", + "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.diff", + sha256="e01ef71a1c19783a0b323b90a625e5c360ddb7ee03d2b6c201f1519f1704ea11", when="^libpng@1.6:", ) patch("libtheora-inc-external-ogg.patch", when="platform=windows") From 67be918d54582bfb181c30ff5a5c2141b9647eae Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Mon, 23 Oct 2023 00:15:03 -0700 Subject: [PATCH 290/408] Docs: Update spec variant checks plus python quotes and string formatting (#40643) --- .../docs/build_systems/autotoolspackage.rst | 95 +++++++++++-------- .../docs/build_systems/cachedcmakepackage.rst | 22 ++--- lib/spack/docs/build_systems/cudapackage.rst | 28 +++--- .../docs/build_systems/custompackage.rst | 32 +++---- .../docs/build_systems/makefilepackage.rst | 50 +++++----- .../docs/build_systems/pythonpackage.rst | 62 ++++++------ lib/spack/docs/build_systems/rocmpackage.rst | 23 +++-- lib/spack/docs/build_systems/sconspackage.rst | 20 ++-- lib/spack/docs/packaging_guide.rst | 8 +- 9 files changed, 176 insertions(+), 164 deletions(-) diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index abf25f149bc59a..8b8ccb8f35c1c7 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add: .. 
code-block:: python - depends_on('autoconf', type='build', when='@master') - depends_on('automake', type='build', when='@master') - depends_on('libtool', type='build', when='@master') + depends_on("autoconf", type="build", when="@master") + depends_on("automake", type="build", when="@master") + depends_on("libtool", type="build", when="@master") It is typically redundant to list the ``m4`` macro processor package as a dependency, since ``autoconf`` already depends on it. @@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script. .. code-block:: python def autoreconf(self, spec, prefix): - which('bash')('autogen.sh') + which("bash")("autogen.sh") """"""""""""""""""""""""""""""""""""""" patching configure or Makefile.in files @@ -186,9 +186,9 @@ To opt out of this feature, use the following setting: To enable it conditionally on different architectures, define a property and make the package depend on ``gnuconfig`` as a build dependency: -.. code-block +.. code-block:: python - depends_on('gnuconfig', when='@1.0:') + depends_on("gnuconfig", when="@1.0:") @property def patch_config_files(self): @@ -230,7 +230,7 @@ version, this can be done like so: @property def force_autoreconf(self): - return self.version == Version('1.2.3') + return self.version == Version("1.2.3") ^^^^^^^^^^^^^^^^^^^^^^^ Finding configure flags @@ -278,13 +278,22 @@ function like so: def configure_args(self): args = [] - if '+mpi' in self.spec: - args.append('--enable-mpi') + if self.spec.satisfies("+mpi"): + args.append("--enable-mpi") else: - args.append('--disable-mpi') + args.append("--disable-mpi") return args + +Alternatively, you can use the :ref:`enable_or_disable ` helper: + +.. code-block:: python + + def configure_args(self): + return [self.enable_or_disable("mpi")] + + Note that we are explicitly disabling MPI support if it is not requested. This is important, as many Autotools packages will enable options by default if the dependencies are found, and disable them @@ -295,9 +304,11 @@ and `here `_ @@ -113,7 +113,7 @@ you can do this like so: .. code-block:: python - build_targets = ['CC=cc'] + build_targets = ["CC=cc"] If you do need access to the spec, you can create a property like so: @@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so: spec = self.spec return [ - 'CC=cc', - 'BLASLIB={0}'.format(spec['blas'].libs.ld_flags), + "CC=cc", + f"BLASLIB={spec['blas'].libs.ld_flags}", ] @@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example: .. 
code-block:: python def edit(self, spec, prefix): - makefile = FileFilter('Makefile') + makefile = FileFilter("Makefile") - makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc) - makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx) - makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77) - makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc) + makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}") + makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}") + makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}") + makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}") `stream `_ @@ -181,16 +181,16 @@ well for storing variables: def edit(self, spec, prefix): config = { - 'CC': 'cc', - 'MAKE': 'make', + "CC": "cc", + "MAKE": "make", } - if '+blas' in spec: - config['BLAS_LIBS'] = spec['blas'].libs.joined() + if spec.satisfies("+blas"): + config["BLAS_LIBS"] = spec["blas"].libs.joined() - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for key in config: - inc.write('{0} = {1}\n'.format(key, config[key])) + inc.write(f"{key} = {config[key]}\n") `elk `_ @@ -204,14 +204,14 @@ them in a list: def edit(self, spec, prefix): config = [ - 'INSTALL_DIR = {0}'.format(prefix), - 'INCLUDE_DIR = $(INSTALL_DIR)/include', - 'LIBRARY_DIR = $(INSTALL_DIR)/lib', + f"INSTALL_DIR = {prefix}", + "INCLUDE_DIR = $(INSTALL_DIR)/include", + "LIBRARY_DIR = $(INSTALL_DIR)/lib", ] - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for var in config: - inc.write('{0}\n'.format(var)) + inc.write(f"{var}\n") `hpl `_ @@ -284,7 +284,7 @@ can tell Spack where to locate it like so: .. code-block:: python - build_directory = 'src' + build_directory = "src" ^^^^^^^^^^^^^^^^^^^ @@ -299,8 +299,8 @@ install the package: def install(self, spec, prefix): mkdir(prefix.bin) - install('foo', prefix.bin) - install_tree('lib', prefix.lib) + install("foo", prefix.bin) + install_tree("lib", prefix.lib) ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst index 17295a457fe139..168ff5dc88223c 100644 --- a/lib/spack/docs/build_systems/pythonpackage.rst +++ b/lib/spack/docs/build_systems/pythonpackage.rst @@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``, .. code-block:: python - homepage = 'https://pypi.org/project/setuptools/' - url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip' - list_url = 'https://pypi.org/simple/setuptools/' + homepage = "https://pypi.org/project/setuptools/" + url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip" + list_url = "https://pypi.org/simple/setuptools/" is equivalent to: .. code-block:: python - pypi = 'setuptools/setuptools-49.2.0.zip' + pypi = "setuptools/setuptools-49.2.0.zip" If a package has a different homepage listed on PyPI, you can @@ -208,7 +208,7 @@ dependencies to your package: .. code-block:: python - depends_on('py-setuptools@42:', type='build') + depends_on("py-setuptools@42:", type="build") Note that ``py-wheel`` is already listed as a build dependency in the @@ -232,7 +232,7 @@ Look for dependencies under the following keys: * ``dependencies`` under ``[project]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[project.optional-dependencies]`` @@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. 
In addition to * ``setup_requires`` These packages are usually only needed at build-time, so you can - add them with ``type='build'``. + add them with ``type="build"``. * ``install_requires`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``extras_require`` @@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to These are packages that are required to run the unit tests for the package. These dependencies can be specified using the - ``type='test'`` dependency type. However, the PyPI tarballs rarely + ``type="test"`` dependency type. However, the PyPI tarballs rarely contain unit tests, so there is usually no reason to add these. See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html @@ -321,7 +321,7 @@ older versions of flit may use the following keys: * ``requires`` under ``[tool.flit.metadata]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[tool.flit.metadata.requires-extra]`` @@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for: .. code-block:: python - depends_on('py-pip@22.1:', type='build') + depends_on("py-pip@22.1:", type="build") def config_settings(self, spec, prefix): return { - 'blas': spec['blas'].libs.names[0], - 'lapack': spec['lapack'].libs.names[0], + "blas": spec["blas"].libs.names[0], + "lapack": spec["lapack"].libs.names[0], } @@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so: def global_options(self, spec, prefix): options = [] - if '+libyaml' in spec: - options.append('--with-libyaml') + if spec.satisfies("+libyaml"): + options.append("--with-libyaml") else: - options.append('--without-libyaml') + options.append("--without-libyaml") return options @@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``: def install_options(self, spec, prefix): options = [] - if '+libyaml' in spec: + if spec.satisfies("+libyaml"): options.extend([ - spec['libyaml'].libs.search_flags, - spec['libyaml'].headers.include_flags, + spec["libyaml"].libs.search_flags, + spec["libyaml"].headers.include_flags, ]) return options @@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding .. code-block:: python - import_modules = ['six'] + import_modules = ["six"] Sometimes the list of module names to import depends on how the @@ -571,9 +571,9 @@ This can be expressed like so: @property def import_modules(self): - modules = ['yaml'] - if '+libyaml' in self.spec: - modules.append('yaml.cyaml') + modules = ["yaml"] + if self.spec.satisfies("+libyaml"): + modules.append("yaml.cyaml") return modules @@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset of module names to be skipped can be defined by using ``skip_modules``. If a defined module has submodules, they are skipped as well, e.g., in case the ``plotting`` modules should be excluded from the -automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface', -'nilearn.plotting', 'nilearn.plotting.data']`` set: +automatically detected ``import_modules`` ``["nilearn", "nilearn.surface", +"nilearn.plotting", "nilearn.plotting.data"]`` set: .. 
code-block:: python - skip_modules = ['nilearn.plotting'] + skip_modules = ["nilearn.plotting"] -This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']`` +This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]`` Import tests can be run during the installation using ``spack install --test=root`` or at any time after the installation using @@ -612,11 +612,11 @@ after the ``install`` phase: .. code-block:: python - @run_after('install') + @run_after("install") @on_package_attributes(run_tests=True) def install_test(self): - with working_dir('spack-test', create=True): - python('-c', 'import numpy; numpy.test("full", verbose=2)') + with working_dir("spack-test", create=True): + python("-c", "import numpy; numpy.test('full', verbose=2)") when testing is enabled during the installation (i.e., ``spack install @@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use: .. code-block:: python - build_directory = 'python' + build_directory = "python" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst index 636e5b812623f2..8f90794dfb7df5 100644 --- a/lib/spack/docs/build_systems/rocmpackage.rst +++ b/lib/spack/docs/build_systems/rocmpackage.rst @@ -81,28 +81,27 @@ class of your package. For example, you can add it to your class MyRocmPackage(CMakePackage, ROCmPackage): ... # Ensure +rocm and amdgpu_targets are passed to dependencies - depends_on('mydeppackage', when='+rocm') + depends_on("mydeppackage", when="+rocm") for val in ROCmPackage.amdgpu_targets: - depends_on('mydeppackage amdgpu_target={0}'.format(val), - when='amdgpu_target={0}'.format(val)) + depends_on(f"mydeppackage amdgpu_target={val}", + when=f"amdgpu_target={val}") ... def cmake_args(self): spec = self.spec args = [] ... - if '+rocm' in spec: + if spec.satisfies("+rocm"): # Set up the hip macros needed by the build args.extend([ - '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)]) - rocm_archs = spec.variants['amdgpu_target'].value - if 'none' not in rocm_archs: - args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}' - .format(",".join(rocm_archs))) + "-DENABLE_HIP=ON", + f"-DHIP_ROOT_DIR={spec['hip'].prefix}"]) + rocm_archs = spec.variants["amdgpu_target"].value + if "none" not in rocm_archs: + args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs}") else: # Ensure build with hip is disabled - args.append('-DENABLE_HIP=OFF') + args.append("-DENABLE_HIP=OFF") ... return args ... @@ -114,7 +113,7 @@ build. This example also illustrates how to check for the ``rocm`` variant using ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value -using ``self.spec.variants['amdgpu_target'].value``. +using ``self.spec.variants["amdgpu_target"].value``. All five packages using ``ROCmPackage`` as of January 2021 also use the :ref:`CudaPackage `. So it is worth looking at those packages diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst index 18002586a06c75..a17e1271b86d3b 100644 --- a/lib/spack/docs/build_systems/sconspackage.rst +++ b/lib/spack/docs/build_systems/sconspackage.rst @@ -57,7 +57,7 @@ overridden like so: .. code-block:: python def test(self): - scons('check') + scons("check") ^^^^^^^^^^^^^^^ @@ -88,7 +88,7 @@ base class already contains: .. 
code-block:: python - depends_on('scons', type='build') + depends_on("scons", type="build") If you want to specify a particular version requirement, you can override @@ -96,7 +96,7 @@ this in your package: .. code-block:: python - depends_on('scons@2.3.0:', type='build') + depends_on("scons@2.3.0:", type="build") ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so: def build_args(self, spec, prefix): args = [ - 'PREFIX={0}'.format(prefix), - 'ZLIB={0}'.format(spec['zlib'].prefix), + f"PREFIX={prefix}", + f"ZLIB={spec['zlib'].prefix}", ] - if '+debug' in spec: - args.append('DEBUG=yes') + if spec.satisfies("+debug"): + args.append("DEBUG=yes") else: - args.append('DEBUG=no') + args.append("DEBUG=no") return args @@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option: * env_vars: [ string ] Environment variables to propagate through to SCons. Either the string "all" or a comma separated list of variable names, e.g. - 'LD_LIBRARY_PATH,HOME'. - - default: 'LD_LIBRARY_PATH,PYTHONPATH' + "LD_LIBRARY_PATH,HOME". + - default: "LD_LIBRARY_PATH,PYTHONPATH" In the case of cantera, using ``env_vars=all`` allows us to use diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 157236ebfcc12e..d488ae0c7f1825 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1549,7 +1549,7 @@ its value: def configure_args(self): ... - if "+shared" in self.spec: + if self.spec.satisfies("+shared"): extra_args.append("--enable-shared") else: extra_args.append("--disable-shared") @@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s .. code-block:: python - if "languages=jit" in spec: + if spec.satisfies("languages=jit"): options.append("--enable-host-shared") """"""""""""""""""""""""""""""""""""""""""" @@ -3528,7 +3528,7 @@ need to override methods like ``configure_args``: def configure_args(self): args = ["--enable-cxx"] + self.enable_or_disable("libs") - if "libs=static" in self.spec: + if self.spec.satisfies("libs=static"): args.append("--with-pic") return args @@ -4391,7 +4391,7 @@ for supported features, for instance: .. 
code-block:: python - if "avx512" in spec.target: + if spec.satisfies("target=avx512"): args.append("--with-avx512") The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding From 954ec97178cc56dcc43e7b311f8855942979516c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 09:37:20 +0200 Subject: [PATCH 291/408] py-cython: new version, python 3.11 upperbound (#40343) --- var/spack/repos/builtin/packages/py-cython/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index ba371b1b1649e6..d0426c40392d97 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -13,6 +13,7 @@ class PyCython(PythonPackage): pypi = "cython/Cython-0.29.21.tar.gz" tags = ["build-tools"] + version("3.0.4", sha256="2e379b491ee985d31e5faaf050f79f4a8f59f482835906efe4477b33b4fbe9ff") version("3.0.0", sha256="350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82") version( "3.0.0a9", @@ -45,6 +46,9 @@ class PyCython(PythonPackage): version("0.23.5", sha256="0ae5a5451a190e03ee36922c4189ca2c88d1df40a89b4f224bc842d388a0d1b6") version("0.23.4", sha256="fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e") + # https://github.com/cython/cython/issues/5751 (distutils not yet dropped) + depends_on("python@:3.11", type=("build", "link", "run")) + # https://github.com/cython/cython/commit/1cd24026e9cf6d63d539b359f8ba5155fd48ae21 # collections.Iterable was removed in Python 3.10 depends_on("python@:3.9", when="@:0.29.14", type=("build", "link", "run")) From 0720274b03ab65546aee5b9f43917c033f9316cc Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 10:26:20 +0200 Subject: [PATCH 292/408] concretizer verbose: show progress in % too (#40654) --- lib/spack/spack/environment/environment.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 51ea453c39ef3c..0b36351d4e853c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1518,11 +1518,14 @@ def _concretize_separately(self, tests=False): tty.msg(msg) batch = [] - for i, concrete, duration in spack.util.parallel.imap_unordered( - _concretize_task, args, processes=num_procs, debug=tty.is_debug() + for j, (i, concrete, duration) in enumerate( + spack.util.parallel.imap_unordered( + _concretize_task, args, processes=num_procs, debug=tty.is_debug() + ) ): batch.append((i, concrete)) - tty.verbose(f"[{duration:7.2f}s] {root_specs[i]}") + percentage = (j + 1) / len(args) * 100 + tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}") sys.stdout.flush() # Add specs in original order From 1ad62404bdbf8467097f078363a93f67cd478d72 Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Mon, 23 Oct 2023 02:43:54 -0700 Subject: [PATCH 293/408] 3proxy: respect compiler choice (#39240) --- var/spack/repos/builtin/packages/3proxy/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/3proxy/package.py b/var/spack/repos/builtin/packages/3proxy/package.py index e9a408698b87ac..78e52895145b65 100644 --- a/var/spack/repos/builtin/packages/3proxy/package.py +++ b/var/spack/repos/builtin/packages/3proxy/package.py @@ -24,7 +24,9 @@ class _3proxy(MakefilePackage): 
depends_on("m4", type="build") def build(self, spec, prefix): - make("-f", f"Makefile.{platform.system()}") + make("-f", f"Makefile.{platform.system()}", f"CC={spack_cc}") def install(self, spec, prefix): - make("-f", f"Makefile.{platform.system()}", f"prefix={prefix}", "install") + make( + "-f", f"Makefile.{platform.system()}", f"prefix={prefix}", f"CC={spack_cc}", "install" + ) From a843453b50f9ab7b32b052306b00406b0f80133b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 16:22:41 +0200 Subject: [PATCH 294/408] nghttp2: add v1.57.0 (#40652) --- var/spack/repos/builtin/packages/nghttp2/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nghttp2/package.py b/var/spack/repos/builtin/packages/nghttp2/package.py index fe9d4f94e38a8e..2de551d8b5fa49 100644 --- a/var/spack/repos/builtin/packages/nghttp2/package.py +++ b/var/spack/repos/builtin/packages/nghttp2/package.py @@ -13,6 +13,7 @@ class Nghttp2(AutotoolsPackage): homepage = "https://nghttp2.org/" url = "https://github.com/nghttp2/nghttp2/releases/download/v1.26.0/nghttp2-1.26.0.tar.gz" + version("1.57.0", sha256="1e3258453784d3b7e6cc48d0be087b168f8360b5d588c66bfeda05d07ad39ffd") version("1.52.0", sha256="9877caa62bd72dde1331da38ce039dadb049817a01c3bdee809da15b754771b8") version("1.51.0", sha256="2a0bef286f65b35c24250432e7ec042441a8157a5b93519412d9055169d9ce54") version("1.50.0", sha256="d162468980dba58e54e31aa2cbaf96fd2f0890e6dd141af100f6bd1b30aa73c6") From b5bbcaf88be55cfa5f01a38a938296620b88db66 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 23 Oct 2023 17:11:51 +0200 Subject: [PATCH 295/408] geant4: add patch for when using the system expat library (#40650) Co-authored-by: jmcarcell --- .../packages/geant4/package-cache.patch | 48 +++++++++++++++++++ .../repos/builtin/packages/geant4/package.py | 3 ++ 2 files changed, 51 insertions(+) create mode 100644 var/spack/repos/builtin/packages/geant4/package-cache.patch diff --git a/var/spack/repos/builtin/packages/geant4/package-cache.patch b/var/spack/repos/builtin/packages/geant4/package-cache.patch new file mode 100644 index 00000000000000..835a4c34098d0e --- /dev/null +++ b/var/spack/repos/builtin/packages/geant4/package-cache.patch @@ -0,0 +1,48 @@ +diff --git a/cmake/Modules/G4CMakeUtilities.cmake b/cmake/Modules/G4CMakeUtilities.cmake +index 16f7b3c8c0..84acfcd5e7 100644 +--- a/cmake/Modules/G4CMakeUtilities.cmake ++++ b/cmake/Modules/G4CMakeUtilities.cmake +@@ -221,6 +221,21 @@ function(geant4_export_package_variables _file) + get_property(__var_value CACHE ${__var} PROPERTY VALUE) + get_property(__var_type CACHE ${__var} PROPERTY TYPE) + get_property(__var_help CACHE ${__var} PROPERTY HELPSTRING) ++ # Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 1. Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 2. 
Depending on CMake version, variable may be in cache but unitialized, here we want the local value ++ if(((NOT __var_value) AND (NOT __var_type) AND (NOT __var_help)) OR (__var_type STREQUAL "UNINITIALIZED")) ++ set(__var_value ${${__var}}) ++ # TODO: set type based on whether it looks like a bool or path, but PATH almost invariably what we save ++ # Only important in cmake GUI and if value needs to be changed, which we don't if package cache is used ++ set(__var_type PATH) ++ set(__var_help "no documentation, not a cache value") ++ endif() ++ + list(APPEND __local_build_setting "geant4_set_and_check_package_variable(${__var} \"${__var_value}\" ${__var_type} \"${__var_help}\")") + endforeach() + +diff --git a/cmake/Modules/G4OptionalComponents.cmake b/cmake/Modules/G4OptionalComponents.cmake +index 7b3a1f9836..f503a2994a 100644 +--- a/cmake/Modules/G4OptionalComponents.cmake ++++ b/cmake/Modules/G4OptionalComponents.cmake +@@ -78,6 +78,8 @@ else() + unset(EXPAT_FOUND) + unset(EXPAT_INCLUDE_DIR CACHE) + unset(EXPAT_LIBRARY CACHE) ++ unset(EXPAT_LIBRARY_RELEASE CACHE) ++ unset(EXPAT_LIBRARY_DEBUG CACHE) + message(FATAL_ERROR + "Detected system expat header and library: + EXPAT_INCLUDE_DIR = ${__badexpat_include_dir} +@@ -88,7 +90,7 @@ Set the above CMake variables to point to an expat install of the required versi + + # Backward compatibility for sources.cmake using the variable + set(EXPAT_LIBRARIES EXPAT::EXPAT) +- geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY) ++ geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY EXPAT_LIBRARY_RELEASE EXPAT_LIBRARY_DEBUG) + else() + set(EXPAT_FOUND TRUE) + set(GEANT4_USE_BUILTIN_EXPAT TRUE) \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index bf4ade6ce7baf7..afc4464b098bd8 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -151,6 +151,9 @@ def std_when(values): patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17") patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17") + # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556 + patch("package-cache.patch", level=1, when="@10.7.0:11.2.0^cmake@3.17:") + # NVHPC: "thread-local declaration follows non-thread-local declaration" conflicts("%nvhpc", when="+threads") From 79cda87b9bafeb6662623289a6710a07026ed678 Mon Sep 17 00:00:00 2001 From: Olivier Cessenat Date: Mon, 23 Oct 2023 18:56:12 +0200 Subject: [PATCH 296/408] ngspice: new version 41 and option osdi (#40664) --- var/spack/repos/builtin/packages/ngspice/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/ngspice/package.py b/var/spack/repos/builtin/packages/ngspice/package.py index c826b24052d635..08bbbd712f49e5 100644 --- a/var/spack/repos/builtin/packages/ngspice/package.py +++ b/var/spack/repos/builtin/packages/ngspice/package.py @@ -18,6 +18,7 @@ class Ngspice(AutotoolsPackage): # Master version by default adds the experimental adms feature version("master", branch="master") + version("41", sha256="1ce219395d2f50c33eb223a1403f8318b168f1e6d1015a7db9dbf439408de8c4") version("40", sha256="e303ca7bc0f594e2d6aa84f68785423e6bf0c8dad009bb20be4d5742588e890d") version("39", sha256="bf94e811eaad8aaf05821d036a9eb5f8a65d21d30e1cab12701885e09618d771") version("38", sha256="2c3e22f6c47b165db241cf355371a0a7558540ab2af3f8b5eedeeb289a317c56") @@ -52,6 +53,7 @@ class 
Ngspice(AutotoolsPackage): variant("openmp", default=False, description="Compile with multi-threading support") variant("readline", default=True, description="Build readline support (for bin)") variant("fft", default=True, description="Use external fftw lib") + variant("osdi", default=False, description="Use osdi/OpenVAF") depends_on("fftw-api@3:~mpi~openmp", when="+fft~openmp") depends_on("fftw-api@3:~mpi+openmp", when="+fft+openmp") @@ -120,6 +122,8 @@ def configure_args(self): args.append("--enable-openmp") if "~fft" in spec: args.append("--with-fftw3=no") + if "+osdi" in spec: + args.append("--enable-osdi") if "darwin" in spec.architecture: args.append("--enable-pss") if "@master" in spec: From c9d3abe5714b907ace4cfa5c11bf07322737ad50 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 20:22:39 +0200 Subject: [PATCH 297/408] audit: add check for GitLab patches (#40656) GitLab's .patch URLs only provide abbreviated hashes, while .diff URLs provide full hashes. There does not seem to be a parameter to force .patch URLs to also return full hashes, so we should make sure to use the .diff ones. --- lib/spack/spack/audit.py | 43 +++++++++++++------ lib/spack/spack/test/audit.py | 4 ++ .../invalid-gitlab-patch-url/package.py | 20 +++++++++ .../package.py | 20 +++++++++ 4 files changed, 73 insertions(+), 14 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py create mode 100644 var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index 176c45487f51d0..8b13ffc7cf72db 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -307,10 +307,17 @@ def _check_build_test_callbacks(pkgs, error_cls): @package_directives def _check_patch_urls(pkgs, error_cls): - """Ensure that patches fetched from GitHub have stable sha256 hashes.""" + """Ensure that patches fetched from GitHub and GitLab have stable sha256 + hashes.""" github_patch_url_re = ( r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/" - ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)" + r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)" + ) + # Only .diff URLs have stable/full hashes: + # https://forum.gitlab.com/t/patches-with-full-index/29313 + gitlab_patch_url_re = ( + r"^https?://(?:.+)?gitlab(?:.+)/" + r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)" ) errors = [] @@ -321,19 +328,27 @@ def _check_patch_urls(pkgs, error_cls): if not isinstance(patch, spack.patch.UrlPatch): continue - if not re.match(github_patch_url_re, patch.url): - continue - - full_index_arg = "?full_index=1" - if not patch.url.endswith(full_index_arg): - errors.append( - error_cls( - "patch URL in package {0} must end with {1}".format( - pkg_cls.name, full_index_arg - ), - [patch.url], + if re.match(github_patch_url_re, patch.url): + full_index_arg = "?full_index=1" + if not patch.url.endswith(full_index_arg): + errors.append( + error_cls( + "patch URL in package {0} must end with {1}".format( + pkg_cls.name, full_index_arg + ), + [patch.url], + ) + ) + elif re.match(gitlab_patch_url_re, patch.url): + if not patch.url.endswith(".diff"): + errors.append( + error_cls( + "patch URL in package {0} must end with .diff".format( + pkg_cls.name + ), + [patch.url], + ) ) - ) return errors diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py index 2efc2bbd88913a..a3d4bb8e3fbaf1 100644 --- a/lib/spack/spack/test/audit.py +++ 
b/lib/spack/spack/test/audit.py @@ -21,6 +21,10 @@ (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a GitHub patch URL without full_index=1 (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a stand-alone 'test*' method in build-time callbacks (["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has no issues diff --git a/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..527a1815e62863 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.com/QEF/q-e/-/commit/4ca3afd4c6f27afcf3f42415a85a353a7be1bd37.patch", + sha256="d7dec588efb5c04f99d949d8b9bb4a0fbc98b917ae79e12e4b87ad7c3dc9e268", + ) diff --git a/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..818876405c26f6 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidSelfhostedGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.patch", + sha256="2e811ec62cb09044c95a4d0213993f09af70cdcc1c709257b33bc9248ae950ed", + ) From 9da048dd44a633ba29e5a05b0e6f917fddc482a8 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 23 Oct 2023 13:56:27 -0500 Subject: [PATCH 298/408] py-scikit-learn: add v1.3.2 (#40672) --- .../builtin/packages/py-scikit-learn/package.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 389bc6d48bbb9d..05f6d09b53952b 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -17,6 +17,7 @@ class PyScikitLearn(PythonPackage): maintainers("adamjstewart") version("master", branch="master") + version("1.3.2", sha256="a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05") version("1.3.1", sha256="1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf") version("1.3.0", sha256="8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a") version("1.2.2", sha256="8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7") @@ -51,7 +52,8 @@ class PyScikitLearn(PythonPackage): variant("openmp", default=True, description="Build with OpenMP support") # Based on PyPI wheel availability - depends_on("python@3.8:3.11", when="@1.1.3:", type=("build", "run")) + depends_on("python@3.8:3.12", when="@1.3.1:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@1.1.3:1.3.0", type=("build", "run")) depends_on("python@3.8:3.10", when="@1.1.0:1.1.2", type=("build", "run")) depends_on("python@:3.10", when="@1.0.2", type=("build", "run")) depends_on("python@:3.9", when="@0.24:1.0.1", type=("build", "run")) @@ -61,6 +63,10 @@ class PyScikitLearn(PythonPackage): # pyproject.toml depends_on("py-setuptools", type="build") depends_on("py-setuptools@:59", when="@:1.2.1", type="build") + depends_on("py-cython@0.29.33:2", when="@1.3:", type="build") + depends_on("py-cython@0.29.24:2", when="@1.0.2:", type="build") + depends_on("py-cython@0.28.5:2", when="@0.21:", type="build") + depends_on("py-cython@0.23:2", type="build") # sklearn/_min_dependencies.py depends_on("py-numpy@1.17.3:", when="@1.1:", type=("build", "run")) @@ -80,10 +86,6 @@ class PyScikitLearn(PythonPackage): depends_on("py-joblib@1:", when="@1.1:", type=("build", "run")) depends_on("py-joblib@0.11:", type=("build", "run")) depends_on("py-threadpoolctl@2.0.0:", when="@0.23:", type=("build", "run")) - depends_on("py-cython@0.29.33:", when="@1.3:", type="build") - depends_on("py-cython@0.29.24:", when="@1.0.2:", type="build") - depends_on("py-cython@0.28.5:", when="@0.21:", type="build") - depends_on("py-cython@0.23:", type="build") depends_on("llvm-openmp", when="@0.21: %apple-clang +openmp") # Test dependencies From 5239d43edf41f2e87d62e4d091e5d32e10fe8950 Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Mon, 23 Oct 2023 14:31:20 -0500 Subject: [PATCH 299/408] Update survey package file for survey version 9 changes. (#40619) * Update survey package file for survey version 9 changes. * Fix single quote - make double. 
* Small change to trigger spack tests --- .../repos/builtin/packages/survey/package.py | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/survey/package.py b/var/spack/repos/builtin/packages/survey/package.py index 79bac929665e8e..1fc4c550f0d37e 100644 --- a/var/spack/repos/builtin/packages/survey/package.py +++ b/var/spack/repos/builtin/packages/survey/package.py @@ -19,7 +19,7 @@ class Survey(CMakePackage): available for tools inside current MPI implementations including: MPICH, MVAPICH, MPT, and OpenMPI. It also supports multiple architectures and has been tested on machines based on Intel, - AMD, ARM, and IBM P8/9 processors and integrated GPUs. + AMD, ARM, and IBM P8/9 processors and integrated NVIDIA GPUs. Survey is a licensed product with the source not openly available. To access the survey source and build with spack please contact: @@ -33,7 +33,8 @@ class Survey(CMakePackage): maintainers("jgalarowicz") version("master", branch="master") - version("1.0.8", branch="1.0.8") + version("1.0.9", branch="1.0.9") + version("1.0.8", tag="1.0.8") version("1.0.7", tag="1.0.7") version("1.0.6", tag="1.0.6") version("1.0.5", tag="1.0.5") @@ -45,6 +46,7 @@ class Survey(CMakePackage): version("1.0.0", branch="1.0.0") variant("mpi", default=False, description="Enable mpi, build MPI data collector") + variant("debug", default=False, description="Build a debug survey version") variant( "tls_model", @@ -61,9 +63,10 @@ class Survey(CMakePackage): depends_on("libmonitor@2021.11.08+commrank", type=("build", "link", "run"), when="@1.0.3:") depends_on("papi@5:", type=("build", "link", "run")) - depends_on("gotcha@master", type=("build", "link", "run")) - depends_on("llvm-openmp@9.0.0", type=("build", "link", "run"), when="@:1.0.2") - depends_on("llvm-openmp@12.0.1", type=("build", "link", "run"), when="@1.0.3:") + depends_on("gotcha@master", type=("build", "link"), when="@:1.0.7") + depends_on("gotcha@1.0.4", type=("build", "link"), when="@1.0.8:") + depends_on("llvm-openmp@9.0.0", type=("build", "link"), when="@:1.0.2") + depends_on("llvm-openmp@12.0.1", type=("build", "link"), when="@1.0.3:") # MPI Installation depends_on("mpi", when="+mpi") @@ -81,6 +84,10 @@ class Survey(CMakePackage): depends_on("py-more-itertools", type=("build", "run"), when="@1.0.4:") depends_on("py-versioneer", type=("build", "run"), when="@1.0.5:") depends_on("py-filelock", type=("build", "run"), when="@1.0.7:") + depends_on("py-zipp", type=("build", "run"), when="@1.0.7:") + depends_on("py-humanize", type=("build", "run"), when="@1.0.8:") + depends_on("py-importlib-resources", type=("build", "run"), when="@1.0.8:") + depends_on("py-gitpython", type=("build", "run"), when="@1.0.9:") extends("python") @@ -117,6 +124,11 @@ def cmake_args(self): mpi_options = self.get_mpi_cmake_options(spec) cmake_args.extend(mpi_options) + if "+debug" in spec: + cmake_args.append("-DCMAKE_C_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_CXX_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_BUILD_TYPE=Custom") + return cmake_args def setup_run_environment(self, env): From 37b70bf7015f6f3413e0c3c1c11338c0d8404232 Mon Sep 17 00:00:00 2001 From: Taillefumier Mathieu <29380261+mtaillefumier@users.noreply.github.com> Date: Mon, 23 Oct 2023 21:37:42 +0200 Subject: [PATCH 300/408] Add rccl and nccl variants to cp2k and cosma (#40451) --- var/spack/repos/builtin/packages/cosma/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/cosma/package.py 
b/var/spack/repos/builtin/packages/cosma/package.py index 2fccafe1872b0e..19db9a0531859c 100644 --- a/var/spack/repos/builtin/packages/cosma/package.py +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -48,6 +48,9 @@ class Cosma(CMakePackage): with when("+cuda"): variant("nccl", default=False, description="Use cuda nccl") + with when("+rocm"): + variant("rccl", default=False, description="Use rocm rccl") + depends_on("cmake@3.22:", type="build") depends_on("mpi@3:") depends_on("blas", when="~cuda ~rocm") @@ -114,6 +117,7 @@ def cmake_args(self): self.define_from_variant("COSMA_WITH_TESTS", "tests"), self.define_from_variant("COSMA_WITH_APPS", "apps"), self.define_from_variant("COSMA_WITH_NCCL", "nccl"), + self.define_from_variant("COSMA_WITH_RCCL", "rccl"), self.define_from_variant("COSMA_WITH_GPU_AWARE_MPI", "gpu_direct"), self.define_from_variant("COSMA_WITH_PROFILING", "profiling"), self.define("COSMA_WITH_BENCHMARKS", False), From c1d8d86ae2bb1ebdd9db4df3d663c6c0cc2dbe3c Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Mon, 23 Oct 2023 16:01:57 -0400 Subject: [PATCH 301/408] Adios2: add kokkos variant (#40623) * adios2: update variants and dependencies * adios2: add kokkos rocm|cuda|sycl variant * e4s oneapi ci stack: add adios2 +sycl * e4s ci stack: add adios2 +rocm * [@spackbot] updating style on behalf of vicentebolea * Apply suggestions from code review * adios2: fixed cuda variant * update ecp-data-vis-sdk * Update share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml --------- Co-authored-by: eugeneswalker Co-authored-by: vicentebolea --- .../stacks/e4s-oneapi/spack.yaml | 2 +- .../stacks/e4s-rocm-external/spack.yaml | 2 + .../cloud_pipelines/stacks/e4s/spack.yaml | 2 + .../repos/builtin/packages/adios2/package.py | 60 ++++++++++++++++--- .../packages/ecp-data-vis-sdk/package.py | 2 +- 5 files changed, 59 insertions(+), 9 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index d170b0a272772c..605a69e4a57d31 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -227,8 +227,8 @@ spack: - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples - - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed - slate +sycl + - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed # -- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. 
# - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index 885dbb538b0476..b5ac17207796fe 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -258,6 +258,7 @@ spack: - tau +mpi +rocm # tau: has issue with `spack env depfile` build # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 - amrex +rocm amdgpu_target=gfx908 - arborx +rocm amdgpu_target=gfx908 - cabana +rocm amdgpu_target=gfx908 @@ -297,6 +298,7 @@ spack: # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a - amrex +rocm amdgpu_target=gfx90a - arborx +rocm amdgpu_target=gfx90a - cabana +rocm amdgpu_target=gfx90a diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 86eab1d4074d3a..710360172ab1c2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -306,6 +306,7 @@ spack: - tau +mpi +rocm # tau: has issue with `spack env depfile` build # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 - amrex +rocm amdgpu_target=gfx908 - arborx +rocm amdgpu_target=gfx908 - cabana +rocm amdgpu_target=gfx908 @@ -345,6 +346,7 @@ spack: # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a - amrex +rocm amdgpu_target=gfx90a - arborx +rocm amdgpu_target=gfx90a - cabana +rocm amdgpu_target=gfx90a diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index bfb08227e6ff9d..218457f3e38a4e 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Adios2(CMakePackage, CudaPackage): +class Adios2(CMakePackage, CudaPackage, ROCmPackage): """The Adaptable Input Output System version 2, developed in the Exascale Computing Program""" @@ -62,7 +62,8 @@ class Adios2(CMakePackage, CudaPackage): variant( "libpressio", default=False, when="@2.8:", description="Enable LibPressio for compression" ) - variant("blosc", default=True, when="@2.4:", description="Enable Blosc compression") + variant("blosc", default=True, when="@2.4:2.8", description="Enable Blosc compression") + variant("blosc2", default=True, when="@2.9:", description="Enable Blosc2 compression") variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression") variant("zfp", default=True, description="Enable ZFP compression") variant("png", default=True, when="@2.4:", description="Enable PNG compression") @@ -78,7 +79,7 @@ class Adios2(CMakePackage, CudaPackage): description="Enable the DataMan engine for WAN transports", ) variant("dataspaces", default=False, when="@2.5:", description="Enable support for DATASPACES") - variant("ssc", default=True, description="Enable the SSC staging engine") + variant("ssc", default=True, when="@:2.7", description="Enable the SSC staging engine") variant("hdf5", default=False, description="Enable the HDF5 engine") variant( "aws", @@ -94,7 
+95,8 @@ class Adios2(CMakePackage, CudaPackage): ) # Optional language bindings, C++11 and C always provided - variant("cuda", default=False, when="@2.8:", description="Enable CUDA support") + variant("kokkos", default=False, when="@2.9:", description="Enable Kokkos support") + variant("sycl", default=False, when="@2.10:", description="Enable SYCL support") variant("python", default=False, description="Enable the Python bindings") variant("fortran", default=True, description="Enable the Fortran bindings") @@ -108,6 +110,37 @@ class Adios2(CMakePackage, CudaPackage): depends_on("cmake@3.12.0:", type="build") + # Standalone CUDA support + depends_on("cuda", when="+cuda ~kokkos") + + # Kokkos support + depends_on("kokkos@3.7: +cuda +wrapper", when="+kokkos +cuda") + depends_on("kokkos@3.7: +rocm", when="+kokkos +rocm") + depends_on("kokkos@3.7: +sycl", when="+kokkos +sycl") + + # Propagate CUDA target to kokkos for +cuda + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on( + "kokkos cuda_arch=%s" % cuda_arch, when="+kokkos +cuda cuda_arch=%s" % cuda_arch + ) + + # Propagate AMD GPU target to kokkos for +rocm + for amdgpu_value in ROCmPackage.amdgpu_targets: + depends_on( + "kokkos amdgpu_target=%s" % amdgpu_value, + when="+kokkos +rocm amdgpu_target=%s" % amdgpu_value, + ) + + conflicts("+cuda", when="@:2.7") + conflicts("+rocm", when="@:2.8") + + conflicts("+cuda", when="+sycl") + conflicts("+rocm", when="+cuda") + conflicts("+rocm", when="+sycl") + + conflicts("+rocm", when="~kokkos", msg="ADIOS2 does not support HIP without Kokkos") + conflicts("+sycl", when="~kokkos", msg="ADIOS2 does not support SYCL without Kokkos") + for _platform in ["linux", "darwin", "cray"]: depends_on("pkgconfig", type="build", when=f"platform={_platform}") variant( @@ -135,8 +168,8 @@ class Adios2(CMakePackage, CudaPackage): depends_on("hdf5+mpi", when="+hdf5+mpi") depends_on("libpressio", when="+libpressio") - depends_on("c-blosc", when="@:2.8 +blosc") - depends_on("c-blosc2", when="@2.9: +blosc") + depends_on("c-blosc", when="+blosc") + depends_on("c-blosc2", when="+blosc2") depends_on("bzip2", when="+bzip2") depends_on("libpng@1.6:", when="+png") depends_on("zfp@0.5.1:0.5", when="+zfp") @@ -202,6 +235,7 @@ def cmake_args(self): from_variant("BUILD_SHARED_LIBS", "shared"), from_variant("ADIOS2_USE_AWSSDK", "aws"), from_variant("ADIOS2_USE_Blosc", "blosc"), + from_variant("ADIOS2_USE_Blosc2", "blosc2"), from_variant("ADIOS2_USE_BZip2", "bzip2"), from_variant("ADIOS2_USE_DataMan", "dataman"), from_variant("ADIOS2_USE_DataSpaces", "dataspaces"), @@ -214,9 +248,13 @@ def cmake_args(self): from_variant("ADIOS2_USE_SST", "sst"), from_variant("ADIOS2_USE_SZ", "sz"), from_variant("ADIOS2_USE_ZFP", "zfp"), - from_variant("ADIOS2_USE_CUDA", "cuda"), from_variant("ADIOS2_USE_Catalyst", "libcatalyst"), from_variant("ADIOS2_USE_LIBPRESSIO", "libpressio"), + self.define("ADIOS2_USE_CUDA", self.spec.satisfies("+cuda ~kokkos")), + self.define("ADIOS2_USE_Kokkos", self.spec.satisfies("+kokkos")), + self.define("Kokkos_ENABLE_CUDA", self.spec.satisfies("+cuda +kokkos")), + self.define("Kokkos_ENABLE_HIP", self.spec.satisfies("+rocm")), + self.define("Kokkos_ENABLE_SYCL", self.spec.satisfies("+sycl")), self.define("BUILD_TESTING", self.run_tests), self.define("ADIOS2_BUILD_EXAMPLES", False), self.define("ADIOS2_USE_Endian_Reverse", True), @@ -244,6 +282,14 @@ def cmake_args(self): args.append(f"-DPYTHON_EXECUTABLE:FILEPATH={spec['python'].command.path}") 
args.append(f"-DPython_EXECUTABLE:FILEPATH={spec['python'].command.path}") + # hip support + if "+cuda" in spec: + args.append(self.builder.define_cuda_architectures(self)) + + # hip support + if "+rocm" in spec: + args.append(self.builder.define_hip_architectures(self)) + return args @property diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index 189515b05638eb..f23a736569f24a 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -102,7 +102,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): amdgpu_target_variants = ["amdgpu_target={0}".format(x) for x in ROCmPackage.amdgpu_targets] dav_sdk_depends_on( - "adios2+shared+mpi+python+blosc+sst+ssc+dataman", + "adios2+shared+mpi+python+sst+dataman", when="+adios2", propagate=["cuda", "hdf5", "sz", "zfp", "fortran"] + cuda_arch_variants, ) From e7892dffcfa06ec2622a497c60c826f8fdb46a51 Mon Sep 17 00:00:00 2001 From: Nakano Masaki Date: Tue, 24 Oct 2023 05:02:15 +0900 Subject: [PATCH 302/408] fix installation error of bear (#40637) Co-authored-by: Tom Scogland --- var/spack/repos/builtin/packages/bear/package.py | 6 +++--- var/spack/repos/builtin/packages/grpc/package.py | 2 +- var/spack/repos/builtin/packages/re2/package.py | 5 +++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py index 18240c7f330954..becb364a2c2d99 100644 --- a/var/spack/repos/builtin/packages/bear/package.py +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -23,10 +23,10 @@ class Bear(CMakePackage): version("2.0.4", sha256="33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e") depends_on("pkgconfig", when="@3:") - depends_on("fmt", when="@3.0.0:") - depends_on("grpc", when="@3.0.0:") + depends_on("fmt@8", when="@3.0.0:") + depends_on("grpc +shared", when="@3.0.0:") depends_on("nlohmann-json", when="@3.0.0:") - depends_on("spdlog", when="@3.0.0:") + depends_on("spdlog +fmt_external", when="@3.0.0:") depends_on("cmake@2.8:", type="build") depends_on("python", type="build") depends_on("googletest", type="test", when="@3:") diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py index dd7f3f5acf422a..58e64427ec23fb 100644 --- a/var/spack/repos/builtin/packages/grpc/package.py +++ b/var/spack/repos/builtin/packages/grpc/package.py @@ -59,7 +59,7 @@ class Grpc(CMakePackage): depends_on("zlib-api") depends_on("c-ares") depends_on("abseil-cpp", when="@1.27:") - depends_on("re2+pic", when="@1.33.1:") + depends_on("re2+pic@2023-09-01", when="@1.33.1:") def cmake_args(self): args = [ diff --git a/var/spack/repos/builtin/packages/re2/package.py b/var/spack/repos/builtin/packages/re2/package.py index 761005949b60e2..3c62d3da76217c 100644 --- a/var/spack/repos/builtin/packages/re2/package.py +++ b/var/spack/repos/builtin/packages/re2/package.py @@ -13,6 +13,9 @@ class Re2(CMakePackage): homepage = "https://github.com/google/re2" url = "https://github.com/google/re2/archive/2020-08-01.tar.gz" + version( + "2023-09-01", sha256="5bb6875ae1cd1e9fedde98018c346db7260655f86fdb8837e3075103acd3649b" + ) version( "2021-06-01", sha256="26155e050b10b5969e986dab35654247a3b1b295e0532880b5a9c13c0a700ceb" ) @@ -26,6 +29,8 @@ class Re2(CMakePackage): variant("shared", default=False, description="Build shared instead of static 
libraries") variant("pic", default=True, description="Enable position independent code") + depends_on("abseil-cpp", when="@2023-09-01:") + # shared libs must have position-independent code conflicts("+shared ~pic") From 9bb025c8714de7750866801a9392d932523b8746 Mon Sep 17 00:00:00 2001 From: Annop Wongwathanarat Date: Tue, 24 Oct 2023 07:58:04 +0100 Subject: [PATCH 303/408] armpl-gcc: add version 23.10 and macOS support (#40511) --- .../builtin/packages/armpl-gcc/package.py | 119 +++++++++++++++--- 1 file changed, 104 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py index 22f8521d925169..f0157ae551ffe1 100644 --- a/var/spack/repos/builtin/packages/armpl-gcc/package.py +++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py @@ -31,11 +31,62 @@ "rhel8": "RHEL-8", "rhel9": "RHEL-9", "rocky8": "RHEL-8", + "rocky9": "RHEL-9", "amzn2": "AmazonLinux-2", "amzn2023": "AmazonLinux-2023", } _versions = { + "23.10_gcc-12.2": { + "RHEL-7": ("e5e2c69ad281a676f2a06c835fbf31d4f9fdf46aa3f3f7c8aafff46985f64902"), + "RHEL-8": ("cc0f3572ead93d1e31797b7a39a40cff3414878df9bd24a452bf4877dc35ca4c"), + "RHEL-9": ("18c75f57333031e454921cc3f4f22fd567e5a701424ff9ac219bbfe9955a8a96"), + "SLES-15": ("e1e891eceaffedecf7351e2c499ef2b49a36c9af29174b366ff470d0a568c18f"), + "Ubuntu-20.04": ("976424875c52c2062fc76cbc5d527ee82413cdc0432d7c59f423295a3b0cc612"), + "Ubuntu-22.04": ("6dd778edf55e13e8b766d75c340f0259f6cb507a93966d76d188b8b3943c769b"), + "AmazonLinux-2": ("423ac3df262b5fcca6cea480503b693306c970dd8e8e05c753ece92446ac7fee"), + "AmazonLinux-2023": ("acadf3b6cde866cb41f7363b290a646a492769aaa5819d4c0d60df89913342a9"), + }, + "23.10_gcc-11.3": { + "RHEL-7": ("b2afbdc056ae01fb5c71935448b19300ef368962a94ae76b8811f1d328c723c2"), + "RHEL-8": ("79b83a8a2c46b949896b3964c761cbd0b66c37826996afb62c466af5fb420bc2"), + "RHEL-9": ("7a84f561bcf941bb25123b3ef730b4c02616bc51215933870677163e78af38e3"), + "SLES-15": ("9243c405d092d3eabff112ccabc300e96f13c3d2c5c319df04d7093bb6f535a2"), + "Ubuntu-20.04": ("a16df088ef9303040d92b017b233c6e4c6f0300d09c2ad0a66c0318831bf009c"), + "Ubuntu-22.04": ("fabda66dc6388fa8c094443fa53deece5590db66caaa6a1e39e99e64d5bb0709"), + "AmazonLinux-2": ("db5d039fa1d07695a71b8733584d878bb778d41bc0ecc3e19059b75cffdcf8cd"), + "AmazonLinux-2023": ("977fd465702f086a69e3f7fc28f2bcb6c79a7af381dc7d865345115b26f4631f"), + }, + "23.10_gcc-10.4": { + "RHEL-7": ("3c8bad3af82a76ca1a45705afd47028cc26c7093377a554e692e1cd6f61cb304"), + "RHEL-8": ("381afae0e3e94aa91029f571de0e51c2342e50b4f855db7a9b9ca66e16e26276"), + "SLES-15": ("226e9519407331b4ad5ded8699cd15f1d9b845843304bbf21f47009a399fe2a0"), + "Ubuntu-20.04": ("45de59f795ad9026a838ab611b03b1644169a034ce59d6cca2c7940850fa17ad"), + "AmazonLinux-2": ("637b51da12548dc66da9132328fe2ea39ba0736af66fb30332ca8eeb540e3373"), + }, + "23.10_gcc-9.3": { + "RHEL-7": ("6fc2e3319b83ea2b1bf8d98ec43f614b937bb5f23d15aefe9e9171c882d24a60"), + "RHEL-8": ("1a05548a7051d1df42280fdcfcffeaf89d519aa7978bffd29171da60fdbccecf"), + "SLES-15": ("389ddd34e1299e4d942864f63f236158a81ce4190f59af512a1bea3221153bfe"), + "Ubuntu-20.04": ("a1a221859b5f0962df3a0c6ce31669827bff0bfffb185b80429620f14b40f4f4"), + "AmazonLinux-2": ("2eef9b28e95e75f0040eb61c9e1b406ec4d0b81cce3e95a652029aa0898733a0"), + }, + "23.10_gcc-8.2": { + "RHEL-7": ("d6596721e74e7bdc8d9ce7b8b2a4c5ab2bd430f3ca69b9ec84f587f1aa181083"), + "RHEL-8": ("004aed52003e19a6c14df303456318e486ad783eb543b79285c7953a23722a4a"), + "SLES-15": 
("12c638c0cc5bdc220699499ec6bb160a7b889f105901f4354bd2748a77d25c8e"), + "AmazonLinux-2": ("d039134236cda298cd0920c3c5b017eeef83fcab82949221dc7deb081026252f"), + }, + "23.10_gcc-7.5": { + "RHEL-7": ("1a0ca860c168987d174923dfc7800e10521303914793162a8bae2b2cd3f68203"), + "AmazonLinux-2": ("58b201a6bbe7ee10563d8d42b32a77c4b15c57b4e81abb35d24b8c3fc9cff4d9"), + }, + "23.10_flang-new_clang_17": { + "macOS": ("baf09cd6d1d1b7c780b8b31cfe1dd709596b182dc714127fbc9f23007ff9e23a") + }, + "23.06_flang-new_clang_16": { + "macOS": ("232f5e89e0f1f4777480c64a790e477dfd2f423d3cf5704a116a2736f36250ea") + }, "23.04.1_gcc-12.2": { "RHEL-7": ("789cc093cb7e0d9294aff0fdf94b74987435a09cdff4c1b7118a03350548d03c"), "RHEL-8": ("1b668baec6d3df2d48c5aedc70baa6a9b638983b94bf2cd58d378859a1da49f0"), @@ -177,20 +228,28 @@ def get_os(ver): - spack_os = spack.platforms.host().default_os + platform = spack.platforms.host() + if platform.name == "darwin": + return "macOS" if ver.startswith("22."): - return _os_map_before_23.get(spack_os, "") + return _os_map_before_23.get(platform.default_os, "") else: - return _os_map.get(spack_os, "RHEL-7") + return _os_map.get(platform.default_os, "RHEL-7") def get_package_url(version): base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/" armpl_version = version.split("_")[0] armpl_version_dashed = armpl_version.replace(".", "-") - gcc_version = version.split("_")[1] + compiler_version = version.split("_", 1)[1] os = get_os(armpl_version) - filename = "arm-performance-libraries_" + armpl_version + "_" + os + "_" + gcc_version + ".tar" + if os == "macOS": + if armpl_version.startswith("23.06"): + return f"{base_url}{armpl_version_dashed}/armpl_{armpl_version}_{compiler_version}.dmg" + else: + filename = f"arm-performance-libraries_{armpl_version}_macOS.dmg" + return f"{base_url}{armpl_version_dashed}/macos/{filename}" + filename = f"arm-performance-libraries_{armpl_version}_{os}_{compiler_version}.tar" os_short = "" if armpl_version.startswith("22.0."): os_short = os.replace("-", "") @@ -198,7 +257,7 @@ def get_package_url(version): os_short = os.split(".")[0].lower() if "amazonlinux" in os_short: os_short = os_short.replace("amazonlinux", "al") - return base_url + armpl_version_dashed + "/" + os_short + "/" + filename + return f"{base_url}{armpl_version_dashed}/{os_short}/{filename}" def get_armpl_prefix(spec): @@ -215,16 +274,26 @@ class ArmplGcc(Package): maintainers("annop-w") for ver, packages in _versions.items(): - key = "{0}".format(get_os(ver)) + key = get_os(ver) sha256sum = packages.get(key) url = get_package_url(ver) if sha256sum: - version(ver, sha256=sha256sum, url=url) + extension = os.path.splitext(url)[1] + # Don't attempt to expand .dmg files + expand = extension != ".dmg" + version(ver, sha256=sha256sum, url=url, extension=extension, expand=expand) conflicts("target=x86:", msg="Only available on Aarch64") conflicts("target=ppc64:", msg="Only available on Aarch64") conflicts("target=ppc64le:", msg="Only available on Aarch64") + conflicts("%gcc@:11", when="@23.10_gcc-12.2") + conflicts("%gcc@:10", when="@23.10_gcc-11.3") + conflicts("%gcc@:9", when="@23.10_gcc-10.4") + conflicts("%gcc@:8", when="@23.10_gcc-9.3") + conflicts("%gcc@:7", when="@23.10_gcc-8.2") + conflicts("%gcc@:6", when="@23.10_gcc-7.5") + conflicts("%gcc@:11", when="@23.04.1_gcc-12.2") conflicts("%gcc@:10", when="@23.04.1_gcc-11.3") conflicts("%gcc@:9", when="@23.04.1_gcc-10.2") @@ -266,17 +335,29 @@ class ArmplGcc(Package): # Run the installer with the desired install 
directory def install(self, spec, prefix): + if spec.platform == "darwin": + hdiutil = which("hdiutil") + # Mount image + mountpoint = os.path.join(self.stage.path, "mount") + hdiutil("attach", "-mountpoint", mountpoint, self.stage.archive_file) + try: + # Run installer + exe_name = f"armpl_{spec.version.string}_install.sh" + installer = Executable(os.path.join(mountpoint, exe_name)) + installer("-y", f"--install_dir={prefix}") + finally: + # Unmount image + hdiutil("detach", mountpoint) + return if self.compiler.name != "gcc": raise spack.error.SpackError(("Only compatible with GCC.\n")) with when("@:22"): - armpl_version = "{}".format(spec.version.up_to(3)).split("_")[0] + armpl_version = spec.version.up_to(3).string.split("_")[0] with when("@23:"): - armpl_version = "{}".format(spec.version).split("_")[0] + armpl_version = spec.version.string.split("_")[0] - exe = Executable( - "./arm-performance-libraries_{0}_{1}.sh".format(armpl_version, get_os(armpl_version)) - ) + exe = Executable(f"./arm-performance-libraries_{armpl_version}_{get_os(armpl_version)}.sh") exe("--accept", "--force", "--install-to", prefix) @property @@ -330,14 +411,22 @@ def headers(self): def setup_run_environment(self, env): armpl_dir = get_armpl_prefix(self.spec) - env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + if self.spec.platform == "darwin": + env.prepend_path("DYLD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + else: + env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) @run_after("install") def check_install(self): armpl_dir = get_armpl_prefix(self.spec) armpl_example_dir = join_path(armpl_dir, "examples") # run example makefile - make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) + if self.spec.platform == "darwin": + # Fortran examples on MacOS requires flang-new which is + # not commonly installed, so only run the C examples. 
+ make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "c_examples") + else: + make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) # clean up make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "clean") From 85541c7e742e7169b86a586386ff2627700fdc8e Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 24 Oct 2023 00:08:05 -0700 Subject: [PATCH 304/408] exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 (#40676) * exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 due to pybind error with py 3.11 * hiop@:1.0 +cuda: constrain to cuda@:11.9 --- var/spack/repos/builtin/packages/exago/package.py | 4 ++-- var/spack/repos/builtin/packages/hiop/package.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index d28b4fa1f82018..b38aff0147b9a0 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -17,7 +17,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") - version("1.5.1", commit="7abe482c8da0e247f9de4896f5982c4cacbecd78", submodules=True) + version("1.5.1", tag="v1.5.1", submodules=True) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) @@ -64,7 +64,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): ) # Dependencies - depends_on("python@3.6:", when="@1.3.0:+python") + depends_on("python@3.6:3.10", when="@1.3.0:1.5+python") depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python") depends_on("py-mpi4py", when="@1.3.0:+mpi+python") depends_on("pkgconfig", type="build") diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index ff62c7da56c0ee..9ceedc36b4bd0c 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -104,6 +104,10 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) depends_on("cuda@11:", when="@develop:+cuda") + + # https://github.com/spack/spack/issues/40678 + depends_on("cuda@:11.9", when="@:1.0 +cuda") + depends_on("raja", when="+raja") depends_on("umpire", when="+raja") depends_on("raja+openmp", when="+raja~cuda~rocm") From 3fb2cfb8e9a8544d7a3548b2c5c9cce85840281f Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 24 Oct 2023 09:28:23 -0700 Subject: [PATCH 305/408] hiop +cuda: fix issue 40678 (#40688) --- var/spack/repos/builtin/packages/hiop/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index 9ceedc36b4bd0c..353c7fd942b675 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -103,10 +103,9 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+cuda".format(hiop_v)) depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) - depends_on("cuda@11:", 
when="@develop:+cuda") - # https://github.com/spack/spack/issues/40678 - depends_on("cuda@:11.9", when="@:1.0 +cuda") + depends_on("cuda@11:11.9", when="@develop:+cuda") + depends_on("cuda@:11.9", when="+cuda") depends_on("raja", when="+raja") depends_on("umpire", when="+raja") From ecaf0df7196734fc614226584f1b5ad6ace56d5e Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:21:58 +0200 Subject: [PATCH 306/408] neovim: conflict for libluv problem on macOS + add newer versions of neovim and libluv (#40690) * add conflict with libluv version >=1.44 just on macOS * minor change * add libluv versions * neovim: add newer releases --- var/spack/repos/builtin/packages/libluv/package.py | 2 ++ var/spack/repos/builtin/packages/neovim/package.py | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py index abf42d47f08ee5..b3600f63ce6f54 100644 --- a/var/spack/repos/builtin/packages/libluv/package.py +++ b/var/spack/repos/builtin/packages/libluv/package.py @@ -14,7 +14,9 @@ class Libluv(CMakePackage): homepage = "https://github.com/luvit/luv" url = "https://github.com/luvit/luv/releases/download/1.36.0-0/luv-1.36.0-0.tar.gz" + version("1.45.0-0", sha256="fa6c46fb09f88320afa7f88017efd7b0d2b3a0158c5ba5b6851340b0332a2b81") version("1.44.2-1", sha256="3eb5c7bc44f61fbc4148ea30e3221d410263e0ffa285672851fc19debf9e5c30") + version("1.44.2-0", sha256="30639f8e0fac7fb0c3a04b94a00f73c6d218c15765347ceb0998a6b72464b6cf") version("1.43.0-0", sha256="567a6f3dcdcf8a9b54ddc57ffef89d1e950d72832b85ee81c8c83a9d4e0e9de2") version("1.42.0-1", sha256="4b6fbaa89d2420edf6070ad9e522993e132bd7eb2540ff754c2b9f1497744db2") version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36") diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index db8bd4a66c63ef..737cc57de7e39b 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -17,6 +17,8 @@ class Neovim(CMakePackage): version("master", branch="master") version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef") + version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093") + version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369") version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e") version("0.9.0", sha256="39d79107c54d2f3babcad2cd157c399241c04f6e75e98c18e8afaf2bb5e82937") version("0.8.3", sha256="adf45ff160e1d89f519b6114732eba03485ae469beb27919b0f7a4f6b44233c1") @@ -136,7 +138,10 @@ class Neovim(CMakePackage): # Support for `libvterm@0.2:` has been added in neovim@0.8.0 # term: Add support for libvterm >= 0.2 (https://github.com/neovim/neovim/releases/tag/v0.8.0) # https://github.com/neovim/neovim/issues/16217#issuecomment-958590493 - conflicts("^libvterm@0.2:", when="@:0.7") + conflicts("libvterm@0.2:", when="@:0.7") + + # https://github.com/neovim/neovim/issues/25770 + conflicts("libluv@1.44:", when="platform=darwin") @when("^lua") def cmake_args(self): From f037238b0122498e9873775ddc196ac5ecb3ccbb Mon Sep 17 00:00:00 2001 From: Filippo Barbari <121092059+fbarbari@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:26:26 +0200 Subject: [PATCH 307/408] Added new benchmark version up to 1.8.3 (#40689) --- 
var/spack/repos/builtin/packages/benchmark/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/benchmark/package.py b/var/spack/repos/builtin/packages/benchmark/package.py index 9026d3d1c14928..fe0b286352fcd1 100644 --- a/var/spack/repos/builtin/packages/benchmark/package.py +++ b/var/spack/repos/builtin/packages/benchmark/package.py @@ -16,7 +16,16 @@ class Benchmark(CMakePackage): # first properly installed CMake config packages in # 1.2.0 release: https://github.com/google/benchmark/issues/363 version("main", branch="main") + version("1.8.3", sha256="6bc180a57d23d4d9515519f92b0c83d61b05b5bab188961f36ac7b06b0d9e9ce") + version("1.8.2", sha256="2aab2980d0376137f969d92848fbb68216abb07633034534fc8c65cc4e7a0e93") + version("1.8.1", sha256="e9ff65cecfed4f60c893a1e8a1ba94221fad3b27075f2f80f47eb424b0f8c9bd") + version("1.8.0", sha256="ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172") + version("1.7.1", sha256="6430e4092653380d9dc4ccb45a1e2dc9259d581f4866dc0759713126056bc1d7") + version("1.7.0", sha256="3aff99169fa8bdee356eaa1f691e835a6e57b1efeadb8a0f9f228531158246ac") + version("1.6.2", sha256="a9f77e6188c1cd4ebedfa7538bf5176d6acc72ead6f456919e5f464ef2f06158") + version("1.6.1", sha256="6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4") version("1.6.0", sha256="1f71c72ce08d2c1310011ea6436b31e39ccab8c2db94186d26657d41747c85d6") + version("1.5.6", sha256="789f85b4810d13ff803834ea75999e41b326405d83d6a538baf01499eda96102") version("1.5.5", sha256="3bff5f237c317ddfd8d5a9b96b3eede7c0802e799db520d38ce756a2a46a18a0") version("1.5.4", sha256="e3adf8c98bb38a198822725c0fc6c0ae4711f16fbbf6aeb311d5ad11e5a081b5") version("1.5.0", sha256="3c6a165b6ecc948967a1ead710d4a181d7b0fbcaa183ef7ea84604994966221a") From 9268210730eac4ce0f0f19c0bd640b85d23ad972 Mon Sep 17 00:00:00 2001 From: AMD Toolchain Support <73240730+amd-toolchain-support@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:06:32 +0100 Subject: [PATCH 308/408] openmpi: fix pmi@4.2.3: compat (#40686) --- var/spack/repos/builtin/packages/openmpi/package.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 87e5bc4f2bfee5..5325235612442d 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -555,11 +555,14 @@ class Openmpi(AutotoolsPackage, CudaPackage): # PMIx is unavailable for @1, and required for @2: # OpenMPI @2: includes a vendored version: - # depends_on('pmix@1.1.2', when='@2.1.6') - # depends_on('pmix@3.2.3', when='@4.1.2') - depends_on("pmix@1.0:1", when="@2.0:2 ~internal-pmix") - depends_on("pmix@3.2:", when="@4.0:4 ~internal-pmix") - depends_on("pmix@4.2:", when="@5.0:5 ~internal-pmix") + with when("~internal-pmix"): + depends_on("pmix@1", when="@2") + depends_on("pmix@3.2:", when="@4:") + depends_on("pmix@4.2:", when="@5:") + + # pmix@4.2.3 contains a breaking change, compat fixed in openmpi@4.1.6 + # See https://www.mail-archive.com/announce@lists.open-mpi.org//msg00158.html + depends_on("pmix@:4.2.2", when="@:4.1.5") # Libevent is required when *vendored* PMIx is used depends_on("libevent@2:", when="@main") From 0d3172f539ce2bb3d24b5f69a0798ec1a200a3af Mon Sep 17 00:00:00 2001 From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com> Date: Tue, 24 Oct 2023 15:30:02 -0700 Subject: [PATCH 309/408] Updating rvs binary 
path. (#40604) * Updating rvs binary path * Updating spec check as per the recommendation --- .../builtin/packages/rocm-validation-suite/package.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py index dfefd8ef75d5c5..adad90b646e628 100644 --- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py @@ -179,14 +179,18 @@ def setup_build_environment(self, build_env): depends_on("hip-rocclr@" + ver, when="@" + ver) def patch(self): - if "@4.5.0:5.1" in self.spec: + if self.spec.satisfies("@4.5:5.1"): filter_file( "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True ) - elif "@5.2.0:" in self.spec: + elif self.spec.satisfies("@5.2:5.4"): filter_file( "@ROCM_PATH@/bin", self.spec.prefix.bin, "rvs/conf/deviceid.sh.in", string=True ) + elif self.spec.satisfies("@5.5:"): + filter_file( + "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True + ) def cmake_args(self): args = [ From fb0fa96ec024105adac9a19dffbaca4b0e7de754 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Tue, 24 Oct 2023 15:46:23 -0700 Subject: [PATCH 310/408] Add ufs-utils@1.11.0 (#40695) * Add ufs-utils@1.11.0 * Update package.py --- var/spack/repos/builtin/packages/ufs-utils/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/ufs-utils/package.py b/var/spack/repos/builtin/packages/ufs-utils/package.py index e551e7fec1fdbb..50380bfe5889b2 100644 --- a/var/spack/repos/builtin/packages/ufs-utils/package.py +++ b/var/spack/repos/builtin/packages/ufs-utils/package.py @@ -18,6 +18,12 @@ class UfsUtils(CMakePackage): maintainers("t-brown", "edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA") + version( + "1.11.0", + tag="ufs_utils_1_11_0", + commit="72701ab45165ae67a1c4b4d855e763bf5674dbd2", + submodules=True, + ) version( "1.10.0", tag="ufs_utils_1_10_0", From 5fe989f2e1b593cfd2984877d22e3ef30f2ed0e4 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:37:26 -0400 Subject: [PATCH 311/408] Windows: search PATH for patch utility (#40513) Previously, we only searched for `patch` inside of whatever Git installation was available because the most common installation of Git available on Windows had `patch`. That's not true for all possible installations of Git though, so this updates the search to also check PATH. 
--- lib/spack/spack/patch.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 8b094a7642d634..7e2fcaff103ef3 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -7,6 +7,7 @@ import inspect import os import os.path +import pathlib import sys import llnl.util.filesystem @@ -36,10 +37,12 @@ def apply_patch(stage, patch_path, level=1, working_dir="."): """ git_utils_path = os.environ.get("PATH", "") if sys.platform == "win32": - git = which_string("git", required=True) - git_root = git.split("\\")[:-2] - git_root.extend(["usr", "bin"]) - git_utils_path = os.sep.join(git_root) + git = which_string("git") + if git: + git = pathlib.Path(git) + git_root = git.parent.parent + git_root = git_root / "usr" / "bin" + git_utils_path = os.pathsep.join([str(git_root), git_utils_path]) # TODO: Decouple Spack's patch support on Windows from Git # for Windows, and instead have Spack directly fetch, install, and From 34b6212b56b3b15dab8e8a9582de9e8a625f7ae8 Mon Sep 17 00:00:00 2001 From: Taillefumier Mathieu <29380261+mtaillefumier@users.noreply.github.com> Date: Wed, 25 Oct 2023 09:55:13 +0200 Subject: [PATCH 312/408] [cp2k] Use fftw3 MKL by default when cp2k is compiled with mkl (#40671) --- .../packages/cp2k/cmake-fixes-2023.2.patch | 154 +++++++++++++++--- 1 file changed, 134 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch index 2961a4ceee8d45..985edad3aa5a1c 100644 --- a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch +++ b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch @@ -1,10 +1,22 @@ -From 1897cbf3e467dc765f733b09af041fe8f25fa906 Mon Sep 17 00:00:00 2001 +From b75eb217115820059aba26d1ff1a8657e3841e7d Mon Sep 17 00:00:00 2001 From: Mathieu Taillefumier -Date: Thu, 19 Oct 2023 12:21:50 +0200 -Subject: [PATCH] [cmake] fix for building gromacs and cp2k with cmake and spack +Date: Mon, 23 Oct 2023 15:50:44 +0200 +Subject: [PATCH] cmake-fixes-2023.2 + +--- + CMakeLists.txt | 63 +++++++----- + cmake/FindBlas.cmake | 174 +++++++++++++++++----------------- + cmake/FindLapack.cmake | 47 ++++----- + cmake/cp2k.pc.in | 19 ---- + cmake/cp2kConfig.cmake.in | 195 ++++++++++++++++++++------------------ + cmake/libcp2k.pc.in | 11 +++ + src/CMakeLists.txt | 18 ++-- + 7 files changed, 276 insertions(+), 251 deletions(-) + delete mode 100644 cmake/cp2k.pc.in + create mode 100644 cmake/libcp2k.pc.in diff --git a/CMakeLists.txt b/CMakeLists.txt -index 3f81c7b524..1b6c6a0636 100644 +index 3f81c7b52..f2d85d033 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -49,7 +49,8 @@ if(NOT DEFINED CMAKE_CUDA_STANDARD) @@ -17,7 +29,18 @@ index 3f81c7b524..1b6c6a0636 100644 find_package(PkgConfig) -@@ -115,8 +116,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT +@@ -108,6 +109,10 @@ option(CP2K_USE_LIBXSMM "Use libxsmm for small gemms (supports x86 platforms)" + OFF) + option(CP2K_BUILD_DBCSR "Duild dbcsr at the same time than cp2k." OFF) + option(BUILD_SHARED_LIBS "Build cp2k shared library" ON) ++option( ++ CP2K_USE_FFTW3_WITH_MKL ++ "If set to ON use the original implementation of fftw3 instead of the MKL implementation." 
++ OFF) + + cmake_dependent_option(CP2K_ENABLE_ELPA_OPENMP_SUPPORT + "Enable elpa openmp support" ON "CP2K_USE_ELPA" OFF) +@@ -115,8 +120,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT "Enable FFTW openmp support" ON "CP2K_USE_FFTW3" OFF) cmake_dependent_option(CP2K_ENABLE_FFTW3_THREADS_SUPPORT "Enable FFTW THREADS support" OFF "CP2K_USE_FFTW3" OFF) @@ -28,7 +51,71 @@ index 3f81c7b524..1b6c6a0636 100644 cmake_dependent_option( DBCSR_USE_ACCEL -@@ -748,7 +749,7 @@ add_subdirectory(src) +@@ -527,7 +532,7 @@ if(CP2K_USE_ACCEL MATCHES "CUDA") + endif() + + set(CP2K_USE_CUDA ON) +- message(STATUS ``"-- CUDA compiler and libraries found") ++ message(STATUS "-- CUDA compiler and libraries found") + elseif(CP2K_USE_ACCEL MATCHES "HIP") + enable_language(HIP) + # Find hip +@@ -620,27 +625,36 @@ endif() + + # FFTW3 + ++set(CP2K_USE_FFTW3_ OFF) + if(CP2K_USE_FFTW3) +- find_package(Fftw REQUIRED) +- if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) +- message( +- FATAL_ERROR +- "Fftw3 threads and openmp supports can not be used at the same time") +- endif() ++ if(CP2K_USE_FFTW3_WITH_MKL OR NOT CP2K_BLAS_VENDOR MATCHES "MKL") ++ find_package(Fftw REQUIRED) ++ if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) ++ message( ++ FATAL_ERROR ++ "Fftw3 threads and openmp supports can not be used at the same time") ++ endif() + +- if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET +- CP2K::FFTW3::fftw3_threads)) +- message( +- FATAL_ERROR +- "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)." +- ) +- endif() ++ if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET ++ CP2K::FFTW3::fftw3_threads)) ++ message( ++ FATAL_ERROR ++ "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)." ++ ) ++ endif() + +- if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp)) +- message( +- FATAL_ERROR +- "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)." +- ) ++ if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp ++ )) ++ message( ++ FATAL_ERROR ++ "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)." 
++ ) ++ endif() ++ # we use this variable later on to include the fftw target whenever mkl is ++ # found or not ++ set(CP2K_USE_FFTW3_ ON) ++ else() ++ message("-- Using the MKL implementation of FFTW3.") + endif() + endif() + +@@ -748,7 +762,7 @@ add_subdirectory(src) include(GNUInstallDirs) get_target_property(CP2K_LIBS cp2k_link_libs INTERFACE_LINK_LIBRARIES) @@ -37,19 +124,18 @@ index 3f81c7b524..1b6c6a0636 100644 message( STATUS "--------------------------------------------------------------------") -@@ -1039,6 +1040,10 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" +@@ -1039,6 +1053,9 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" "${PROJECT_BINARY_DIR}/cp2kConfigVersion.cmake" DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") +install(FILES "${PROJECT_BINARY_DIR}/libcp2k.pc" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") -+ + install( DIRECTORY "${PROJECT_SOURCE_DIR}/cmake" DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k" diff --git a/cmake/FindBlas.cmake b/cmake/FindBlas.cmake -index 6e5fb78240..335cbd964a 100644 +index 6e5fb7824..335cbd964 100644 --- a/cmake/FindBlas.cmake +++ b/cmake/FindBlas.cmake @@ -15,104 +15,108 @@ if(NOT @@ -247,7 +333,7 @@ index 6e5fb78240..335cbd964a 100644 # having the fortran interface is usually enough. C, C++ and others languages # might require this information though diff --git a/cmake/FindLapack.cmake b/cmake/FindLapack.cmake -index 966e0d78d3..77a1e04258 100644 +index 966e0d78d..77a1e0425 100644 --- a/cmake/FindLapack.cmake +++ b/cmake/FindLapack.cmake @@ -20,33 +20,34 @@ include(FindPackageHandleStandardArgs) @@ -310,7 +396,7 @@ index 966e0d78d3..77a1e04258 100644 REQUIRED_VARS CP2K_LAPACK_LINK_LIBRARIES) diff --git a/cmake/cp2k.pc.in b/cmake/cp2k.pc.in deleted file mode 100644 -index 5b4a095660..0000000000 +index 5b4a09566..000000000 --- a/cmake/cp2k.pc.in +++ /dev/null @@ -1,19 +0,0 @@ @@ -335,10 +421,10 @@ index 5b4a095660..0000000000 -#Libs.private: -L"${libdir}" @CP2K_LIBS@ \ No newline at end of file diff --git a/cmake/cp2kConfig.cmake.in b/cmake/cp2kConfig.cmake.in -index a3acd47442..a9e0eb5a58 100644 +index a3acd4744..1c310e19b 100644 --- a/cmake/cp2kConfig.cmake.in +++ b/cmake/cp2kConfig.cmake.in -@@ -5,112 +5,120 @@ +@@ -5,112 +5,121 @@ #! SPDX-License-Identifier: GPL-2.0-or-later ! #!-------------------------------------------------------------------------------------------------! 
@@ -405,9 +491,10 @@ index a3acd47442..a9e0eb5a58 100644 + find_dependency(MPI REQUIRED) + endif() + -+ if(@CP2K_USE_FFTW3@) ++ if(@CP2K_USE_FFTW3@ OR @CP2K_USE_FFTW3_WITH_MKL@) + find_dependency(Fftw REQUIRED) + endif() ++ + # QUIP + if(@CP2K_USE_QUIP@) + find_dependency(Quip REQUIRED) @@ -554,7 +641,7 @@ index a3acd47442..a9e0eb5a58 100644 -include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") diff --git a/cmake/libcp2k.pc.in b/cmake/libcp2k.pc.in new file mode 100644 -index 0000000000..618af55e28 +index 000000000..618af55e2 --- /dev/null +++ b/cmake/libcp2k.pc.in @@ -0,0 +1,11 @@ @@ -570,19 +657,41 @@ index 0000000000..618af55e28 +Cflags: -I"${includedir}/cp2k" -I"${includedir}/cp2k/@CMAKE_Fortran_COMPILER_ID@-@CMAKE_Fortran_COMPILER_VERSION@" +Libs: -L"${libdir}" -lcp2k diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index dbc955885e..e003d4f88d 100644 +index dbc955885..1178101ad 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt +@@ -1536,9 +1536,9 @@ target_link_libraries( + $<$:CP2K::LIBSPG::libspg> + $<$:CP2K::Libxc::xc> + $<$:CP2K::ELPA::elpa> +- $<$:CP2K::FFTW3::fftw3> +- $<$:CP2K::FFTW3::fftw3_threads> +- $<$:CP2K::FFTW3::fftw3_omp> ++ $<$:CP2K::FFTW3::fftw3> ++ $<$,$>:CP2K::FFTW3::fftw3_threads> ++ $<$,$>:CP2K::FFTW3::fftw3_omp> + $<$:SPLA::spla> + $<$:CP2K::Libint2::int2> + $<$:${TORCH_LIBRARIES}> @@ -1555,7 +1555,7 @@ target_compile_definitions( cp2k PUBLIC $<$:__parallel> $<$:__SCALAPACK> - $<$:__MPI_08> -+ $<$:__MPI_08> ++ $<$:__MPI_F08> __COMPILE_DATE=\"${CP2K_TIMESTAMP}\" __COMPILE_HOST=\"${CP2K_HOST_NAME}\" __COMPILE_REVISION=\"${CP2K_GIT_HASH}\" -@@ -1774,12 +1774,12 @@ install( +@@ -1577,7 +1577,7 @@ target_compile_definitions( + $<$:__OFFLOAD_GEMM> + $<$:__ELPA> + $<$:__LIBXC> +- $<$:__FFTW3> ++ $<$:__FFTW3> + $<$:__LIBINT> + $<$:__LIBPEXSI> + $<$:__LIBTORCH> +@@ -1774,12 +1774,14 @@ install( EXPORT cp2k_targets FILE cp2kTargets.cmake NAMESPACE cp2k:: @@ -590,11 +699,16 @@ index dbc955885e..e003d4f88d 100644 + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") -install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k") -+install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") ++install(FILES start/libcp2k.h ++ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") install( DIRECTORY "${PROJECT_BINARY_DIR}/src/mod_files" - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k" -+ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" ++ DESTINATION ++ "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" FILES_MATCHING PATTERN "*.mod") +-- +2.41.0 + From 4be590d297ed124109d82d1d23135c29b837fa34 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 25 Oct 2023 12:13:32 +0200 Subject: [PATCH 313/408] Add dlaf variant to cp2k (#40702) --- .../repos/builtin/packages/cp2k/package.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 27deecf78472aa..2e765c7539fffb 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -83,6 +83,13 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): description="Enable optimised diagonalisation routines from ELPA", when="@6.1:", ) + variant( + "dlaf", + default=False, + description="Enable DLA-Future eigensolver and Cholesky decomposition", + # TODO: 
Pin version when integrated in a release + when="@master build_system=cmake", + ) variant( "sirius", default=False, @@ -226,6 +233,15 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("elpa@2021.11.001:", when="@9.1:") depends_on("elpa@2023.05.001:", when="@2023.2:") + with when("+dlaf"): + conflicts( + "~mpi", msg="DLA-Future requires MPI. Only the distributed eigensolver is available." + ) + depends_on("dla-future@0.2.1: +scalapack") + depends_on("dla-future ~cuda~rocm", when="~cuda~rocm") + depends_on("dla-future +cuda", when="+cuda") + depends_on("dla-future +rocm", when="+rocm") + with when("+plumed"): depends_on("plumed+shared") depends_on("plumed+mpi", when="+mpi") @@ -945,6 +961,7 @@ def cmake_args(self): args += [ self.define_from_variant("CP2K_ENABLE_REGTESTS", "enable_regtests"), self.define_from_variant("CP2K_USE_ELPA", "elpa"), + self.define_from_variant("CP2K_USE_DLAF", "dlaf"), self.define_from_variant("CP2K_USE_LIBINT2", "libint"), self.define_from_variant("CP2K_USE_SIRIUS", "sirius"), self.define_from_variant("CP2K_USE_SPLA", "spla"), From d0d095adc85e10f880c1c9dabcd32e63bf2b0674 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 25 Oct 2023 06:06:35 -0500 Subject: [PATCH 314/408] py-lightning: py-torch~distributed is broken again (#40696) --- var/spack/repos/builtin/packages/py-lightning/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index f5131ec0715265..8bec9806ee3478 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -94,3 +94,6 @@ class PyLightning(PythonPackage): depends_on("py-websocket-client@:2", type=("build", "run")) depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) + + # https://github.com/Lightning-AI/lightning/issues/18858 + conflicts("^py-torch~distributed", when="@2.1.0") From ce4fbd15adb93b00bfc429b3af741b372efc82ff Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 25 Oct 2023 17:35:47 +0200 Subject: [PATCH 315/408] ci: darwin aarch64 use apple-clang-15 tag (#40706) --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 2 +- .../cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 245bb51933ccf0..f4850a17ba8ec1 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -706,7 +706,7 @@ ml-linux-x86_64-rocm-build: SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps ml-darwin-aarch64-mps-generate: - tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ] + tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ] extends: [ ".ml-darwin-aarch64-mps", ".generate-base"] ml-darwin-aarch64-mps-build: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml index 6d8a0b7491f9a3..0905305113f083 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml @@ -89,7 +89,7 @@ spack: - build-job: variables: CI_GPG_KEY_ROOT: /etc/protected-runner - tags: [ 
"macos-ventura", "apple-clang-14", "aarch64-macos" ] + tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ] cdash: build-group: Machine Learning MPS From 8b88ac894df2da123e081cefe27d1e612c8a0fc6 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 25 Oct 2023 20:55:04 +0200 Subject: [PATCH 316/408] ci: don't put compilers in config (#40700) * ci: don't register detectable compilers Cause they go out of sync... * remove intel compiler, it can be detected too * Do not run spack compiler find since compilers are registered in concretize job already * trilinos: work around +stokhos +cuda +superlu-dist bug due to EMPTY macro --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 1 + .../gitlab/cloud_pipelines/configs/ci.yaml | 2 +- .../stacks/e4s-neoverse_v1/spack.yaml | 15 ---------- .../stacks/e4s-oneapi/spack.yaml | 28 ------------------- .../stacks/e4s-power/spack.yaml | 15 ---------- .../stacks/e4s-rocm-external/spack.yaml | 15 ---------- .../cloud_pipelines/stacks/e4s/spack.yaml | 15 ---------- .../builtin/packages/trilinos/package.py | 5 ++++ 8 files changed, 7 insertions(+), 89 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index f4850a17ba8ec1..579153bdfdc395 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -140,6 +140,7 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . + - spack compiler find - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 9aad850b5df065..29dc993a15578c 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -12,7 +12,7 @@ ci: before_script-: - - spack list --count # ensure that spack's cache is populated - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} - - spack compiler find + - spack compiler list - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi - - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data # AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index d42e5f1fcade20..47f0b55f9f03be 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -54,21 +54,6 @@ spack: cuda: version: [11.8.0] - compilers: - - compiler: - spec: gcc@11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] - specs: # CPU - adios diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 605a69e4a57d31..8e420a5b75c961 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -5,34 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: oneapi@2023.2.1 - paths: - cc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icx - cxx: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icpx - f77: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx - fc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%oneapi target=x86_64_v3' diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 10bf4bc57d99f7..95f8d37e0436bc 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@9.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: ppc64le - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: "%gcc@9.4.0 target=ppc64le" diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index b5ac17207796fe..c11dcf6ae1a551 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%gcc target=x86_64_v3' diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 710360172ab1c2..ea9bd5fe70b72f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%gcc target=x86_64_v3' diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 9af8ab14dcdd73..1681ac35d2e9a2 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -358,6 +358,11 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): conflicts("@:13.0.1 +cuda", when="^cuda@11:") # Build hangs with CUDA 11.6 (see #28439) conflicts("+cuda +stokhos", when="^cuda@11.6:") + # superlu-dist defines a macro EMPTY which conflicts with a header in cuda + # used when building stokhos + # Fix: 
https://github.com/xiaoyeli/superlu_dist/commit/09cb1430f7be288fd4d75b8ed461aa0b7e68fefe + # is not tagged yet. See discussion here https://github.com/trilinos/Trilinos/issues/11839 + conflicts("+cuda +stokhos +superlu-dist") # Cuda UVM must be enabled prior to 13.2 # See https://github.com/spack/spack/issues/28869 conflicts("~uvm", when="@:13.1 +cuda") From 4ab878e88279b420f53ecdc791a8b71ed979ad3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 20:58:53 +0200 Subject: [PATCH 317/408] build(deps): bump pytest from 7.4.2 to 7.4.3 in /lib/spack/docs (#40697) --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 8c7b4e88cc47a1..31403710385657 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -6,7 +6,7 @@ python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 urllib3==2.0.7 -pytest==7.4.2 +pytest==7.4.3 isort==5.12.0 black==23.9.1 flake8==6.1.0 From 564716ced892ddd4d973a12096d1c34e0d0a952d Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 25 Oct 2023 16:10:48 -0500 Subject: [PATCH 318/408] PyTorch: patch breakpad dependency (#40648) --- var/spack/repos/builtin/packages/py-torch/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index b876bf06362b98..8b641c4e702159 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -103,7 +103,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): "breakpad", default=True, description="Enable breakpad crash dump library", - when="@1.9:1.11", + when="@1.10:1.11", ) conflicts("+cuda+rocm") @@ -286,6 +286,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): when="@1.1:1.8.1", ) + # https://github.com/pytorch/pytorch/issues/70297 + patch( + "https://github.com/google/breakpad/commit/605c51ed96ad44b34c457bbca320e74e194c317e.patch?full_index=1", + sha256="694d83db3a2147d543357f22ba5c8d5683d0ed43e693d42bca8f24ec50080f98", + when="+breakpad", + working_dir="third_party/breakpad", + ) + # Fixes CMake configuration error when XNNPACK is disabled # https://github.com/pytorch/pytorch/pull/35607 # https://github.com/pytorch/pytorch/pull/37865 From f7e6064a9ff3a6b6c1b8c7541808168ea97fd249 Mon Sep 17 00:00:00 2001 From: Filippo Barbari <121092059+fbarbari@users.noreply.github.com> Date: Thu, 26 Oct 2023 00:49:46 +0200 Subject: [PATCH 319/408] Added Highway versions up to 1.0.7 (#40691) --- var/spack/repos/builtin/packages/highway/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/highway/package.py b/var/spack/repos/builtin/packages/highway/package.py index a708d3f3157df9..75f5398ab29717 100644 --- a/var/spack/repos/builtin/packages/highway/package.py +++ b/var/spack/repos/builtin/packages/highway/package.py @@ -12,7 +12,13 @@ class Highway(CMakePackage): homepage = "https://github.com/google/highway" url = "https://github.com/google/highway/archive/refs/tags/1.0.0.tar.gz" + version("1.0.7", sha256="5434488108186c170a5e2fca5e3c9b6ef59a1caa4d520b008a9b8be6b8abe6c5") + version("1.0.6", sha256="d89664a045a41d822146e787bceeefbf648cc228ce354f347b18f2b419e57207") + version("1.0.5", sha256="99b7dad98b8fa088673b720151458fae698ae5df9154016e39de4afdc23bb927") 
version("1.0.4", sha256="faccd343935c9e98afd1016e9d20e0b8b89d908508d1af958496f8c2d3004ac2") + version("1.0.3", sha256="566fc77315878473d9a6bd815f7de78c73734acdcb745c3dde8579560ac5440e") + version("1.0.2", sha256="e8ef71236ac0d97f12d553ec1ffc5b6375d57b5f0b860c7447dd69b6ed1072db") + version("1.0.1", sha256="7ca6af7dc2e3e054de9e17b9dfd88609a7fd202812b1c216f43cc41647c97311") version("1.0.0", sha256="ab4f5f864932268356f9f6aa86f612fa4430a7db3c8de0391076750197e876b8") depends_on("cmake@3.10:", type="build") From 84936625d26dc1a3d6403c2449d688faa4139431 Mon Sep 17 00:00:00 2001 From: Dominic Hofer <6570912+dominichofer@users.noreply.github.com> Date: Thu, 26 Oct 2023 01:22:22 +0200 Subject: [PATCH 320/408] cuda: add NVHPC_CUDA_HOME. (#40507) * [cuda] Add NVHPC_CUDA_HOME. * Add CUDA_HOME and NVHC_CUDA_HOME to cuda's dependent build env. --------- Co-authored-by: Dominic Hofer --- var/spack/repos/builtin/packages/cuda/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index 77bf03da37a368..e624acbaa9db8d 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -582,6 +582,8 @@ def setup_build_environment(self, env): def setup_dependent_build_environment(self, env, dependent_spec): env.set("CUDAHOSTCXX", dependent_spec.package.compiler.cxx) + env.set("CUDA_HOME", self.prefix) + env.set("NVHPC_CUDA_HOME", self.prefix) @property def cmake_prefix_paths(self): @@ -593,6 +595,7 @@ def cmake_prefix_paths(self): def setup_run_environment(self, env): env.set("CUDA_HOME", self.prefix) + env.set("NVHPC_CUDA_HOME", self.prefix) def install(self, spec, prefix): if os.path.exists("/tmp/cuda-installer.log"): From b6b0cf498ae7505d5f5e6430ff220e4217b163ee Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Wed, 25 Oct 2023 19:24:31 -0400 Subject: [PATCH 321/408] initial commit to fix mivisionx build for 5.6 (#40579) --- var/spack/repos/builtin/packages/mivisionx/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index bd1a40a8726aeb..b298160520f53f 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -182,7 +182,7 @@ def patch(self): depends_on("cmake@3.5:", type="build") depends_on("ffmpeg@:4", type="build", when="@:5.3") - depends_on("ffmpeg@4.4:", type="build", when="@5.4:") + depends_on("ffmpeg@4.4", type="build", when="@5.4:") depends_on("protobuf@:3", type="build") depends_on( "opencv@:3.4" From 888da9184d1ac585d3923fae53d54cedf99396c5 Mon Sep 17 00:00:00 2001 From: Victoria Cherkas <87643948+victoria-cherkas@users.noreply.github.com> Date: Thu, 26 Oct 2023 01:24:54 +0200 Subject: [PATCH 322/408] fdb: add releases v5.11.23 and v5.11.17 (#40571) --- var/spack/repos/builtin/packages/fdb/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/fdb/package.py b/var/spack/repos/builtin/packages/fdb/package.py index c6a2d6aa66b298..7dc2f75e76b031 100644 --- a/var/spack/repos/builtin/packages/fdb/package.py +++ b/var/spack/repos/builtin/packages/fdb/package.py @@ -18,6 +18,8 @@ class Fdb(CMakePackage): # master version of fdb is subject to frequent changes and is to be used experimentally. 
version("master", branch="master") + version("5.11.23", sha256="09b1d93f2b71d70c7b69472dfbd45a7da0257211f5505b5fcaf55bfc28ca6c65") + version("5.11.17", sha256="375c6893c7c60f6fdd666d2abaccb2558667bd450100817c0e1072708ad5591e") version("5.10.8", sha256="6a0db8f98e13c035098dd6ea2d7559f883664cbf9cba8143749539122ac46099") version("5.7.8", sha256="6adac23c0d1de54aafb3c663d077b85d0f804724596623b381ff15ea4a835f60") @@ -39,6 +41,7 @@ class Fdb(CMakePackage): depends_on("cmake@3.12:", type="build") depends_on("ecbuild@3.4:", type="build") + depends_on("ecbuild@3.7:", type="build", when="@5.11.6:") depends_on("eckit@1.16:") depends_on("eckit+admin", when="+tools") From 9f30e740edc75927b0e2ba4e01379778ec1fd4de Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Wed, 25 Oct 2023 23:26:49 +0000 Subject: [PATCH 323/408] Paraview 5.12 prep (#40527) * paraview: rebase the adios2 patch for 5.12-to-be * paraview: disable fastfloat and token for 5.12-to-be * paraview: require older protobuf for 5.12 as well * paraview: require C++11-supporting protobuf for `master` too --- .../builtin/packages/paraview/package.py | 14 +- .../vtk-adios2-module-no-kit-5.12.patch | 230 ++++++++++++++++++ 2 files changed, 241 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 5760a9d68da7de..5ca64f29c08610 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -226,8 +226,10 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): depends_on("protobuf@3.4:3.18", when="@:5.10%xl") depends_on("protobuf@3.4:3.18", when="@:5.10%xl_r") # protobuf requires newer abseil-cpp, which in turn requires C++14, - # but paraview uses C++11 by default - depends_on("protobuf@3.4:3.21", when="@:5.11") + # but paraview uses C++11 by default. Use for 5.11+ until ParaView updates + # its C++ standard level. 
+ depends_on("protobuf@3.4:3.21", when="@5.11:") + depends_on("protobuf@3.4:3.21", when="@master") depends_on("libxml2") depends_on("lz4") depends_on("xz") @@ -280,7 +282,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): # Fix IOADIOS2 module to work with kits # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653 - patch("vtk-adios2-module-no-kit.patch", when="@5.8:") + patch("vtk-adios2-module-no-kit.patch", when="@5.8:5.11") + # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653 + patch("vtk-adios2-module-no-kit-5.12.patch", when="@5.12:") # Patch for paraview 5.9.0%xl_r # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7591 @@ -426,6 +430,10 @@ def nvariant_bool(feature): self.define_from_variant("VISIT_BUILD_READER_Silo", "visitbridge"), ] + if spec.satisfies("@5.12:"): + cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_fast_float:BOOL=OFF") + cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_token:BOOL=OFF") + if spec.satisfies("@5.11:"): cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_verdict:BOOL=OFF") diff --git a/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch new file mode 100644 index 00000000000000..34a98eac474716 --- /dev/null +++ b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch @@ -0,0 +1,230 @@ +diff --git a/VTK/IO/ADIOS2/CMakeLists.txt b/VTK/IO/ADIOS2/CMakeLists.txt +index 86c6d49cc4f..07b1d4fe0ef 100644 +--- a/VTK/IO/ADIOS2/CMakeLists.txt ++++ b/VTK/IO/ADIOS2/CMakeLists.txt +@@ -1,9 +1,9 @@ + vtk_module_find_package(PRIVATE_IF_SHARED + PACKAGE ADIOS2 + VERSION 2.4) +-if (VTK_USE_MPI AND NOT ADIOS2_HAVE_MPI) ++if (TARGET VTK::ParallelMPI AND NOT ADIOS2_HAVE_MPI) + message(FATAL_ERROR "VTK built with MPI requires ADIOS2 built with MPI") +-elseif(NOT VTK_USE_MPI AND ADIOS2_HAVE_MPI) ++elseif(NOT TARGET VTK::ParallelMPI AND ADIOS2_HAVE_MPI) + message(FATAL_ERROR "VTK built without MPI requires ADIOS2 built without MPI") + endif() + +@@ -18,38 +18,30 @@ set(classes_core vtkADIOS2CoreImageReader) + set(private_classes_core Core/vtkADIOS2CoreArraySelection) + set(private_headers_core Core/vtkADIOS2CoreTypeTraits.h) + set(private_templates_core) +-set(vtk_io_adios2_core_enabled TRUE CACHE INTERNAL "" FORCE) + +-if (vtk_io_adios2_core_enabled) +- list(APPEND classes ${classes_core}) +- list(APPEND private_classes ${private_classes_core}) +- list(APPEND private_headers ${private_headers_core}) +- list(APPEND private_templates ${private_templates_core}) +-endif() ++list(APPEND classes ${classes_core}) ++list(APPEND private_classes ${private_classes_core}) ++list(APPEND private_headers ${private_headers_core}) ++list(APPEND private_templates ${private_templates_core}) ++ ++# Build VTX Schema for Parallel ++if (TARGET VTK::ParallelMPI) ++ set(classes_vtx vtkADIOS2VTXReader) ++ set(private_classes_vtx ++ VTX/VTXSchemaManager ++ VTX/common/VTXDataArray ++ VTX/common/VTXHelper ++ VTX/schema/VTXSchema ++ VTX/schema/vtk/VTXvtkBase ++ VTX/schema/vtk/VTXvtkVTI ++ VTX/schema/vtk/VTXvtkVTU) ++ set(private_headers_vtx VTX/common/VTXTypes.h) ++ set(private_templates_vtx ++ VTX/common/VTXHelper.txx ++ VTX/schema/VTXSchema.txx ++ VTX/schema/vtk/VTXvtkVTI.txx ++ VTX/schema/vtk/VTXvtkVTU.txx) + +-set(classes_vtx vtkADIOS2VTXReader) +-set(private_classes_vtx +- VTX/VTXSchemaManager +- VTX/common/VTXDataArray +- VTX/common/VTXHelper +- VTX/schema/VTXSchema +- VTX/schema/vtk/VTXvtkBase +- VTX/schema/vtk/VTXvtkVTI +- 
VTX/schema/vtk/VTXvtkVTU) +-set(private_headers_vtx VTX/common/VTXTypes.h) +-set(private_templates_vtx +- VTX/common/VTXHelper.txx +- VTX/schema/VTXSchema.txx +- VTX/schema/vtk/VTXvtkVTI.txx +- VTX/schema/vtk/VTXvtkVTU.txx) +- +-if (VTK_USE_MPI) +- set(vtk_io_adios2_vtx_enabled TRUE CACHE INTERNAL "" FORCE) +-else () +- set(vtk_io_adios2_vtx_enabled FALSE CACHE INTERNAL "" FORCE) +-endif() +- +-if (vtk_io_adios2_vtx_enabled) + list(APPEND classes ${classes_vtx}) + list(APPEND private_classes ${private_classes_vtx}) + list(APPEND private_headers ${private_headers_vtx}) +@@ -63,10 +55,6 @@ vtk_module_add_module(VTK::IOADIOS2 + PRIVATE_TEMPLATES ${private_templates}) + vtk_module_link(VTK::IOADIOS2 PRIVATE adios2::adios2) + +-if (ADIOS2_HAVE_MPI) +- vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_HAVE_MPI) +-endif () +- + if (ADIOS2_VERSION VERSION_GREATER_EQUAL "2.8.0") + vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_BP5_RANDOM_ACCESS) + endif () +diff --git a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt +index 1534a1e7271..29c51970daf 100644 +--- a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt ++++ b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt +@@ -2,40 +2,34 @@ find_package(ADIOS2 2.4 REQUIRED + COMPONENTS CXX + OPTIONAL_COMPONENTS MPI) + +-if (ADIOS2_HAVE_MPI) +- if (vtk_io_adios2_core_enabled) +- set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2) ++if (TARGET VTK::ParallelMPI) ++ set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2) + # For now vtkMultiBlockVolumeMapper does not support rendering in parallel +- set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2) +- set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2) +- vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA +- TestADIOS2BPReaderMPISingleTimeStep.cxx +- TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID +- TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx) +- vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests) +- endif() ++ set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2) ++ set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2) ++ vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA ++ TestADIOS2BPReaderMPISingleTimeStep.cxx ++ TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID ++ TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx) ++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests) + + # VTX tests +- if (vtk_io_adios2_vtx_enabled) +- vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT +- UnitTestIOADIOS2VTX.cxx,NO_VALID +- #TestIOADIOS2VTX_VTI3D.cxx, +- TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID +- #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID +- TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID +- TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID +- TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID) ++ vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT ++ UnitTestIOADIOS2VTX.cxx,NO_VALID ++ #TestIOADIOS2VTX_VTI3D.cxx, ++ TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID ++ #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID) + +- vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests) +- target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2) +- endif () ++ vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests) ++ target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2) + else () +- if (vtk_io_adios2_core_enabled) +- vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA +- TestADIOS2BPReaderSingleTimeStep.cxx +- 
TestADIOS2BPReaderMultiTimeSteps3D.cxx +- TestADIOS2BPReaderMultiTimeSteps2D.cxx) ++ vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA ++ TestADIOS2BPReaderSingleTimeStep.cxx ++ TestADIOS2BPReaderMultiTimeSteps3D.cxx ++ TestADIOS2BPReaderMultiTimeSteps2D.cxx) + +- vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests) +- endif () ++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests) + endif () +diff --git a/VTK/IO/ADIOS2/vtk.module b/VTK/IO/ADIOS2/vtk.module +index 5069bd828b0..fe37260eb6d 100644 +--- a/VTK/IO/ADIOS2/vtk.module ++++ b/VTK/IO/ADIOS2/vtk.module +@@ -2,8 +2,6 @@ NAME + VTK::IOADIOS2 + LIBRARY_NAME + vtkIOADIOS2 +-KIT +- VTK::IO + SPDX_LICENSE_IDENTIFIER + LicenseRef-BSD-3-Clause-Sandia-USGov + SPDX_COPYRIGHT_TEXT +diff --git a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx +index 6ba4d25230d..c209fd905d5 100644 +--- a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx ++++ b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx +@@ -28,7 +28,7 @@ + #include "vtkLongLongArray.h" + #include "vtkMultiBlockDataSet.h" + #include "vtkMultiPieceDataSet.h" +-#include "vtkMultiProcessController.h" ++#include "vtkMultiProcessController.h" // For the MPI controller member + #include "vtkNew.h" + #include "vtkObjectFactory.h" + #include "vtkPointData.h" +@@ -46,7 +46,7 @@ + #include "vtkUnstructuredGrid.h" + #include "vtksys/SystemTools.hxx" + +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + #include "vtkMPI.h" + #include "vtkMPIController.h" + #endif +@@ -126,7 +126,7 @@ vtkNew vtkADIOS2CoreImageReader::vtkADIOS2CoreImageReaderI + int myLen = static_cast(ibds->GetNumberOfBlocks()); + int* allLens{ nullptr }; + int procId{ 0 }, numProcess{ 0 }; +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + auto ctrl = vtkMultiProcessController::GetGlobalController(); + if (ctrl) + { +@@ -286,7 +286,7 @@ const vtkADIOS2CoreImageReader::StringToParams& vtkADIOS2CoreImageReader::GetAva + //------------------------------------------------------------------------------ + void vtkADIOS2CoreImageReader::SetController(vtkMultiProcessController* controller) + { +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + vtkMPIController* mpiController = vtkMPIController::SafeDownCast(controller); + if (controller && !mpiController) + { +@@ -337,7 +337,7 @@ bool vtkADIOS2CoreImageReader::OpenAndReadMetaData() + // Initialize the ADIOS2 data structures + if (!this->Impl->Adios) + { +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + // Make sure the ADIOS subsystem is initialized before processing any + // sort of request. 
+ if (!this->Controller) +@@ -910,7 +910,7 @@ void vtkADIOS2CoreImageReader::CalculateWorkDistribution(const std::string& varN + auto var = this->Impl->AdiosIO.InquireVariable(varName); + size_t blockNum = this->Impl->BpReader.BlocksInfo(var, this->Impl->RequestStep).size(); + +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + size_t rank = static_cast(this->Controller->GetLocalProcessId()); + size_t procs = static_cast(this->Controller->GetNumberOfProcesses()); + #else +-- +GitLab From 642485b79fcffe5eebbbfc3489a7dabc7c662429 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 11:26:47 +0200 Subject: [PATCH 324/408] git versions: fix commit shas [automated] (#40703) --- .../builtin/packages/cpp-logger/package.py | 2 +- .../repos/builtin/packages/dakota/package.py | 7 +++- .../repos/builtin/packages/damaris/package.py | 36 ++++++++++++------- .../repos/builtin/packages/exago/package.py | 4 ++- .../repos/builtin/packages/hpx/package.py | 2 +- .../repos/builtin/packages/ipm/package.py | 2 +- .../repos/builtin/packages/neovim/package.py | 2 +- .../repos/builtin/packages/upp/package.py | 21 +++++++++-- 8 files changed, 54 insertions(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/cpp-logger/package.py b/var/spack/repos/builtin/packages/cpp-logger/package.py index 93c27cf9842ccf..f325fa09dfc2ba 100644 --- a/var/spack/repos/builtin/packages/cpp-logger/package.py +++ b/var/spack/repos/builtin/packages/cpp-logger/package.py @@ -15,4 +15,4 @@ class CppLogger(CMakePackage): version("develop", branch="develop") version("master", branch="master") - version("0.0.1", tag="v0.0.1", commit="47994ccd8958129a422950a432742b902bb283ca") + version("0.0.1", tag="v0.0.1", commit="d48b38ab14477bb7c53f8189b8b4be2ea214c28a") diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py index a91764e9e43edd..e0374ad927880e 100644 --- a/var/spack/repos/builtin/packages/dakota/package.py +++ b/var/spack/repos/builtin/packages/dakota/package.py @@ -40,7 +40,12 @@ class Dakota(CMakePackage): git = "https://github.com/snl-dakota/dakota.git" url = "https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-6.12-release-public.src.tar.gz" - version("6.18", tag="v6.18.0", submodules=submodules) + version( + "6.18", + tag="v6.18.0", + commit="f6cb33b517bb304795e1e14d3673fe289df2ec9b", + submodules=submodules, + ) version("6.12", sha256="4d69f9cbb0c7319384ab9df27643ff6767eb410823930b8fbd56cc9de0885bc9") version("6.9", sha256="989b689278964b96496e3058b8ef5c2724d74bcd232f898fe450c51eba7fe0c2") version("6.3", sha256="0fbc310105860d77bb5c96de0e8813d75441fca1a5e6dfaf732aa095c4488d52") diff --git a/var/spack/repos/builtin/packages/damaris/package.py b/var/spack/repos/builtin/packages/damaris/package.py index a93bbece1318c3..3f8b6f156bead8 100644 --- a/var/spack/repos/builtin/packages/damaris/package.py +++ b/var/spack/repos/builtin/packages/damaris/package.py @@ -16,19 +16,29 @@ class Damaris(CMakePackage): maintainers("jcbowden") version("master", branch="master") - version("1.9.2", tag="v1.9.2") - version("1.9.1", tag="v1.9.1") - version("1.9.0", tag="v1.9.0") - version("1.8.2", tag="v1.8.2") - version("1.8.1", tag="v1.8.1") - version("1.8.0", tag="v1.8.0") - version("1.7.1", tag="v1.7.1") - version("1.7.0", tag="v1.7.0") - version("1.6.0", tag="v1.6.0", deprecated=True) - version("1.5.0", tag="v1.5.0", deprecated=True) - version("1.3.3", tag="v1.3.3", deprecated=True) - version("1.3.2", tag="v1.3.2", 
deprecated=True) - version("1.3.1", tag="v1.3.1", deprecated=True) + version("1.9.2", tag="v1.9.2", commit="22c146b4b4ca047d4d36fd904d248e0280b3c0ea") + version("1.9.1", tag="v1.9.1", commit="2fe83f587837b7ad0b5c187b8ff453f7d3ad2c18") + version("1.9.0", tag="v1.9.0", commit="23cac3a8ade9f9c20499081a8ed10b3e51801428") + version("1.8.2", tag="v1.8.2", commit="bd447e677cdf81389f93bea3139af0fa54554a01") + version("1.8.1", tag="v1.8.1", commit="18513edb1e11974a4296263ff8499d2802e17891") + version("1.8.0", tag="v1.8.0", commit="56701eee59d464cc73d248fbd5e7a8a70e7a3933") + version("1.7.1", tag="v1.7.1", commit="09dfbe7828ee295b4433c9e01c6523fa6b4adab5") + version("1.7.0", tag="v1.7.0", commit="9ab3ea4c568de16f5d43b8b5ad71feb4864a5584") + version( + "1.6.0", tag="v1.6.0", commit="1fe4c61cce03babd24315b8e6156f226baac97a2", deprecated=True + ) + version( + "1.5.0", tag="v1.5.0", commit="68206a696ad430aa8426ca370501aa71914fbc87", deprecated=True + ) + version( + "1.3.3", tag="v1.3.3", commit="f1c473507c080738f7092f6a7d72deb938ade786", deprecated=True + ) + version( + "1.3.2", tag="v1.3.2", commit="38b50664523e56900809a19f0cf52fc0ab5dca53", deprecated=True + ) + version( + "1.3.1", tag="v1.3.1", commit="6cee3690fa7d387acc8f5f650a7b019e13b90284", deprecated=True + ) variant("fortran", default=True, description="Enables Fortran support") variant("hdf5", default=False, description="Enables the HDF5 storage plugin") diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index b38aff0147b9a0..c7868779166a36 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -17,7 +17,9 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") - version("1.5.1", tag="v1.5.1", submodules=True) + version( + "1.5.1", tag="v1.5.1", commit="84e9faf9d9dad8d851075eba26038338d90e6d3a", submodules=True + ) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py index b55c9ea6143b6a..5c0d390e590441 100644 --- a/var/spack/repos/builtin/packages/hpx/package.py +++ b/var/spack/repos/builtin/packages/hpx/package.py @@ -21,7 +21,7 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage): tags = ["e4s"] version("master", branch="master") - version("stable", tag="stable", commit="38d5bf935e5a49f9466c5e615e04e8d553a73dc6") + version("stable", tag="stable", commit="103a7b8e3719a0db948d1abde29de0ff91e070be") version("1.9.1", sha256="1adae9d408388a723277290ddb33c699aa9ea72defadf3f12d4acc913a0ff22d") version("1.9.0", sha256="2a8dca78172fbb15eae5a5e9facf26ab021c845f9c09e61b1912e6cf9e72915a") version("1.8.1", sha256="2fc4c10f55e2e6bcdc6f6ff950e26c6d8e218e138fdbd885ee71ccf5c5549054") diff --git a/var/spack/repos/builtin/packages/ipm/package.py b/var/spack/repos/builtin/packages/ipm/package.py index 0c4b94c8c47b4f..654707a96abdf2 100644 --- a/var/spack/repos/builtin/packages/ipm/package.py +++ b/var/spack/repos/builtin/packages/ipm/package.py @@ -19,7 +19,7 @@ class Ipm(AutotoolsPackage): maintainers("Christoph-TU") version("master", branch="master", preferred=True) - version("2.0.6", tag="2.0.6") + version("2.0.6", 
tag="2.0.6", commit="b008141ee16d39b33e20bffde615564afa107575") variant("papi", default=False, description="Enable PAPI") variant("cuda", default=False, description="Enable CUDA") diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index 737cc57de7e39b..36069fa76b9428 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -16,7 +16,7 @@ class Neovim(CMakePackage): maintainers("albestro", "trws") version("master", branch="master") - version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef") + version("stable", tag="stable", commit="d772f697a281ce9c58bf933997b87c7f27428a60") version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093") version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369") version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e") diff --git a/var/spack/repos/builtin/packages/upp/package.py b/var/spack/repos/builtin/packages/upp/package.py index 3cef205afdb6ef..8bdb1187921da4 100644 --- a/var/spack/repos/builtin/packages/upp/package.py +++ b/var/spack/repos/builtin/packages/upp/package.py @@ -20,10 +20,25 @@ class Upp(CMakePackage): maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA") version("develop", branch="develop") - version("11.0.0", tag="upp_v11.0.0", submodules=True) + version( + "11.0.0", + tag="upp_v11.0.0", + commit="6b5c589c7650132c6f13a729a2853676a7b93bbb", + submodules=True, + ) version("10.0.10", sha256="0c96a88d0e79b554d5fcee9401efcf4d6273da01d15e3413845274f73d70b66e") - version("10.0.9", tag="upp_v10.0.9", submodules=True) - version("10.0.8", tag="upp_v10.0.8", submodules=True) + version( + "10.0.9", + tag="upp_v10.0.9", + commit="a49af0549958def4744cb3903c7315476fe44530", + submodules=True, + ) + version( + "10.0.8", + tag="upp_v10.0.8", + commit="ce989911a7a09a2e2a0e61b3acc87588b5b9fc26", + submodules=True, + ) version("8.2.0", sha256="38de2178dc79420f42aa3fb8b85796fc49d43d66f90e5276e47ab50c282627ac") variant("openmp", default=True, description="Use OpenMP threading") From 52318477a45d9cd7cb0ffbce9c83dcc1497a2210 Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Thu, 26 Oct 2023 11:33:27 +0200 Subject: [PATCH 325/408] libluv: require CMake 3 and CMP0042 (#40716) --- var/spack/repos/builtin/packages/libluv/package.py | 7 +++++++ var/spack/repos/builtin/packages/neovim/package.py | 3 --- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py index b3600f63ce6f54..ff9a9db5e62d09 100644 --- a/var/spack/repos/builtin/packages/libluv/package.py +++ b/var/spack/repos/builtin/packages/libluv/package.py @@ -22,11 +22,18 @@ class Libluv(CMakePackage): version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36") version("1.36.0-0", sha256="f2e7eb372574f25c6978c1dc74280d22efdcd7df2dda4a286c7fe7dceda26445") + # https://github.com/neovim/neovim/issues/25770 + # up to 1.45 (included) dynamic library on macOS did not have the @rpath prefix, being not + # usable on this platform. + # from 1.46, by requiring a newer cmake version, CMP0042 is in place and it works correctly. 
+ depends_on("cmake@3:", type="build") + depends_on("lua-lang", type="link") depends_on("libuv", type="link") def cmake_args(self): args = [ + self.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"), "-DLUA_BUILD_TYPE=System", "-DBUILD_STATIC_LIBS=ON", "-DBUILD_SHARED_LIBS=ON", diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index 36069fa76b9428..ff59c4f539f0f2 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -140,9 +140,6 @@ class Neovim(CMakePackage): # https://github.com/neovim/neovim/issues/16217#issuecomment-958590493 conflicts("libvterm@0.2:", when="@:0.7") - # https://github.com/neovim/neovim/issues/25770 - conflicts("libluv@1.44:", when="platform=darwin") - @when("^lua") def cmake_args(self): return [self.define("PREFER_LUA", True)] From 23573734218d5191101c7b4c534d6ff9d1fb6b5d Mon Sep 17 00:00:00 2001 From: Xavier Delaruelle Date: Thu, 26 Oct 2023 13:49:13 +0200 Subject: [PATCH 326/408] modules: hide implicit modulefiles (#36619) Renames exclude_implicits to hide_implicits When hide_implicits option is enabled, generate modulefile of implicitly installed software and hide them. Even if implicit, those modulefiles may be referred as dependency in other modulefiles thus they should be generated to make module properly load dependent module. A new hidden property is added to BaseConfiguration class. To hide modulefiles, modulercs are generated along modulefiles. Such rc files contain specific module command to indicate a module should be hidden (for instance when using "module avail"). A modulerc property is added to TclFileLayout and LmodFileLayout classes to get fully qualified path name of the modulerc associated to a given modulefile. Modulerc files will be located in each module directory, next to the version modulefiles. This scheme is supported by both module tool implementations. modulerc_header and hide_cmd_format attributes are added to TclModulefileWriter and LmodModulefileWriter. They help to know how to generate a modulerc file with hidden commands for each module tool. Tcl modulerc file requires an header. As we use a command introduced on Modules 4.7 (module-hide --hidden-loaded), a version requirement is added to header string. For lmod, modules that open up a hierarchy are never hidden, even if they are implicitly installed. Modulerc is created, updated or removed when associated modulefile is written or removed. If an implicit modulefile becomes explicit, hidden command in modulerc for this modulefile is removed. If modulerc becomes empty, this file is removed. Modulerc file is not rewritten when no content change is detected. 
Co-authored-by: Harmen Stoppels --- lib/spack/spack/modules/common.py | 118 ++++++++++++++++-- lib/spack/spack/modules/lmod.py | 18 +++ lib/spack/spack/modules/tcl.py | 10 ++ lib/spack/spack/schema/modules.py | 52 +++++++- .../data/modules/lmod/hide_implicits.yaml | 11 ++ .../data/modules/tcl/exclude_implicits.yaml | 2 + .../test/data/modules/tcl/hide_implicits.yaml | 6 + lib/spack/spack/test/modules/common.py | 22 +++- lib/spack/spack/test/modules/lmod.py | 85 +++++++++++++ lib/spack/spack/test/modules/tcl.py | 103 +++++++++++++-- 10 files changed, 407 insertions(+), 20 deletions(-) create mode 100644 lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml create mode 100644 lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 57b7da5ad52ab8..98dcdb4fb1e3e5 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -491,10 +491,6 @@ def excluded(self): exclude_rules = conf.get("exclude", []) exclude_matches = [x for x in exclude_rules if spec.satisfies(x)] - # Should I exclude the module because it's implicit? - exclude_implicits = conf.get("exclude_implicits", None) - excluded_as_implicit = exclude_implicits and not self.explicit - def debug_info(line_header, match_list): if match_list: msg = "\t{0} : {1}".format(line_header, spec.cshort_spec) @@ -505,16 +501,28 @@ def debug_info(line_header, match_list): debug_info("INCLUDE", include_matches) debug_info("EXCLUDE", exclude_matches) - if excluded_as_implicit: - msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec) - tty.debug(msg) - - is_excluded = exclude_matches or excluded_as_implicit - if not include_matches and is_excluded: + if not include_matches and exclude_matches: return True return False + @property + def hidden(self): + """Returns True if the module has been hidden, False otherwise.""" + + # A few variables for convenience of writing the method + spec = self.spec + conf = self.module.configuration(self.name) + + hidden_as_implicit = not self.explicit and conf.get( + "hide_implicits", conf.get("exclude_implicits", False) + ) + + if hidden_as_implicit: + tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}") + + return hidden_as_implicit + @property def context(self): return self.conf.get("context", {}) @@ -849,6 +857,26 @@ def __init__(self, spec, module_set_name, explicit=None): name = type(self).__name__ raise DefaultTemplateNotDefined(msg.format(name)) + # Check if format for module hide command has been defined, + # throw if not found + try: + self.hide_cmd_format + except AttributeError: + msg = "'{0}' object has no attribute 'hide_cmd_format'\n" + msg += "Did you forget to define it in the class?" + name = type(self).__name__ + raise HideCmdFormatNotDefined(msg.format(name)) + + # Check if modulerc header content has been defined, + # throw if not found + try: + self.modulerc_header + except AttributeError: + msg = "'{0}' object has no attribute 'modulerc_header'\n" + msg += "Did you forget to define it in the class?" 
+ name = type(self).__name__ + raise ModulercHeaderNotDefined(msg.format(name)) + def _get_template(self): """Gets the template that will be rendered for this spec.""" # Get templates and put them in the order of importance: @@ -943,6 +971,9 @@ def write(self, overwrite=False): # Symlink defaults if needed self.update_module_defaults() + # record module hiddenness if implicit + self.update_module_hiddenness() + def update_module_defaults(self): if any(self.spec.satisfies(default) for default in self.conf.defaults): # This spec matches a default, it needs to be symlinked to default @@ -953,6 +984,60 @@ def update_module_defaults(self): os.symlink(self.layout.filename, default_tmp) os.rename(default_tmp, default_path) + def update_module_hiddenness(self, remove=False): + """Update modulerc file corresponding to module to add or remove + command that hides module depending on its hidden state. + + Args: + remove (bool): if True, hiddenness information for module is + removed from modulerc. + """ + modulerc_path = self.layout.modulerc + hide_module_cmd = self.hide_cmd_format % self.layout.use_name + hidden = self.conf.hidden and not remove + modulerc_exists = os.path.exists(modulerc_path) + updated = False + + if modulerc_exists: + # retrieve modulerc content + with open(modulerc_path, "r") as f: + content = f.readlines() + content = "".join(content).split("\n") + # remove last empty item if any + if len(content[-1]) == 0: + del content[-1] + already_hidden = hide_module_cmd in content + + # remove hide command if module not hidden + if already_hidden and not hidden: + content.remove(hide_module_cmd) + updated = True + + # add hide command if module is hidden + elif not already_hidden and hidden: + if len(content) == 0: + content = self.modulerc_header.copy() + content.append(hide_module_cmd) + updated = True + else: + content = self.modulerc_header.copy() + if hidden: + content.append(hide_module_cmd) + updated = True + + # no modulerc file change if no content update + if updated: + is_empty = content == self.modulerc_header or len(content) == 0 + # remove existing modulerc if empty + if modulerc_exists and is_empty: + os.remove(modulerc_path) + # create or update modulerc + elif content != self.modulerc_header: + # ensure file ends with a newline character + content.append("") + with open(modulerc_path, "w") as f: + f.write("\n".join(content)) + def remove(self): """Deletes the module file.""" mod_file = self.layout.filename @@ -960,6 +1045,7 @@ def remove(self): try: os.remove(mod_file) # Remove the module file self.remove_module_defaults() # Remove default targeting module file + self.update_module_hiddenness(remove=True) # Remove hide cmd in modulerc os.removedirs( os.path.dirname(mod_file) ) # Remove all the empty directories from the leaf up @@ -1003,5 +1089,17 @@ class DefaultTemplateNotDefined(AttributeError, ModulesError): """ +class HideCmdFormatNotDefined(AttributeError, ModulesError): + """Raised if the attribute 'hide_cmd_format' has not been specified + in the derived classes. + """ + + +class ModulercHeaderNotDefined(AttributeError, ModulesError): + """Raised if the attribute 'modulerc_header' has not been specified + in the derived classes. 
+ """ + + class ModulesTemplateNotFoundError(ModulesError, RuntimeError): """Raised if the template for a module file was not found.""" diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index d81e07e0bf9449..e2bcfa2973ecea 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -232,6 +232,13 @@ def missing(self): """Returns the list of tokens that are not available.""" return [x for x in self.hierarchy_tokens if x not in self.available] + @property + def hidden(self): + # Never hide a module that opens a hierarchy + if any(self.spec.package.provides(x) for x in self.hierarchy_tokens): + return False + return super().hidden + class LmodFileLayout(BaseFileLayout): """File layout for lmod module files.""" @@ -274,6 +281,13 @@ def filename(self): ) return fullname + @property + def modulerc(self): + """Returns the modulerc file associated with current module file""" + return os.path.join( + os.path.dirname(self.filename), ".".join([".modulerc", self.extension]) + ) + def token_to_path(self, name, value): """Transforms a hierarchy token into the corresponding path part. @@ -470,6 +484,10 @@ class LmodModulefileWriter(BaseModuleFileWriter): default_template = posixpath.join("modules", "modulefile.lua") + modulerc_header: list = [] + + hide_cmd_format = 'hide_version("%s")' + class CoreCompilersNotFoundError(spack.error.SpackError, KeyError): """Error raised if the key 'core_compilers' has not been specified diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py index 58b075379294b5..ed12827c33ef3a 100644 --- a/lib/spack/spack/modules/tcl.py +++ b/lib/spack/spack/modules/tcl.py @@ -6,6 +6,7 @@ """This module implements the classes necessary to generate Tcl non-hierarchical modules. """ +import os.path import posixpath from typing import Any, Dict @@ -56,6 +57,11 @@ class TclConfiguration(BaseConfiguration): class TclFileLayout(BaseFileLayout): """File layout for tcl module files.""" + @property + def modulerc(self): + """Returns the modulerc file associated with current module file""" + return os.path.join(os.path.dirname(self.filename), ".modulerc") + class TclContext(BaseContext): """Context class for tcl module files.""" @@ -73,3 +79,7 @@ class TclModulefileWriter(BaseModuleFileWriter): # os.path.join due to spack.spec.Spec.format # requiring forward slash path seperators at this stage default_template = posixpath.join("modules", "modulefile.tcl") + + modulerc_header = ["#%Module4.7"] + + hide_cmd_format = "module-hide --soft --hidden-loaded %s" diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index 1d285f851bb85f..adf1a935861010 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -17,7 +17,7 @@ #: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE spec_regex = ( - r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|" + r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|" r"whitelist|blacklist|" # DEPRECATED: remove in 0.20. r"include|exclude|" # use these more inclusive/consistent options r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" @@ -89,6 +89,7 @@ "exclude": array_of_strings, "exclude_implicits": {"type": "boolean", "default": False}, "defaults": array_of_strings, + "hide_implicits": {"type": "boolean", "default": False}, "naming_scheme": {"type": "string"}, # Can we be more specific here? 
"projections": projections_scheme, "all": module_file_configuration, @@ -187,3 +188,52 @@ "additionalProperties": False, "properties": properties, } + + +# deprecated keys and their replacements +old_to_new_key = {"exclude_implicits": "hide_implicits"} + + +def update_keys(data, key_translations): + """Change blacklist/whitelist to exclude/include. + + Arguments: + data (dict): data from a valid modules configuration. + key_translations (dict): A dictionary of keys to translate to + their respective values. + + Return: + (bool) whether anything was changed in data + """ + changed = False + + if isinstance(data, dict): + keys = list(data.keys()) + for key in keys: + value = data[key] + + translation = key_translations.get(key) + if translation: + data[translation] = data.pop(key) + changed = True + + changed |= update_keys(value, key_translations) + + elif isinstance(data, list): + for elt in data: + changed |= update_keys(elt, key_translations) + + return changed + + +def update(data): + """Update the data in place to remove deprecated properties. + + Args: + data (dict): dictionary to be updated + + Returns: + True if data was changed, False otherwise + """ + # translate blacklist/whitelist to exclude/include + return update_keys(data, old_to_new_key) diff --git a/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml new file mode 100644 index 00000000000000..d13c1a7b975ff1 --- /dev/null +++ b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml @@ -0,0 +1,11 @@ +enable: + - lmod +lmod: + hide_implicits: true + core_compilers: + - 'clang@3.3' + hierarchy: + - mpi + + all: + autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml index 2d892c43513a51..5af22e6e40c272 100644 --- a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml +++ b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml @@ -1,3 +1,5 @@ +# DEPRECATED: remove this in ? 
+# See `hide_implicits.yaml` for the new syntax enable: - tcl tcl: diff --git a/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml new file mode 100644 index 00000000000000..3ae7517b8f8b2b --- /dev/null +++ b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml @@ -0,0 +1,6 @@ +enable: + - tcl +tcl: + hide_implicits: true + all: + autoload: direct diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py index 0c8a98432ff378..15656dff259671 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -14,6 +14,7 @@ import spack.package_base import spack.schema.modules import spack.spec +import spack.util.spack_yaml as syaml from spack.modules.common import UpstreamModuleIndex from spack.spec import Spec @@ -190,11 +191,30 @@ def find_nothing(*args): spack.package_base.PackageBase.uninstall_by_spec(spec) +@pytest.mark.parametrize( + "module_type, old_config,new_config", + [("tcl", "exclude_implicits.yaml", "hide_implicits.yaml")], +) +def test_exclude_include_update(module_type, old_config, new_config): + module_test_data_root = os.path.join(spack.paths.test_path, "data", "modules", module_type) + with open(os.path.join(module_test_data_root, old_config)) as f: + old_yaml = syaml.load(f) + with open(os.path.join(module_test_data_root, new_config)) as f: + new_yaml = syaml.load(f) + + # ensure file that needs updating is translated to the right thing. + assert spack.schema.modules.update_keys(old_yaml, spack.schema.modules.old_to_new_key) + assert new_yaml == old_yaml + # ensure a file that doesn't need updates doesn't get updated + original_new_yaml = new_yaml.copy() + assert not spack.schema.modules.update_keys(new_yaml, spack.schema.modules.old_to_new_key) + assert original_new_yaml == new_yaml + + @pytest.mark.regression("37649") def test_check_module_set_name(mutable_config): """Tests that modules set name are validated correctly and an error is reported if the name we require does not exist or is reserved by the configuration.""" - # Minimal modules.yaml config. 
spack.config.set( "modules", diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index fcea6b0e794eaf..510006f0a98dda 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import pytest @@ -433,3 +434,87 @@ def test_modules_no_arch(self, factory, module_configuration): path = module.layout.filename assert str(spec.os) not in path + + def test_hide_implicits(self, module_configuration): + """Tests the addition and removal of hide command in modulerc.""" + module_configuration("hide_implicits") + + spec = spack.spec.Spec("mpileaks@2.3").concretized() + + # mpileaks is defined as implicit, thus hide command should appear in modulerc + writer = writer_cls(spec, "default", False) + writer.write() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = 'hide_version("%s")' % writer.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + + # mpileaks becomes explicit, thus modulerc is removed + writer = writer_cls(spec, "default", True) + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # mpileaks is defined as explicit, no modulerc file should exist + writer = writer_cls(spec, "default", True) + writer.write() + assert not os.path.exists(writer.layout.modulerc) + + # explicit module is removed + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # implicit module is removed + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + assert os.path.exists(writer.layout.filename) + assert os.path.exists(writer.layout.modulerc) + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # three versions of mpileaks are implicit + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized() + spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized() + writer_alt1 = writer_cls(spec_alt1, "default", False) + writer_alt1.write(overwrite=True) + writer_alt2 = writer_cls(spec_alt2, "default", False) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = 'hide_version("%s")' % writer.layout.use_name + hide_cmd_alt1 = 'hide_version("%s")' % writer_alt1.layout.use_name + hide_cmd_alt2 = 'hide_version("%s")' % writer_alt2.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 1 + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 + + # one version is removed, a second becomes explicit + writer_alt1.remove() + writer_alt2 = writer_cls(spec_alt2, "default", True) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 0 + assert len([x for x in content if hide_cmd_alt2 == x]) == 0 + + # disable hide_implicits configuration option + module_configuration("autoload_direct") + writer = 
writer_cls(spec, "default") + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # reenable hide_implicits configuration option + module_configuration("hide_implicits") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 3c5bb01b81035a..cc12a1eedc8bb8 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import pytest @@ -438,38 +439,40 @@ def test_extend_context(self, modulefile_content, module_configuration): @pytest.mark.regression("4400") @pytest.mark.db - def test_exclude_implicits(self, module_configuration, database): - module_configuration("exclude_implicits") + @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) + def test_hide_implicits_no_arg(self, module_configuration, database, config_name): + module_configuration(config_name) # mpileaks has been installed explicitly when setting up # the tests database mpileaks_specs = database.query("mpileaks") for item in mpileaks_specs: writer = writer_cls(item, "default") - assert not writer.conf.excluded + assert not writer.conf.hidden # callpath is a dependency of mpileaks, and has been pulled # in implicitly callpath_specs = database.query("callpath") for item in callpath_specs: writer = writer_cls(item, "default") - assert writer.conf.excluded + assert writer.conf.hidden @pytest.mark.regression("12105") - def test_exclude_implicits_with_arg(self, module_configuration): - module_configuration("exclude_implicits") + @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) + def test_hide_implicits_with_arg(self, module_configuration, config_name): + module_configuration(config_name) # mpileaks is defined as explicit with explicit argument set on writer mpileaks_spec = spack.spec.Spec("mpileaks") mpileaks_spec.concretize() writer = writer_cls(mpileaks_spec, "default", True) - assert not writer.conf.excluded + assert not writer.conf.hidden # callpath is defined as implicit with explicit argument set on writer callpath_spec = spack.spec.Spec("callpath") callpath_spec.concretize() writer = writer_cls(callpath_spec, "default", False) - assert writer.conf.excluded + assert writer.conf.hidden @pytest.mark.regression("9624") @pytest.mark.db @@ -498,3 +501,87 @@ def test_modules_no_arch(self, factory, module_configuration): path = module.layout.filename assert str(spec.os) not in path + + def test_hide_implicits(self, module_configuration): + """Tests the addition and removal of hide command in modulerc.""" + module_configuration("hide_implicits") + + spec = spack.spec.Spec("mpileaks@2.3").concretized() + + # mpileaks is defined as implicit, thus hide command should appear in modulerc + writer = writer_cls(spec, "default", False) + writer.write() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + + # mpileaks becomes explicit, thus modulerc is removed + writer = writer_cls(spec, "default", True) + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # mpileaks is defined as explicit, no modulerc file should exist + writer = 
writer_cls(spec, "default", True) + writer.write() + assert not os.path.exists(writer.layout.modulerc) + + # explicit module is removed + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # implicit module is removed + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + assert os.path.exists(writer.layout.filename) + assert os.path.exists(writer.layout.modulerc) + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # three versions of mpileaks are implicit + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized() + spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized() + writer_alt1 = writer_cls(spec_alt1, "default", False) + writer_alt1.write(overwrite=True) + writer_alt2 = writer_cls(spec_alt2, "default", False) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name + hide_cmd_alt1 = "module-hide --soft --hidden-loaded %s" % writer_alt1.layout.use_name + hide_cmd_alt2 = "module-hide --soft --hidden-loaded %s" % writer_alt2.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 1 + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 + + # one version is removed, a second becomes explicit + writer_alt1.remove() + writer_alt2 = writer_cls(spec_alt2, "default", True) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 0 + assert len([x for x in content if hide_cmd_alt2 == x]) == 0 + + # disable hide_implicits configuration option + module_configuration("autoload_direct") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # reenable hide_implicits configuration option + module_configuration("hide_implicits") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) From d33b0576c79c5298f651ead740bb48c7a2272a66 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 14:48:35 +0200 Subject: [PATCH 327/408] spack checksum: show long flags in usage output (#40407) --- lib/spack/spack/cmd/checksum.py | 8 ++++---- share/spack/spack-completion.bash | 2 +- share/spack/spack-completion.fish | 16 ++++++++-------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index efa4a268c16b5b..91a04ca1c9dd03 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -37,30 +37,30 @@ def setup_parser(subparser): help="don't clean up staging area when command completes", ) subparser.add_argument( - "-b", "--batch", + "-b", action="store_true", default=False, help="don't ask which versions to checksum", ) subparser.add_argument( - "-l", "--latest", + "-l", action="store_true", default=False, help="checksum the latest available version", ) subparser.add_argument( - 
"-p", "--preferred", + "-p", action="store_true", default=False, help="checksum the known Spack preferred version", ) modes_parser = subparser.add_mutually_exclusive_group() modes_parser.add_argument( - "-a", "--add-to-package", + "-a", action="store_true", default=False, help="add new versions to package", diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 0280524536cfbc..890948892a1a7d 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -681,7 +681,7 @@ _spack_change() { _spack_checksum() { if $list_options then - SPACK_COMPREPLY="-h --help --keep-stage -b --batch -l --latest -p --preferred -a --add-to-package --verify -j --jobs" + SPACK_COMPREPLY="-h --help --keep-stage --batch -b --latest -l --preferred -p --add-to-package -a --verify -j --jobs" else _all_packages fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index e37b3448d5fcfc..a09cdfa83716ea 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -906,14 +906,14 @@ complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -f -a he complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -d 'show this help message and exit' complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -f -a keep_stage complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -d 'don\'t clean up staging area when command completes' -complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -f -a batch -complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -d 'don\'t ask which versions to checksum' -complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -f -a latest -complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -d 'checksum the latest available version' -complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -f -a preferred -complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -d 'checksum the known Spack preferred version' -complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -f -a add_to_package -complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -d 'add new versions to package' +complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -f -a batch +complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -d 'don\'t ask which versions to checksum' +complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -f -a latest +complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -d 'checksum the latest available version' +complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -f -a preferred +complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -d 'checksum the known Spack preferred version' +complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -f -a add_to_package +complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -d 'add new versions to package' complete -c spack -n '__fish_spack_using_command checksum' -l verify -f -a verify complete -c spack -n '__fish_spack_using_command checksum' -l verify -d 'verify known package checksums' complete -c spack -n '__fish_spack_using_command checksum' -s j -l jobs -r -f -a jobs From 3d91c582615ad2bea35a2b9f326c617c43c9423e Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 26 Oct 2023 08:18:02 -0500 Subject: [PATCH 328/408] PythonPackage: nested config_settings (#40693) * PythonPackage: nested config_settings * flake8 --- lib/spack/spack/build_systems/python.py | 43 +++++++++++++++---------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index fa27f8de495b67..c370178d7032b6 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -24,14 +24,30 @@ import spack.spec import spack.store from spack.directives import build_system, depends_on, extends, maintainers -from spack.error import NoHeadersError, NoLibrariesError, SpecError +from spack.error import NoHeadersError, NoLibrariesError from spack.install_test import test_part from spack.util.executable import Executable -from spack.version import Version from ._checks import BaseBuilder, execute_install_time_tests +def _flatten_dict(dictionary): + """Iterable that yields KEY=VALUE paths through a dictionary. + Args: + dictionary: Possibly nested dictionary of arbitrary keys and values. + Yields: + A single path through the dictionary. + """ + for key, item in dictionary.items(): + if isinstance(item, dict): + # Recursive case + for value in _flatten_dict(item): + yield f"{key}={value}" + else: + # Base case + yield f"{key}={item}" + + class PythonExtension(spack.package_base.PackageBase): maintainers("adamjstewart") @@ -454,14 +470,15 @@ def build_directory(self): def config_settings(self, spec, prefix): """Configuration settings to be passed to the PEP 517 build backend. - Requires pip 22.1 or newer. + Requires pip 22.1 or newer for keys that appear only a single time, + or pip 23.1 or newer if the same key appears multiple times. Args: spec (spack.spec.Spec): build spec prefix (spack.util.prefix.Prefix): installation prefix Returns: - dict: dictionary of KEY, VALUE settings + dict: Possibly nested dictionary of KEY, VALUE settings """ return {} @@ -525,22 +542,14 @@ def install(self, pkg, spec, prefix): pip.add_default_arg("-m") pip.add_default_arg("pip") - args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix] - - for key, value in self.config_settings(spec, prefix).items(): - if spec["py-pip"].version < Version("22.1"): - raise SpecError( - "'{}' package uses 'config_settings' which is only supported by " - "pip 22.1+. Add the following line to the package to fix this:\n\n" - ' depends_on("py-pip@22.1:", type="build")'.format(spec.name) - ) - - args.append("--config-settings={}={}".format(key, value)) + args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"] + for setting in _flatten_dict(self.config_settings(spec, prefix)): + args.append(f"--config-settings={setting}") for option in self.install_options(spec, prefix): - args.append("--install-option=" + option) + args.append(f"--install-option={option}") for option in self.global_options(spec, prefix): - args.append("--global-option=" + option) + args.append(f"--global-option={option}") if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"): args.append(pkg.stage.archive_file) From 1de326a82abf103ba6b7fe3796883c879e203e37 Mon Sep 17 00:00:00 2001 From: Xavier Delaruelle Date: Thu, 26 Oct 2023 15:55:49 +0200 Subject: [PATCH 329/408] modules: no --delim option if separator is colon character (#39010) Update Tcl modulefile template to simplify generated `append-path`, `prepend-path` and `remove-path` commands and improve their readability. 
If path element delimiter is colon character, do not set the `--delim` option as it is the default delimiter value. --- lib/spack/spack/test/modules/tcl.py | 34 ++++++-------------- share/spack/templates/modules/modulefile.tcl | 14 +++++++- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index cc12a1eedc8bb8..4a8d9e10a2fdae 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -133,9 +133,9 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration): module_configuration("module_path_separator") content = modulefile_content("module-path-separator") - assert len([x for x in content if "append-path --delim {:} COLON {foo}" in x]) == 1 - assert len([x for x in content if "prepend-path --delim {:} COLON {foo}" in x]) == 1 - assert len([x for x in content if "remove-path --delim {:} COLON {foo}" in x]) == 1 + assert len([x for x in content if "append-path COLON {foo}" in x]) == 1 + assert len([x for x in content if "prepend-path COLON {foo}" in x]) == 1 + assert len([x for x in content if "remove-path COLON {foo}" in x]) == 1 assert len([x for x in content if "append-path --delim {;} SEMICOLON {bar}" in x]) == 1 assert len([x for x in content if "prepend-path --delim {;} SEMICOLON {bar}" in x]) == 1 assert len([x for x in content if "remove-path --delim {;} SEMICOLON {bar}" in x]) == 1 @@ -150,37 +150,23 @@ def test_manpath_setup(self, modulefile_content, module_configuration): # no manpath set by module content = modulefile_content("mpileaks") - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 0 # manpath set by module with prepend-path content = modulefile_content("module-manpath-prepend") - assert ( - len([x for x in content if "prepend-path --delim {:} MANPATH {/path/to/man}" in x]) - == 1 - ) - assert ( - len( - [ - x - for x in content - if "prepend-path --delim {:} MANPATH {/path/to/share/man}" in x - ] - ) - == 1 - ) - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1 + assert len([x for x in content if "prepend-path MANPATH {/path/to/man}" in x]) == 1 + assert len([x for x in content if "prepend-path MANPATH {/path/to/share/man}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 1 # manpath set by module with append-path content = modulefile_content("module-manpath-append") - assert ( - len([x for x in content if "append-path --delim {:} MANPATH {/path/to/man}" in x]) == 1 - ) - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {/path/to/man}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 1 # manpath set by module with setenv content = modulefile_content("module-manpath-setenv") assert len([x for x in content if "setenv MANPATH {/path/to/man}" in x]) == 1 - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 0 @pytest.mark.regression("29578") def test_setenv_raw_value(self, modulefile_content, module_configuration): diff --git a/share/spack/templates/modules/modulefile.tcl b/share/spack/templates/modules/modulefile.tcl index 746fea2f31def9..d1593b88280e65 100644 --- a/share/spack/templates/modules/modulefile.tcl +++ 
b/share/spack/templates/modules/modulefile.tcl @@ -54,11 +54,23 @@ conflict {{ name }} {% block environment %} {% for command_name, cmd in environment_modifications %} {% if command_name == 'PrependPath' %} +{% if cmd.separator == ':' %} +prepend-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} prepend-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name in ('AppendPath', 'AppendFlagsEnv') %} +{% if cmd.separator == ':' %} +append-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} append-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name in ('RemovePath', 'RemoveFlagsEnv') %} +{% if cmd.separator == ':' %} +remove-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} remove-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name == 'SetEnv' %} setenv {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} {% elif command_name == 'UnsetEnv' %} @@ -68,7 +80,7 @@ unsetenv {{ cmd.name }} {% endfor %} {# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #} {% if has_manpath_modifications %} -append-path --delim {{ '{' }}:{{ '}' }} MANPATH {{ '{' }}{{ '}' }} +append-path MANPATH {{ '{' }}{{ '}' }} {% endif %} {% endblock %} From 57fbd33369fdf8b7fb9c010951a591b03d0a6c71 Mon Sep 17 00:00:00 2001 From: Auriane R <48684432+aurianer@users.noreply.github.com> Date: Thu, 26 Oct 2023 16:08:21 +0200 Subject: [PATCH 330/408] Add conflict between cxxstd > 17 and cuda < 12 in pika (#40717) * Add conflict with C++ standard > 17 and cuda < 12 * Removing map_cxxstd since boost supports C++20 flag --- var/spack/repos/builtin/packages/pika/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 50ff40f4112d90..0fd20bb0a3656a 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -42,7 +42,6 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): generator("ninja") - map_cxxstd = lambda cxxstd: "2a" if cxxstd == "20" else cxxstd cxxstds = ("17", "20", "23") variant( "cxxstd", @@ -91,6 +90,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): conflicts("%clang@:8", when="@0.2:") conflicts("+stdexec", when="cxxstd=17") conflicts("cxxstd=23", when="^cmake@:3.20.2") + # CUDA version <= 11 does not support C++20 and newer + for cxxstd in filter(lambda x: x != "17", cxxstds): + conflicts(f"cxxstd={cxxstd}", when="^cuda@:11") # Other dependencies depends_on("boost@1.71:") @@ -139,7 +141,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): ) for cxxstd in cxxstds: - depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd)) + depends_on("boost cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd)) depends_on("fmt cxxstd={0}".format(cxxstd), when="@0.11: cxxstd={0}".format(cxxstd)) # COROUTINES From a1e8615dda3dac30887e2232921c6f5354e4282f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 18:57:55 +0200 Subject: [PATCH 331/408] spack checksum pkg@1.2, use as version filter (#39694) * spack checksum pkg@1.2, use as version filter Currently pkg@1.2 splits on @ and looks for 1.2 specifically, with this PR pkg@1.2 is a filter so any matching 1.2, 1.2.1, ..., 
1.2.10 version is displayed. * fix tests * fix style --- lib/spack/spack/cmd/checksum.py | 22 ++++++++-------------- lib/spack/spack/stage.py | 6 ++++-- lib/spack/spack/test/cmd/checksum.py | 12 +++--------- 3 files changed, 15 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 91a04ca1c9dd03..9e5e32b3b76c7a 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -21,7 +21,6 @@ from spack.package_base import PackageBase, deprecated_version, preferred_version from spack.util.editor import editor from spack.util.format import get_version_lines -from spack.util.naming import valid_fully_qualified_module_name from spack.version import Version description = "checksum available versions of a package" @@ -68,27 +67,19 @@ def setup_parser(subparser): modes_parser.add_argument( "--verify", action="store_true", default=False, help="verify known package checksums" ) - arguments.add_common_arguments(subparser, ["package", "jobs"]) + subparser.add_argument("package", help="package or spec. for example cmake or cmake@3.18") subparser.add_argument( "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for" ) + arguments.add_common_arguments(subparser, ["jobs"]) def checksum(parser, args): - # Did the user pass 'package@version' string? - if len(args.versions) == 0 and "@" in args.package: - args.versions = [args.package.split("@")[1]] - args.package = args.package.split("@")[0] - - # Make sure the user provided a package and not a URL - if not valid_fully_qualified_module_name(args.package): - tty.die("`spack checksum` accepts package names, not URLs.") + spec = spack.spec.Spec(args.package) # Get the package we're going to generate checksums for - pkg_cls = spack.repo.PATH.get_pkg_class(args.package) - pkg = pkg_cls(spack.spec.Spec(args.package)) + pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec) - # Build a list of versions to checksum versions = [Version(v) for v in args.versions] # Define placeholder for remote versions. 
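The change above turns the version given on the command line into a filter rather than an exact match, so `spack checksum pkg@1.2` now offers 1.2, 1.2.1, ..., 1.2.10 instead of only 1.2. Below is a minimal standalone sketch of that filtering idea. It deliberately avoids Spack's real StandardVersion/VersionList machinery (the diff relies on `v.satisfies(version_filter)` for the actual semantics); the dotted-prefix check and every name in it are invented for illustration only.

def matches_filter(version: str, version_filter: str) -> bool:
    # Real range semantics live in VersionList.satisfies; a dotted-prefix
    # check is enough to show why the filter "1.2" should admit 1.2.x.
    if not version_filter:
        return True
    return version == version_filter or version.startswith(version_filter + ".")

def filter_versions(url_dict: dict, version_filter: str) -> dict:
    # Keep only the entries whose version passes the filter.
    return {v: u for v, u in url_dict.items() if matches_filter(v, version_filter)}

urls = {
    "1.2": "https://example.org/pkg-1.2.tar.gz",
    "1.2.1": "https://example.org/pkg-1.2.1.tar.gz",
    "1.2.10": "https://example.org/pkg-1.2.10.tar.gz",
    "1.3": "https://example.org/pkg-1.3.tar.gz",
}
print(sorted(filter_versions(urls, "1.2")))  # ['1.2', '1.2.1', '1.2.10']

In the real command this is what passing `spec.versions` as the initial filter to the interactive prompt achieves, as the next hunk shows.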
@@ -152,7 +143,10 @@ def checksum(parser, args): tty.die(f"Could not find any remote versions for {pkg.name}") elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): filtered_url_dict = spack.stage.interactive_version_filter( - url_dict, pkg.versions, url_changes=url_changed_for_version + url_dict, + pkg.versions, + url_changes=url_changed_for_version, + initial_verion_filter=spec.versions, ) if not filtered_url_dict: exit(0) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 1c7ebdec5c50df..690a45e7c5106e 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -870,6 +870,7 @@ def interactive_version_filter( url_dict: Dict[StandardVersion, str], known_versions: Iterable[StandardVersion] = (), *, + initial_verion_filter: Optional[VersionList] = None, url_changes: Set[StandardVersion] = set(), input: Callable[..., str] = input, ) -> Optional[Dict[StandardVersion, str]]: @@ -883,8 +884,9 @@ def interactive_version_filter( Filtered dictionary of versions to URLs or None if the user wants to quit """ # Find length of longest string in the list for padding - sorted_and_filtered = sorted(url_dict.keys(), reverse=True) - version_filter = VersionList([":"]) + version_filter = initial_verion_filter or VersionList([":"]) + sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)] + sorted_and_filtered.sort(reverse=True) max_len = max(len(str(v)) for v in sorted_and_filtered) orig_url_dict = url_dict # only copy when using editor to modify print_header = True diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index b2fc9d5f6ce11c..0dbaa88053070a 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -8,6 +8,7 @@ import pytest import spack.cmd.checksum +import spack.parser import spack.repo import spack.spec from spack.main import SpackCommand @@ -254,17 +255,10 @@ def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch, assert "Added 0 new versions to" not in output -def test_checksum_at(mock_packages): - pkg_cls = spack.repo.PATH.get_pkg_class("zlib") - versions = [str(v) for v in pkg_cls.versions] - output = spack_checksum(f"zlib@{versions[0]}") - assert "Found 1 version" in output - - def test_checksum_url(mock_packages): pkg_cls = spack.repo.PATH.get_pkg_class("zlib") - output = spack_checksum(f"{pkg_cls.url}", fail_on_error=False) - assert "accepts package names" in output + with pytest.raises(spack.parser.SpecSyntaxError): + spack_checksum(f"{pkg_cls.url}") def test_checksum_verification_fails(install_mockery, capsys): From 668ff992e6abcdd68a21ef92b10c43c4baaba6bd Mon Sep 17 00:00:00 2001 From: Ryan Danehy Date: Thu, 26 Oct 2023 11:18:31 -0700 Subject: [PATCH 332/408] Update spack package for exago@1.6.0 release (#40614) * Update spack package for exago:1.6.0 * update style * Weird spack style env bug fixed * Update spack package for exago:1.6.0 * update style * Weird spack style env bug fixed * changes to allow release 1.6.0 * fix depends, and versioning * rm cmake variable * add s * style fix --------- Co-authored-by: Ryan Danehy Co-authored-by: Ryan Danehy Co-authored-by: ryan.danehy@pnnl.gov --- .../repos/builtin/packages/exago/package.py | 50 +++++++++++++------ 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index c7868779166a36..ab48bab3776b86 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ 
b/var/spack/repos/builtin/packages/exago/package.py @@ -17,20 +17,36 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") + version( + "1.6.0", tag="v1.6.0", commit="159cd173572280ac0f6f094a71dcc3ebeeb34076", submodules=True + ) version( "1.5.1", tag="v1.5.1", commit="84e9faf9d9dad8d851075eba26038338d90e6d3a", submodules=True ) - version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) - version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) - version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) - version("1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True) - version("1.2.0", commit="255a214ec747b7bdde7a6d8151c083067b4d0907", submodules=True) - version("1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True) - version("1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True) - version("1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True) - version("1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283") - version("0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5") - version("0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690") + version( + "1.5.0", tag="v1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True + ) + version( + "1.4.1", tag="v1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True + ) + version( + "1.4.0", tag="v1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True + ) + version( + "1.3.0", tag="v1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True + ) + version( + "1.1.2", tag="v1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True + ) + version( + "1.1.1", tag="v1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True + ) + version( + "1.1.0", tag="v1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True + ) + version("1.0.0", tag="v1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283") + version("0.99.2", tag="v0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5") + version("0.99.1", tag="v0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690") version("main", branch="main", submodules=True) version("develop", branch="develop", submodules=True) version( @@ -49,7 +65,6 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." 
) - variant("logging", default=False, description="Enable/Disable spdlog based logging") # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") @@ -64,7 +79,12 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): "~hiop~ipopt+python @:1.5.0", msg="ExaGO Python wrapper requires at least one solver enabled.", ) - + conflicts( + "+hiop~mpi ^hiop@1.0.0:~mpi", + when="@1.5.1:1.6.1", + msg="#18 - builds with hiop and without MPI cause compile time errors", + ) + conflicts("+python~mpi", msg="#16 - Python wrapper requires MPI enabled") # Dependencies depends_on("python@3.6:3.10", when="@1.3.0:1.5+python") depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python") @@ -76,7 +96,6 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("cuda", when="+cuda") depends_on("raja", when="+raja") depends_on("umpire", when="+raja") - depends_on("cmake@3.18:", type="build") # Profiling @@ -117,7 +136,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("hiop@0.3.99:", when="@0.99:+hiop") depends_on("hiop@0.5.1:", when="@1.1.0:+hiop") depends_on("hiop@0.5.3:", when="@1.3.0:+hiop") - depends_on("hiop@0.7.0:", when="@1.5.0:+hiop") + depends_on("hiop@0.7.0:1.0.0", when="@1.5.0:+hiop") depends_on("hiop~mpi", when="+hiop~mpi") depends_on("hiop+mpi", when="+hiop+mpi") @@ -191,7 +210,6 @@ def cmake_args(self): self.define_from_variant("EXAGO_ENABLE_HIOP", "hiop"), self.define_from_variant("EXAGO_ENABLE_IPOPT", "ipopt"), self.define_from_variant("EXAGO_ENABLE_PYTHON", "python"), - self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), ] ) From fee88d773ad7a906025a433f330c4faa613bd763 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 26 Oct 2023 12:08:55 -0700 Subject: [PATCH 333/408] unmaintained a* packages: update to use f-strings (#40467) --- .../abi-compliance-checker/package.py | 2 +- .../builtin/packages/abi-dumper/package.py | 2 +- .../repos/builtin/packages/abinit/package.py | 79 ++++++++----------- .../repos/builtin/packages/abyss/package.py | 8 +- .../repos/builtin/packages/accfft/package.py | 10 +-- .../repos/builtin/packages/ace/package.py | 2 +- .../repos/builtin/packages/ack/package.py | 2 +- .../builtin/packages/acpica-tools/package.py | 2 +- .../builtin/packages/activeharmony/package.py | 2 +- .../repos/builtin/packages/adf/package.py | 2 +- .../repos/builtin/packages/alglib/package.py | 2 +- .../builtin/packages/alsa-lib/package.py | 4 +- .../repos/builtin/packages/amg2013/package.py | 6 +- .../repos/builtin/packages/amg2023/package.py | 2 +- .../repos/builtin/packages/amp/package.py | 4 +- .../builtin/packages/anicalculator/package.py | 2 +- .../repos/builtin/packages/ape/package.py | 6 +- .../builtin/packages/apr-util/package.py | 24 +++--- .../repos/builtin/packages/apr/package.py | 2 +- .../repos/builtin/packages/aragorn/package.py | 2 +- .../repos/builtin/packages/archer/package.py | 8 +- .../repos/builtin/packages/argon2/package.py | 2 +- .../builtin/packages/armadillo/package.py | 12 +-- .../builtin/packages/arpack-ng/package.py | 6 +- .../builtin/packages/asdcplib/package.py | 2 +- .../repos/builtin/packages/aspa/package.py | 4 +- .../repos/builtin/packages/aspcud/package.py | 12 ++- .../builtin/packages/aspera-cli/package.py | 2 +- .../repos/builtin/packages/astral/package.py | 2 +- .../repos/builtin/packages/astyle/package.py | 4 +- .../builtin/packages/at-spi2-atk/package.py | 2 +- .../builtin/packages/at-spi2-core/package.py | 2 +- .../repos/builtin/packages/atk/package.py | 4 +- 
.../repos/builtin/packages/atlas/package.py | 6 +- .../builtin/packages/atom-dft/package.py | 4 +- .../repos/builtin/packages/atompaw/package.py | 6 +- .../builtin/packages/audacious/package.py | 2 +- .../builtin/packages/augustus/package.py | 32 ++++---- .../repos/builtin/packages/authd/package.py | 2 +- .../builtin/packages/autodock-vina/package.py | 4 +- .../repos/builtin/packages/autogen/package.py | 2 +- .../repos/builtin/packages/avizo/package.py | 16 ++-- 42 files changed, 137 insertions(+), 164 deletions(-) diff --git a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py index 05d57471ba24f2..6f575badd208dc 100644 --- a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py +++ b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py @@ -22,4 +22,4 @@ class AbiComplianceChecker(MakefilePackage): depends_on("universal-ctags") def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/abi-dumper/package.py b/var/spack/repos/builtin/packages/abi-dumper/package.py index f649bf9db20874..584eed3664cdd0 100644 --- a/var/spack/repos/builtin/packages/abi-dumper/package.py +++ b/var/spack/repos/builtin/packages/abi-dumper/package.py @@ -25,4 +25,4 @@ class AbiDumper(Package): depends_on("vtable-dumper@1.1:") def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index 445fc60bbe0985..a343bf69d7e3c4 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -101,11 +101,7 @@ class Abinit(AutotoolsPackage): # TODO: The logic here can be reversed with the new concretizer. Instead of # using `conflicts`, `depends_on` could be used instead. 
for fftw in ["amdfftw", "cray-fftw", "fujitsu-fftw", "fftw"]: - conflicts( - "+openmp", - when="^{0}~openmp".format(fftw), - msg="Need to request {0} +openmp".format(fftw), - ) + conflicts("+openmp", when=f"^{fftw}~openmp", msg=f"Need to request {fftw} +openmp") mkl_message = "Need to set dependent variant to threads=openmp" conflicts("+openmp", when="^intel-mkl threads=none", msg=mkl_message) @@ -137,34 +133,28 @@ def configure_args(self): oapp = options.append if "@:8" in spec: - oapp("--enable-optim={0}".format(self.spec.variants["optimization-flavor"].value)) + oapp(f"--enable-optim={self.spec.variants['optimization-flavor'].value}") else: - oapp("--with-optim-flavor={0}".format(self.spec.variants["optimization-flavor"].value)) + oapp(f"--with-optim-flavor={self.spec.variants['optimization-flavor'].value}") if "+wannier90" in spec: if "@:8" in spec: - oapp( - "--with-wannier90-libs=-L{0}".format( - spec["wannier90"].prefix.lib + " -lwannier -lm" - ) - ) - oapp("--with-wannier90-incs=-I{0}".format(spec["wannier90"].prefix.modules)) - oapp("--with-wannier90-bins={0}".format(spec["wannier90"].prefix.bin)) + oapp(f"--with-wannier90-libs=-L{spec['wannier90'].prefix.lib} -lwannier -lm") + oapp(f"--with-wannier90-incs=-I{spec['wannier90'].prefix.modules}") + oapp(f"--with-wannier90-bins={spec['wannier90'].prefix.bin}") oapp("--enable-connectors") oapp("--with-dft-flavor=atompaw+libxc+wannier90") else: options.extend( [ - "WANNIER90_CPPFLAGS=-I{0}".format(spec["wannier90"].prefix.modules), - "WANNIER90_LIBS=-L{0} {1}".format( - spec["wannier90"].prefix.lib, "-lwannier" - ), + f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", + f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib} -lwannier", ] ) else: if "@:9.8" in spec: - oapp("--with-fftw={0}".format(spec["fftw-api"].prefix)) - oapp("--with-hdf5={0}".format(spec["hdf5"].prefix)) + oapp(f"--with-fftw={spec['fftw-api'].prefix}") + oapp(f"--with-hdf5={spec['hdf5'].prefix}") if "@:8" in spec: oapp("--with-dft-flavor=atompaw+libxc") @@ -172,9 +162,9 @@ def configure_args(self): "--without-wannier90", if "+mpi" in spec: - oapp("CC={0}".format(spec["mpi"].mpicc)) - oapp("CXX={0}".format(spec["mpi"].mpicxx)) - oapp("FC={0}".format(spec["mpi"].mpifc)) + oapp(f"CC={spec['mpi'].mpicc}") + oapp(f"CXX={spec['mpi'].mpicxx}") + oapp(f"FC={spec['mpi'].mpifc}") # MPI version: # let the configure script auto-detect MPI support from mpi_prefix @@ -208,14 +198,14 @@ def configure_args(self): if "+scalapack" in spec: linalg = spec["scalapack"].libs + linalg if "@:8" in spec: - linalg_flavor = "scalapack+{0}".format(linalg_flavor) + linalg_flavor = f"scalapack+{linalg_flavor}" if "@:8" in spec: - oapp("--with-linalg-libs={0}".format(linalg.ld_flags)) + oapp(f"--with-linalg-libs={linalg.ld_flags}") else: - oapp("LINALG_LIBS={0}".format(linalg.ld_flags)) + oapp(f"LINALG_LIBS={linalg.ld_flags}") - oapp("--with-linalg-flavor={0}".format(linalg_flavor)) + oapp(f"--with-linalg-flavor={linalg_flavor}") if "^mkl" in spec: fftflavor = "dfti" @@ -225,32 +215,32 @@ def configure_args(self): else: fftflavor, fftlibs = "fftw3", "-lfftw3 -lfftw3f" - oapp("--with-fft-flavor={0}".format(fftflavor)) + oapp(f"--with-fft-flavor={fftflavor}") if "@:8" in spec: if "^mkl" in spec: - oapp("--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags)) - oapp("--with-fft-libs={0}".format(spec["fftw-api"].libs.ld_flags)) + oapp(f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}") + oapp(f"--with-fft-libs={spec['fftw-api'].libs.ld_flags}") else: options.extend( [ - 
"--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags), - "--with-fft-libs=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs), + f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}", + f"--with-fft-libs=-L{spec['fftw-api'].prefix.lib} {fftlibs}", ] ) else: if "^mkl" in spec: options.extend( [ - "FFT_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags), - "FFT_LIBs={0}".format(spec["fftw-api"].libs.ld_flags), + f"FFT_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", + f"FFT_LIBs={spec['fftw-api'].libs.ld_flags}", ] ) else: options.extend( [ - "FFTW3_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags), - "FFTW3_LIBS=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs), + f"FFTW3_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", + f"FFTW3_LIBS=-L{spec['fftw-api'].prefix.lib} {fftlibs}", ] ) @@ -259,12 +249,12 @@ def configure_args(self): if "@:8" in spec: options.extend( [ - "--with-libxc-incs={0}".format(libxc.headers.cpp_flags), - "--with-libxc-libs={0}".format(libxc.libs.ld_flags + " -lm"), + f"--with-libxc-incs={libxc.headers.cpp_flags}", + f"--with-libxc-libs={libxc.libs.ld_flags + ' -lm'}", ] ) else: - oapp("--with-libxc={0}".format(libxc.prefix)) + oapp(f"--with-libxc={libxc.prefix}") # Netcdf4/HDF5 hdf5 = spec["hdf5:hl"] @@ -276,24 +266,21 @@ def configure_args(self): # to link with the high level HDF5 library options.extend( [ - "--with-netcdf-incs={0}".format( + "--with-netcdf-incs={}".format( netcdfc.headers.cpp_flags + " " + netcdff.headers.cpp_flags ), - "--with-netcdf-libs={0}".format( + "--with-netcdf-libs={}".format( netcdff.libs.ld_flags + " " + hdf5.libs.ld_flags ), ] ) else: options.extend( - [ - "--with-netcdf={0}".format(netcdfc.prefix), - "--with-netcdf-fortran={0}".format(netcdff.prefix), - ] + [f"--with-netcdf={netcdfc.prefix}", f"--with-netcdf-fortran={netcdff.prefix}"] ) if self.spec.satisfies("%fj"): - oapp("FCFLAGS_MODDIR=-M{0}".format(join_path(self.stage.source_path, "src/mods"))) + oapp(f"FCFLAGS_MODDIR=-M{join_path(self.stage.source_path, 'src/mods')}") return options diff --git a/var/spack/repos/builtin/packages/abyss/package.py b/var/spack/repos/builtin/packages/abyss/package.py index c345626761d1c3..1cb46a8957eed1 100644 --- a/var/spack/repos/builtin/packages/abyss/package.py +++ b/var/spack/repos/builtin/packages/abyss/package.py @@ -60,12 +60,12 @@ class Abyss(AutotoolsPackage): def configure_args(self): maxk = int(self.spec.variants["maxk"].value) args = [ - "--with-boost=%s" % self.spec["boost"].prefix, - "--with-sqlite=%s" % self.spec["sqlite"].prefix, - "--with-mpi=%s" % self.spec["mpi"].prefix, + f"--with-boost={self.spec['boost'].prefix}", + f"--with-sqlite={self.spec['sqlite'].prefix}", + f"--with-mpi={self.spec['mpi'].prefix}", ] if maxk: - args.append("--enable-maxk=%s" % maxk) + args.append(f"--enable-maxk={maxk}") if self.spec["mpi"].name == "mpich": args.append("--enable-mpich") return args diff --git a/var/spack/repos/builtin/packages/accfft/package.py b/var/spack/repos/builtin/packages/accfft/package.py index aa32f1b0a53e81..eb99aec48492d9 100644 --- a/var/spack/repos/builtin/packages/accfft/package.py +++ b/var/spack/repos/builtin/packages/accfft/package.py @@ -32,15 +32,15 @@ class Accfft(CMakePackage, CudaPackage): def cmake_args(self): spec = self.spec args = [ - "-DFFTW_ROOT={0}".format(spec["fftw"].prefix), - "-DFFTW_USE_STATIC_LIBS=false", - "-DBUILD_GPU={0}".format("true" if "+cuda" in spec else "false"), - "-DBUILD_SHARED={0}".format("true" if "+shared" in spec else "false"), + self.define("FFTW_ROOT", 
spec["fftw"].prefix), + self.define("FFTW_USE_STATIC_LIBS", "false"), + self.define("BUILD_GPU", str(spec.satisfies("+cuda")).lower()), + self.define("BUILD_SHARED", str(spec.satisfies("+shared")).lower()), ] if "+cuda" in spec: cuda_arch = [x for x in spec.variants["cuda_arch"].value if x] if cuda_arch: - args.append("-DCUDA_NVCC_FLAGS={0}".format(" ".join(self.cuda_flags(cuda_arch)))) + args.append(f"-DCUDA_NVCC_FLAGS={' '.join(self.cuda_flags(cuda_arch))}") return args diff --git a/var/spack/repos/builtin/packages/ace/package.py b/var/spack/repos/builtin/packages/ace/package.py index c152bbdeb60fc8..afd164fc31527c 100644 --- a/var/spack/repos/builtin/packages/ace/package.py +++ b/var/spack/repos/builtin/packages/ace/package.py @@ -43,4 +43,4 @@ def edit(self, spec, prefix): "include $(ACE_ROOT)/include/makeinclude/" "platform_linux" + supported[self.compiler.name] + ".GNU\n" ) - f.write("INSTALL_PREFIX=%s" % prefix) + f.write(f"INSTALL_PREFIX={prefix}") diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index 684106ff5db260..320f9e818cc60a 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -41,7 +41,7 @@ class Ack(Package): def install(self, spec, prefix): mkdirp(prefix.bin) - ack_source = "ack-{0}-single-file".format(self.version) + ack_source = f"ack-{self.version}-single-file" ack_installed = join_path(prefix.bin, "ack") # install source diff --git a/var/spack/repos/builtin/packages/acpica-tools/package.py b/var/spack/repos/builtin/packages/acpica-tools/package.py index 55fee583c7841b..c9d063a5a975f1 100644 --- a/var/spack/repos/builtin/packages/acpica-tools/package.py +++ b/var/spack/repos/builtin/packages/acpica-tools/package.py @@ -19,4 +19,4 @@ class AcpicaTools(MakefilePackage): depends_on("bison", type="build") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install") + make(f"PREFIX={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py index e3f2d92955a6c4..62af3515b9051c 100644 --- a/var/spack/repos/builtin/packages/activeharmony/package.py +++ b/var/spack/repos/builtin/packages/activeharmony/package.py @@ -29,7 +29,7 @@ def setup_build_environment(self, spack_env): @when("@:4.5") def install(self, spec, prefix): - make("install", "PREFIX=%s" % prefix) + make("install", f"PREFIX={prefix}") @when("@4.6.0:") def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/adf/package.py b/var/spack/repos/builtin/packages/adf/package.py index 908cd5351faf16..244087f1116027 100644 --- a/var/spack/repos/builtin/packages/adf/package.py +++ b/var/spack/repos/builtin/packages/adf/package.py @@ -19,7 +19,7 @@ class Adf(Package): version("2017.113", sha256="666ef15d253b74c707dd14da35e7cf283ca20e21e24ed43cb953fb9d1f2f1e15") def url_for_version(self, version): - return "file://{0}/adf/adf{1}.pc64_linux.openmpi.bin.tgz".format(os.getcwd(), version) + return f"file://{os.getcwd()}/adf/adf{version}.pc64_linux.openmpi.bin.tgz" # Licensing license_required = True diff --git a/var/spack/repos/builtin/packages/alglib/package.py b/var/spack/repos/builtin/packages/alglib/package.py index f962d0fd44fc10..98ade340fd2e11 100644 --- a/var/spack/repos/builtin/packages/alglib/package.py +++ b/var/spack/repos/builtin/packages/alglib/package.py @@ -30,7 +30,7 @@ def edit(self, spec, prefix): filter_file(r"so", dso_suffix, make_file) def 
install(self, spec, prefix): - name = "libalglib.{0}".format(dso_suffix) + name = f"libalglib.{dso_suffix}" with working_dir("src"): mkdirp(prefix.lib) install(name, prefix.lib) diff --git a/var/spack/repos/builtin/packages/alsa-lib/package.py b/var/spack/repos/builtin/packages/alsa-lib/package.py index 631e9bf6851717..46e3d2f1bfa672 100644 --- a/var/spack/repos/builtin/packages/alsa-lib/package.py +++ b/var/spack/repos/builtin/packages/alsa-lib/package.py @@ -30,8 +30,8 @@ def configure_args(self): spec = self.spec args = [] if spec.satisfies("+python"): - args.append("--with-pythonlibs={0}".format(spec["python"].libs.ld_flags)) - args.append("--with-pythonincludes={0}".format(spec["python"].headers.include_flags)) + args.append(f"--with-pythonlibs={spec['python'].libs.ld_flags}") + args.append(f"--with-pythonincludes={spec['python'].headers.include_flags}") else: args.append("--disable-python") return args diff --git a/var/spack/repos/builtin/packages/amg2013/package.py b/var/spack/repos/builtin/packages/amg2013/package.py index 638e874a46cf10..def0b495a83aac 100644 --- a/var/spack/repos/builtin/packages/amg2013/package.py +++ b/var/spack/repos/builtin/packages/amg2013/package.py @@ -46,9 +46,9 @@ def build_targets(self): if "+int64" in self.spec: include_cflags.append("-DHYPRE_BIGINT") - targets.append("INCLUDE_CFLAGS={0}".format(" ".join(include_cflags))) - targets.append("INCLUDE_LFLAGS={0}".format(" ".join(include_lflags))) - targets.append("CC={0}".format(self.spec["mpi"].mpicc)) + targets.append(f"INCLUDE_CFLAGS={' '.join(include_cflags)}") + targets.append(f"INCLUDE_LFLAGS={' '.join(include_lflags)}") + targets.append(f"CC={self.spec['mpi'].mpicc}") return targets diff --git a/var/spack/repos/builtin/packages/amg2023/package.py b/var/spack/repos/builtin/packages/amg2023/package.py index 96b2dc335d74a3..a2e8b676e9a9c9 100644 --- a/var/spack/repos/builtin/packages/amg2023/package.py +++ b/var/spack/repos/builtin/packages/amg2023/package.py @@ -40,7 +40,7 @@ def cmake_args(self): cmake_options = [] cmake_options.append(self.define_from_variant("AMG_WITH_CALIPER", "caliper")) cmake_options.append(self.define_from_variant("AMG_WITH_OMP", "openmp")) - cmake_options.append("-DHYPRE_PREFIX={0}".format(self.spec["hypre"].prefix)) + cmake_options.append(self.define("HYPRE_PREFIX", self.spec["hypre"].prefix)) if self.spec["hypre"].satisfies("+cuda"): cmake_options.append("-DAMG_WITH_CUDA=ON") if self.spec["hypre"].satisfies("+rocm"): diff --git a/var/spack/repos/builtin/packages/amp/package.py b/var/spack/repos/builtin/packages/amp/package.py index 1716a13e57a36a..f0ec4071ce14a4 100644 --- a/var/spack/repos/builtin/packages/amp/package.py +++ b/var/spack/repos/builtin/packages/amp/package.py @@ -117,9 +117,7 @@ def cmake_args(self): ): if "+" + vname in spec: tpl_list.append(vname.upper()) - options.append( - self.define("TPL_{0}_INSTALL_DIR".format(vname.upper()), spec[vname].prefix) - ) + options.append(self.define(f"TPL_{vname.upper()}_INSTALL_DIR", spec[vname].prefix)) if "+netcdf" in spec: tpl_list.append("NETCDF") diff --git a/var/spack/repos/builtin/packages/anicalculator/package.py b/var/spack/repos/builtin/packages/anicalculator/package.py index b14e05a67020fe..9d002975d98485 100644 --- a/var/spack/repos/builtin/packages/anicalculator/package.py +++ b/var/spack/repos/builtin/packages/anicalculator/package.py @@ -19,7 +19,7 @@ class Anicalculator(Package): https://spack.readthedocs.io/en/latest/mirrors.html""" homepage = "https://ani.jgi.doe.gov/html/download.php?" 
- url = "file://{0}/ANIcalculator_v1.tgz".format(os.getcwd()) + url = f"file://{os.getcwd()}/ANIcalculator_v1.tgz" manual_download = True version("1", sha256="236596a9a204cbcad162fc66be3506b2530b1f48f4f84d9647ccec3ca7483a43") diff --git a/var/spack/repos/builtin/packages/ape/package.py b/var/spack/repos/builtin/packages/ape/package.py index 9468a7f9a69567..39b833ab0e10d6 100644 --- a/var/spack/repos/builtin/packages/ape/package.py +++ b/var/spack/repos/builtin/packages/ape/package.py @@ -23,9 +23,9 @@ def install(self, spec, prefix): args = [] args.extend( [ - "--prefix=%s" % prefix, - "--with-gsl-prefix=%s" % spec["gsl"].prefix, - "--with-libxc-prefix=%s" % spec["libxc"].prefix, + f"--prefix={prefix}", + f"--with-gsl-prefix={spec['gsl'].prefix}", + f"--with-libxc-prefix={spec['libxc'].prefix}", ] ) diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 2351c48619f49c..dc0fad53d225d9 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -37,16 +37,16 @@ class AprUtil(AutotoolsPackage): @property def libs(self): return find_libraries( - ["libaprutil-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True + [f"libaprutil-{self.version.up_to(1)}"], root=self.prefix, recursive=True ) def configure_args(self): spec = self.spec args = [ - "--with-apr={0}".format(spec["apr"].prefix), - "--with-expat={0}".format(spec["expat"].prefix), - "--with-iconv={0}".format(spec["iconv"].prefix), + f"--with-apr={spec['apr'].prefix}", + f"--with-expat={spec['expat'].prefix}", + f"--with-iconv={spec['iconv'].prefix}", # TODO: Add support for the following database managers "--without-ndbm", "--without-berkeley-db", @@ -55,34 +55,30 @@ def configure_args(self): ] if "+crypto" in spec: - args.extend(["--with-crypto", "--with-openssl={0}".format(spec["openssl"].prefix)]) + args.extend(["--with-crypto", f"--with-openssl={spec['openssl'].prefix}"]) else: args.append("--without-crypto") if "+gdbm" in spec: - args.append("--with-gdbm={0}".format(spec["gdbm"].prefix)) + args.append(f"--with-gdbm={spec['gdbm'].prefix}") else: args.append("--without-gdbm") if "+pgsql" in spec: - args.append("--with-pgsql={0}".format(spec["postgresql"].prefix)) + args.append(f"--with-pgsql={spec['postgresql'].prefix}") else: args.append("--without-pgsql") if "+sqlite" in spec: if spec.satisfies("^sqlite@3.0:3"): - args.extend( - ["--with-sqlite3={0}".format(spec["sqlite"].prefix), "--without-sqlite2"] - ) + args.extend([f"--with-sqlite3={spec['sqlite'].prefix}", "--without-sqlite2"]) elif spec.satisfies("^sqlite@2.0:2"): - args.extend( - ["--with-sqlite2={0}".format(spec["sqlite"].prefix), "--without-sqlite3"] - ) + args.extend([f"--with-sqlite2={spec['sqlite'].prefix}", "--without-sqlite3"]) else: args.extend(["--without-sqlite2", "--without-sqlite3"]) if "+odbc" in spec: - args.append("--with-odbc={0}".format(spec["unixodbc"].prefix)) + args.append(f"--with-odbc={spec['unixodbc'].prefix}") else: args.append("--without-odbc") diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py index de82ee5817c18a..45de21e3ee02af 100644 --- a/var/spack/repos/builtin/packages/apr/package.py +++ b/var/spack/repos/builtin/packages/apr/package.py @@ -26,5 +26,5 @@ class Apr(AutotoolsPackage): @property def libs(self): return find_libraries( - ["libapr-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True + 
[f"libapr-{self.version.up_to(1)}"], root=self.prefix, recursive=True ) diff --git a/var/spack/repos/builtin/packages/aragorn/package.py b/var/spack/repos/builtin/packages/aragorn/package.py index dc55dc52bb84be..8ac7894192f457 100644 --- a/var/spack/repos/builtin/packages/aragorn/package.py +++ b/var/spack/repos/builtin/packages/aragorn/package.py @@ -31,7 +31,7 @@ class Aragorn(Package): # fix checksum error def url_for_version(self, version): - return "http://www.ansikte.se/ARAGORN/Downloads/aragorn{0}.c".format(version) + return f"http://www.ansikte.se/ARAGORN/Downloads/aragorn{version}.c" def install(self, spec, prefix): cc = Executable(spack_cc) diff --git a/var/spack/repos/builtin/packages/archer/package.py b/var/spack/repos/builtin/packages/archer/package.py index 8492eebc96b26c..52011bebd46f26 100644 --- a/var/spack/repos/builtin/packages/archer/package.py +++ b/var/spack/repos/builtin/packages/archer/package.py @@ -38,9 +38,9 @@ def patch(self): def cmake_args(self): return [ - "-DCMAKE_C_COMPILER=clang", - "-DCMAKE_CXX_COMPILER=clang++", - "-DOMP_PREFIX:PATH=%s" % self.spec["llvm-openmp-ompt"].prefix, + self.define("CMAKE_C_COMPILER", "clang"), + self.define("CMAKE_CXX_COMPILER", "clang++"), + self.define("OMP_PREFIX:PATH", self.spec["llvm-openmp-ompt"].prefix), ] @run_after("install") @@ -56,7 +56,7 @@ def test_run_parallel_example(self): raise SkipTest("Parallel test directory does not exist") test_exe = "parallel-simple" - test_src = "{0}.c".format(test_exe) + test_src = f"{test_exe}.c" with working_dir(test_dir): clang = which("clang-archer") clang("-o", test_exe, test_src) diff --git a/var/spack/repos/builtin/packages/argon2/package.py b/var/spack/repos/builtin/packages/argon2/package.py index c9762b6bc978cb..d41ba9761c1366 100644 --- a/var/spack/repos/builtin/packages/argon2/package.py +++ b/var/spack/repos/builtin/packages/argon2/package.py @@ -20,4 +20,4 @@ class Argon2(MakefilePackage): version("20161029", sha256="fe0049728b946b58b94cc6db89b34e2d050c62325d16316a534d2bedd78cd5e7") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install", "LIBRARY_REL=lib") + make(f"PREFIX={prefix}", "install", "LIBRARY_REL=lib") diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py index 9d83de741a8efb..78794086f91b7c 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -66,14 +66,14 @@ def cmake_args(self): return [ # ARPACK support - "-DARPACK_LIBRARY={0}".format(spec["arpack-ng"].libs.joined(";")), + self.define("ARPACK_LIBRARY", spec["arpack-ng"].libs.joined(";")), # BLAS support - "-DBLAS_LIBRARY={0}".format(spec["blas"].libs.joined(";")), + self.define("BLAS_LIBRARY", spec["blas"].libs.joined(";")), # LAPACK support - "-DLAPACK_LIBRARY={0}".format(spec["lapack"].libs.joined(";")), + self.define("LAPACK_LIBRARY", spec["lapack"].libs.joined(";")), # SuperLU support - "-DSuperLU_INCLUDE_DIR={0}".format(spec["superlu"].prefix.include), - "-DSuperLU_LIBRARY={0}".format(spec["superlu"].libs.joined(";")), + self.define("SuperLU_INCLUDE_DIR", spec["superlu"].prefix.include), + self.define("SuperLU_LIBRARY", spec["superlu"].libs.joined(";")), # HDF5 support - "-DDETECT_HDF5={0}".format("ON" if "+hdf5" in spec else "OFF"), + self.define("DETECT_HDF5", "ON" if spec.satisfies("+hdf5") else "OFF"), ] diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 
92176069c1f19a..c50b90d6d10f45 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -150,14 +150,14 @@ def configure_args(self): options = ( self.enable_or_disable("mpi") + [ - "--with-blas={0}".format(spec["blas"].libs.ld_flags), - "--with-lapack={0}".format(spec["lapack"].libs.ld_flags), + f"--with-blas={spec['blas'].libs.ld_flags}", + f"--with-lapack={spec['lapack'].libs.ld_flags}", ] + self.enable_or_disable("shared") ) if "+mpi" in spec: - options.append("F77={0}".format(spec["mpi"].mpif77)) + options.append(f"F77={spec['mpi'].mpif77}") return options diff --git a/var/spack/repos/builtin/packages/asdcplib/package.py b/var/spack/repos/builtin/packages/asdcplib/package.py index 7ca3b2f9f1b786..5aec849ee59480 100644 --- a/var/spack/repos/builtin/packages/asdcplib/package.py +++ b/var/spack/repos/builtin/packages/asdcplib/package.py @@ -27,6 +27,6 @@ class Asdcplib(AutotoolsPackage): def configure_args(self): spec = self.spec - args = ["--with-openssl={0}".format(spec["openssl"].prefix)] + args = [f"--with-openssl={spec['openssl'].prefix}"] return args diff --git a/var/spack/repos/builtin/packages/aspa/package.py b/var/spack/repos/builtin/packages/aspa/package.py index 6bfbad1d3926e2..8219a46b004bd3 100644 --- a/var/spack/repos/builtin/packages/aspa/package.py +++ b/var/spack/repos/builtin/packages/aspa/package.py @@ -35,12 +35,12 @@ def build_targets(self): targets = [ "--directory=exec", "--file=Makefile", - "LIBS={0} {1} {2}".format( + "LIBS={} {} {}".format( self.spec["lapack"].libs.ld_flags, self.spec["blas"].libs.ld_flags, self.spec["hdf5"].libs.ld_flags, ), - "CXX={0}".format(self.spec["mpi"].mpicxx), + f"CXX={self.spec['mpi'].mpicxx}", ] return targets diff --git a/var/spack/repos/builtin/packages/aspcud/package.py b/var/spack/repos/builtin/packages/aspcud/package.py index fbaef453e990e4..8233dcaba0e058 100644 --- a/var/spack/repos/builtin/packages/aspcud/package.py +++ b/var/spack/repos/builtin/packages/aspcud/package.py @@ -28,11 +28,9 @@ class Aspcud(CMakePackage): depends_on("clingo") def cmake_args(self): - spec = self.spec - gringo_path = join_path(spec["clingo"].prefix.bin, "gringo") - clasp_path = join_path(spec["clingo"].prefix.bin, "clasp") - args = [ - "-DASPCUD_GRINGO_PATH={0}".format(gringo_path), - "-DASPCUD_CLASP_PATH={0}".format(clasp_path), + gringo_path = join_path(self.spec["clingo"].prefix.bin, "gringo") + clasp_path = join_path(self.spec["clingo"].prefix.bin, "clasp") + return [ + self.define("ASPCUD_GRINGO_PATH", gringo_path), + self.define("ASPCUD_CLASP_PATH", clasp_path), ] - return args diff --git a/var/spack/repos/builtin/packages/aspera-cli/package.py b/var/spack/repos/builtin/packages/aspera-cli/package.py index afa63b4c33dc8f..91aa1e19e47818 100644 --- a/var/spack/repos/builtin/packages/aspera-cli/package.py +++ b/var/spack/repos/builtin/packages/aspera-cli/package.py @@ -29,7 +29,7 @@ def install(self, spec, prefix): # Update destination path filter_file( "INSTALL_DIR=~/.aspera", - "INSTALL_DIR=%s" % prefix, + f"INSTALL_DIR={prefix}", runfile, string=True, stop_at="__ARCHIVE_FOLLOWS__", diff --git a/var/spack/repos/builtin/packages/astral/package.py b/var/spack/repos/builtin/packages/astral/package.py index 3afa3691ba0b0f..af196ab9c391c4 100644 --- a/var/spack/repos/builtin/packages/astral/package.py +++ b/var/spack/repos/builtin/packages/astral/package.py @@ -31,7 +31,7 @@ def install(self, spec, prefix): make() mkdirp(prefix.bin) install_tree("lib", prefix.tools.lib) - 
jar_file = "astral.{v}.jar".format(v=self.version) + jar_file = f"astral.{self.version}.jar" install(jar_file, prefix.tools) script_sh = join_path(os.path.dirname(__file__), "astral.sh") diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py index 951661004b724a..ef4fe29378ffa0 100644 --- a/var/spack/repos/builtin/packages/astyle/package.py +++ b/var/spack/repos/builtin/packages/astyle/package.py @@ -30,11 +30,11 @@ def build_directory(self): def edit(self, spec, prefix): makefile = join_path(self.build_directory, "Makefile") - filter_file(r"^CXX\s*=.*", "CXX=%s" % spack_cxx, makefile) + filter_file(r"^CXX\s*=.*", f"CXX={spack_cxx}", makefile) # If the group is not a user account, the installation will fail, # so remove the -o $ (USER) -g $ (USER) parameter. filter_file(r"^INSTALL=.*", "INSTALL=install", makefile) @property def install_targets(self): - return ["install", "prefix={0}".format(self.prefix)] + return ["install", f"prefix={self.prefix}"] diff --git a/var/spack/repos/builtin/packages/at-spi2-atk/package.py b/var/spack/repos/builtin/packages/at-spi2-atk/package.py index 27e875f9f65c30..6e2f492112ce25 100644 --- a/var/spack/repos/builtin/packages/at-spi2-atk/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-atk/package.py @@ -27,4 +27,4 @@ class AtSpi2Atk(MesonPackage): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-atk" - return url + "/%s/at-spi2-atk-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/at-spi2-atk-{version}.tar.xz" diff --git a/var/spack/repos/builtin/packages/at-spi2-core/package.py b/var/spack/repos/builtin/packages/at-spi2-core/package.py index f1bf5a61c86fca..ec8cbd5e23c68b 100644 --- a/var/spack/repos/builtin/packages/at-spi2-core/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-core/package.py @@ -45,7 +45,7 @@ def patch(self): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-core" - return url + "/%s/at-spi2-core-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/at-spi2-core-{version}.tar.xz" def setup_run_environment(self, env): env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 41dec1c587080d..52849669ecca01 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -43,7 +43,7 @@ class Atk(Package): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/atk" - return url + "/%s/atk-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/atk-{version}.tar.xz" def setup_run_environment(self, env): env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) @@ -64,7 +64,7 @@ def install(self, spec, prefix): @when("@:2.27") def install(self, spec, prefix): - configure("--prefix={0}".format(prefix)) + configure(f"--prefix={prefix}") make() if self.run_tests: make("check") diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index d23a16cb1ba16b..ecc6379b570c62 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ 
b/var/spack/repos/builtin/packages/atlas/package.py @@ -109,11 +109,11 @@ def install(self, spec, prefix): # Lapack resource to provide full lapack build. Note that # ATLAS only provides a few LAPACK routines natively. - options.append("--with-netlib-lapack-tarfile=%s" % self.stage[1].archive_file) + options.append(f"--with-netlib-lapack-tarfile={self.stage[1].archive_file}") with working_dir("spack-build", create=True): configure = Executable("../configure") - configure("--prefix=%s" % prefix, *options) + configure(f"--prefix={prefix}", *options) make() make("check") make("ptcheck") @@ -147,7 +147,7 @@ def install_test(self): source_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.c") blessed_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.output") - include_flags = ["-I%s" % self.spec.prefix.include] + include_flags = [f"-I{self.spec.prefix.include}"] link_flags = self.spec["atlas"].libs.ld_flags.split() output = compile_c_and_execute(source_file, include_flags, link_flags) diff --git a/var/spack/repos/builtin/packages/atom-dft/package.py b/var/spack/repos/builtin/packages/atom-dft/package.py index 9015067428fb15..3f8c5e7756303a 100644 --- a/var/spack/repos/builtin/packages/atom-dft/package.py +++ b/var/spack/repos/builtin/packages/atom-dft/package.py @@ -24,8 +24,8 @@ def edit(self, spec, prefix): @property def build_targets(self): return [ - "XMLF90_ROOT=%s" % self.spec["xmlf90"].prefix, - "GRIDXC_ROOT=%s" % self.spec["libgridxc"].prefix, + f"XMLF90_ROOT={self.spec['xmlf90'].prefix}", + f"GRIDXC_ROOT={self.spec['libgridxc'].prefix}", "FC=fc", ] diff --git a/var/spack/repos/builtin/packages/atompaw/package.py b/var/spack/repos/builtin/packages/atompaw/package.py index 7cc4b4d417fc0f..f0ea750583910a 100644 --- a/var/spack/repos/builtin/packages/atompaw/package.py +++ b/var/spack/repos/builtin/packages/atompaw/package.py @@ -49,8 +49,8 @@ def configure_args(self): spec = self.spec linalg = spec["lapack"].libs + spec["blas"].libs return [ - "--with-linalg-libs=%s" % linalg.ld_flags, + f"--with-linalg-libs={linalg.ld_flags}", "--enable-libxc", - "--with-libxc-incs=-I%s" % spec["libxc"].prefix.include, - "--with-libxc-libs=-L%s -lxcf90 -lxc" % spec["libxc"].prefix.lib, + f"--with-libxc-incs=-I{spec['libxc'].prefix.include}", + f"--with-libxc-libs=-L{spec['libxc'].prefix.lib} -lxcf90 -lxc", ] diff --git a/var/spack/repos/builtin/packages/audacious/package.py b/var/spack/repos/builtin/packages/audacious/package.py index 4cce09e0fcf23f..1d6634780b5870 100644 --- a/var/spack/repos/builtin/packages/audacious/package.py +++ b/var/spack/repos/builtin/packages/audacious/package.py @@ -28,7 +28,7 @@ class Audacious(AutotoolsPackage): def patch(self): search_path_args = " ".join(self.autoreconf_search_path_args) - search_path_str = "-I m4 {0}".format(search_path_args) + search_path_str = f"-I m4 {search_path_args}" filter_file("-I m4", search_path_str, "autogen.sh") def autoreconf(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/augustus/package.py b/var/spack/repos/builtin/packages/augustus/package.py index e0e002d6fcef81..2c5cfa5c0d531e 100644 --- a/var/spack/repos/builtin/packages/augustus/package.py +++ b/var/spack/repos/builtin/packages/augustus/package.py @@ -69,7 +69,7 @@ def edit(self, spec, prefix): filter_file("g++", spack_cxx, "makefile", string=True) filter_file( "g++ -I/usr/include/boost", - "{0} -I{1}".format(spack_cxx, self.spec["boost"].prefix.include), + f"{spack_cxx} -I{self.spec['boost'].prefix.include}", 
"src/subdir.mk", string=True, ) @@ -101,7 +101,7 @@ def edit(self, spec, prefix): with working_dir(join_path("auxprogs", "filterBam", "src")): makefile = FileFilter("Makefile") - makefile.filter("BAMTOOLS = .*", "BAMTOOLS = {0}".format(bamtools)) + makefile.filter("BAMTOOLS = .*", f"BAMTOOLS = {bamtools}") makefile.filter("INCLUDES = *", "INCLUDES = -I$(BAMTOOLS)/include/bamtools ") if "bamtools@2.5:" in spec: makefile.filter( @@ -113,32 +113,30 @@ def edit(self, spec, prefix): ) with working_dir(join_path("auxprogs", "bam2hints")): makefile = FileFilter("Makefile") - makefile.filter("/usr/include/bamtools", "{0}/include/bamtools".format(bamtools)) + makefile.filter("/usr/include/bamtools", f"{bamtools}/include/bamtools") if "bamtools@2.5:" in spec: makefile.filter( - "LIBS = -lbamtools -lz", - "LIBS = {0}/lib64" "/libbamtools.a -lz".format(bamtools), + "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib64/libbamtools.a -lz" ) if "bamtools@:2.4" in spec: makefile.filter( - "LIBS = -lbamtools -lz", - "LIBS = {0}/lib/bamtools" "/libbamtools.a -lz".format(bamtools), + "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib/bamtools/libbamtools.a -lz" ) if self.version < Version("3.4.0"): with working_dir(join_path("auxprogs", "bam2wig")): makefile = FileFilter("Makefile") # point tools to spack installations - makefile.filter("BCFTOOLS=.*$", "BCFTOOLS={0}/include".format(bcftools)) - makefile.filter("SAMTOOLS=.*$", "SAMTOOLS={0}/include".format(samtools)) - makefile.filter("HTSLIB=.*$", "HTSLIB={0}/include".format(htslib)) + makefile.filter("BCFTOOLS=.*$", f"BCFTOOLS={bcftools}/include") + makefile.filter("SAMTOOLS=.*$", f"SAMTOOLS={samtools}/include") + makefile.filter("HTSLIB=.*$", f"HTSLIB={htslib}/include") # fix bad linking dirs makefile.filter("$(SAMTOOLS)/libbam.a", "$(SAMTOOLS)/../lib/libbam.a", string=True) makefile.filter("$(HTSLIB)/libhts.a", "$(HTSLIB)/../lib/libhts.a", string=True) with working_dir(join_path("auxprogs", "checkTargetSortedness")): makefile = FileFilter("Makefile") - makefile.filter("SAMTOOLS.*=.*$", "SAMTOOLS={0}/include".format(samtools)) + makefile.filter("SAMTOOLS.*=.*$", f"SAMTOOLS={samtools}/include") makefile.filter("LIBS=-lbam", "LIBS=$(SAMTOOLS)/../lib/libbam.a", string=True) else: mysql = self.spec["mysql-client"].prefix @@ -147,12 +145,12 @@ def edit(self, spec, prefix): with working_dir("src"): makefile = FileFilter("Makefile") - makefile.filter(r"/usr/include/mysql\+\+", "{0}/include/mysql++".format(mysqlpp)) + makefile.filter(r"/usr/include/mysql\+\+", f"{mysqlpp}/include/mysql++") if "^mariadb-c-client" in spec: - makefile.filter("/usr/include/mysql", "{0}/include/mariadb".format(mysql)) + makefile.filter("/usr/include/mysql", f"{mysql}/include/mariadb") else: - makefile.filter("/usr/include/mysql", "{0}/include/mysql".format(mysql)) - makefile.filter("/usr/include/lpsolve", "{0}/include/lpsolve".format(lpsolve)) + makefile.filter("/usr/include/mysql", f"{mysql}/include/mysql") + makefile.filter("/usr/include/lpsolve", f"{lpsolve}/include/lpsolve") def install(self, spec, prefix): install_tree("bin", join_path(self.spec.prefix, "bin")) @@ -163,12 +161,12 @@ def install(self, spec, prefix): def filter_sbang(self): with working_dir(self.prefix.scripts): pattern = "^#!.*" - repl = "#!{0}".format(self.spec["perl"].command.path) + repl = f"#!{self.spec['perl'].command.path}" files = glob.glob("*.pl") for file in files: filter_file(pattern, repl, *files, backup=False) - repl = "#!{0}".format(self.spec["python"].command.path) + repl = 
f"#!{self.spec['python'].command.path}" files = glob.glob("*.py") for file in files: filter_file(pattern, repl, *files, backup=False) diff --git a/var/spack/repos/builtin/packages/authd/package.py b/var/spack/repos/builtin/packages/authd/package.py index b2cee813c1eae0..dbb290839bc916 100644 --- a/var/spack/repos/builtin/packages/authd/package.py +++ b/var/spack/repos/builtin/packages/authd/package.py @@ -20,4 +20,4 @@ def setup_run_environment(self, env): env.prepend_path("PATH", self.prefix.sbin) def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/autodock-vina/package.py b/var/spack/repos/builtin/packages/autodock-vina/package.py index dadf50ea054ab3..8ca01804d8c0dc 100644 --- a/var/spack/repos/builtin/packages/autodock-vina/package.py +++ b/var/spack/repos/builtin/packages/autodock-vina/package.py @@ -44,10 +44,10 @@ def edit(self, spec, prefix): with working_dir(self.build_directory): makefile = FileFilter("Makefile") makefile.filter( - "BOOST_INCLUDE = .*", "BOOST_INCLUDE = %s" % self.spec["boost"].prefix.include + "BOOST_INCLUDE = .*", f"BOOST_INCLUDE = {self.spec['boost'].prefix.include}" ) makefile.filter("C_PLATFORM=.*", "C_PLATFORM=-pthread") - makefile.filter("GPP=.*", "GPP=%s" % spack_cxx) + makefile.filter("GPP=.*", f"GPP={spack_cxx}") def build(self, spec, prefix): with working_dir(self.build_directory): diff --git a/var/spack/repos/builtin/packages/autogen/package.py b/var/spack/repos/builtin/packages/autogen/package.py index 2ecc434106eec3..54b088beb599c6 100644 --- a/var/spack/repos/builtin/packages/autogen/package.py +++ b/var/spack/repos/builtin/packages/autogen/package.py @@ -36,7 +36,7 @@ def configure_args(self): ] if "+xml" in spec: - args.append("--with-libxml2={0}".format(spec["libxml2"].prefix)) + args.append(f"--with-libxml2={spec['libxml2'].prefix}") else: args.append("--without-libxml2") diff --git a/var/spack/repos/builtin/packages/avizo/package.py b/var/spack/repos/builtin/packages/avizo/package.py index 42c201e780ed0a..43364919cd757f 100644 --- a/var/spack/repos/builtin/packages/avizo/package.py +++ b/var/spack/repos/builtin/packages/avizo/package.py @@ -24,25 +24,25 @@ class Avizo(Package): version( "2020.1", sha256="9321aaa276567eebf116e268353c33a4c930d768d22793f921338e1d8cefe991", - url="file://{0}/Avizo-20201-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20201-Linux64-gcc48.bin", expand=False, ) version( "2019.4", sha256="a637720535bcbe254ab56368004a9544c64ec36186373fa24f26cee279685248", - url="file://{0}/Avizo-20194-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20194-Linux64-gcc48.bin", expand=False, ) version( "2019.3", sha256="be109df81e2f7238f234862367841dae05e76cc62218c1f36b1d9bc9514ce5f7", - url="file://{0}/Avizo-20193-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20193-Linux64-gcc48.bin", expand=False, ) version( "9.7.0", sha256="9c9b9e81957387f4218df0c5adbb80717e9ae80ab3ca6ff8da523f7f499dcc5b", - url="file://{0}/Avizo-970-Linux64-gcc44.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-970-Linux64-gcc44.bin", expand=False, ) @@ -67,15 +67,11 @@ def setup_run_environment(self, env): def install(self, spec, prefix): ver = self.version.joined sh = which("sh") - sh( - "Avizo-{0}-Linux64-gcc{1}.bin".format(ver, self.gcc_ver[self.version.string]), - "--noexec", - "--keep", - ) + 
sh(f"Avizo-{ver}-Linux64-gcc{self.gcc_ver[self.version.string]}.bin", "--noexec", "--keep") with working_dir("Avizo"): avizo_tar = tarfile.open( - name="Avizo-{0}-Linux64-gcc{1}.tar.bz2".format( + name="Avizo-{}-Linux64-gcc{}.tar.bz2".format( self.version, self.gcc_ver[self.version.string] ) ) From 2d8897aff5a6c9cd7137e20c0d96d7684eaa4f23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torbj=C3=B6rn=20L=C3=B6nnemark?= Date: Thu, 26 Oct 2023 21:11:43 +0200 Subject: [PATCH 334/408] curl: Fix librtmp variant (#40713) * rtmpdump: New package * curl: Fix librtmp variant Add the previously missing dependency required for rtmp support. The variant has been broken since its addition in PR #25166. Fixes one of the two issues reported in #26887. --- .../repos/builtin/packages/curl/package.py | 1 + .../packages/rtmpdump/missing-include.patch | 23 ++ .../builtin/packages/rtmpdump/package.py | 38 +++ .../rtmpdump/rtmpdump-fix-chunk-size.patch | 48 ++++ .../rtmpdump/rtmpdump-openssl-1.1-v2.patch | 248 ++++++++++++++++++ .../rtmpdump-swf_vertification_type_2.patch | 14 + ...dump-swf_vertification_type_2_part_2.patch | 22 ++ 7 files changed, 394 insertions(+) create mode 100644 var/spack/repos/builtin/packages/rtmpdump/missing-include.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/package.py create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index 46894046df60a4..362b559ab63aa0 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -305,6 +305,7 @@ class Curl(NMakePackage, AutotoolsPackage): depends_on("libssh2", when="+libssh2") depends_on("libssh", when="+libssh") depends_on("krb5", when="+gssapi") + depends_on("rtmpdump", when="+librtmp") # https://github.com/curl/curl/pull/9054 patch("easy-lock-sched-header.patch", when="@7.84.0") diff --git a/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch new file mode 100644 index 00000000000000..4325ed07381f54 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch @@ -0,0 +1,23 @@ +https://bugs.gentoo.org/828082 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -28,6 +28,7 @@ + #include + #include + #include ++#include + + #include "rtmp_sys.h" + #include "log.h" +diff --git a/librtmp/hashswf.c b/librtmp/hashswf.c +index 32b2eed..e3669e3 100644 +--- a/librtmp/hashswf.c ++++ b/librtmp/hashswf.c +@@ -25,6 +25,7 @@ + #include + #include + #include ++#include + + #include "rtmp_sys.h" + #include "log.h" diff --git a/var/spack/repos/builtin/packages/rtmpdump/package.py b/var/spack/repos/builtin/packages/rtmpdump/package.py new file mode 100644 index 00000000000000..a868e6e3d0d8c9 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
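The curl fix above reduces to one variant-gated dependency, `depends_on("rtmpdump", when="+librtmp")`, plus the new rtmpdump package that provides the library. As a hedged sketch of that pattern, here is a hypothetical package (its name, URL and checksum are placeholders); the `variant`, `depends_on(..., when=...)` and `enable_or_disable` directives are the same ones used throughout the diffs in this series.

from spack.package import *


class Mytool(AutotoolsPackage):
    """Toy package; only the variant/dependency wiring matters here."""

    homepage = "https://example.org/mytool"  # placeholder
    url = "https://example.org/mytool-1.0.tar.gz"  # placeholder

    # Placeholder checksum; a real package needs the tarball's actual sha256.
    version("1.0", sha256="0" * 64)

    variant("rtmp", default=False, description="Enable RTMP streaming support")

    # Only added to the dependency DAG when the variant is on; this is the
    # kind of line that was missing from curl for +librtmp.
    depends_on("rtmpdump", when="+rtmp")

    def configure_args(self):
        # +rtmp / ~rtmp turn into --enable-rtmp / --disable-rtmp
        return self.enable_or_disable("rtmp")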
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Rtmpdump(MakefilePackage): + """rtmpdump is a toolkit for RTMP streams.""" + + homepage = "https://rtmpdump.mplayerhq.hu/" + git = "https://git.ffmpeg.org/rtmpdump.git" + + maintainers("tobbez") + + license("GPL-2.0-or-later") + + version("2021-02-19", commit="f1b83c10d8beb43fcc70a6e88cf4325499f25857") + + variant("tls", default="openssl", description="TLS backend", values=("gnutls", "openssl")) + + depends_on("openssl@:3", when="tls=openssl") + depends_on("gnutls", when="tls=gnutls") + depends_on("zlib-api") + + patch("missing-include.patch") + patch("rtmpdump-fix-chunk-size.patch") + patch("rtmpdump-openssl-1.1-v2.patch") + patch("rtmpdump-swf_vertification_type_2.patch") + patch("rtmpdump-swf_vertification_type_2_part_2.patch") + + @property + def build_targets(self): + return [f"CRYPTO={self.spec.variants['tls'].value.upper()}"] + + def install(self, spec, prefix): + make("install", f"prefix={prefix}", "sbindir=$(bindir)") diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch new file mode 100644 index 00000000000000..1c6cfdc6261075 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch @@ -0,0 +1,48 @@ +https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=bf39fb1177ee77eee6c214a7393cc0054958ce08 +https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=69bc162319b12e9b6c6d3ea345dbf7c218753594 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index a2863b0..ac1b3be 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2077,6 +2077,29 @@ RTMP_SendClientBW(RTMP *r) + } + + static int ++SendClientChunkSize(RTMP *r, int chunkSize) ++{ ++ RTMPPacket packet; ++ char pbuf[256], *pend = pbuf + sizeof(pbuf); ++ int ret; ++ ++ packet.m_nChannel = 0x02; /* control channel (invoke) */ ++ packet.m_headerType = RTMP_PACKET_SIZE_LARGE; ++ packet.m_packetType = RTMP_PACKET_TYPE_CHUNK_SIZE; ++ packet.m_nTimeStamp = 0; ++ packet.m_nInfoField2 = 0; ++ packet.m_hasAbsTimestamp = 0; ++ packet.m_body = pbuf + RTMP_MAX_HEADER_SIZE; ++ ++ packet.m_nBodySize = 4; ++ ++ AMF_EncodeInt32(packet.m_body, pend, chunkSize); ++ ret = RTMP_SendPacket(r, &packet, FALSE); ++ r->m_outChunkSize = chunkSize; ++ return ret; ++} ++ ++static int + SendBytesReceived(RTMP *r) + { + RTMPPacket packet; +@@ -3349,6 +3372,11 @@ HandleChangeChunkSize(RTMP *r, const RTMPPacket *packet) + r->m_inChunkSize = AMF_DecodeInt32(packet->m_body); + RTMP_Log(RTMP_LOGDEBUG, "%s, received: chunk size change to %d", __FUNCTION__, + r->m_inChunkSize); ++ if (r->Link.protocol & RTMP_FEATURE_WRITE) ++ { ++ RTMP_Log(RTMP_LOGDEBUG, "%s, updating outChunkSize too", __FUNCTION__); ++ SendClientChunkSize(r, r->m_inChunkSize); ++ } + } + } + diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch new file mode 100644 index 00000000000000..146243bd111188 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch @@ -0,0 +1,248 @@ +https://raw.githubusercontent.com/xbmc/inputstream.rtmp/master/depends/common/librtmp/0003-openssl-1.1.patch +See also https://github.com/xbmc/inputstream.rtmp/pull/46 +--- a/librtmp/dh.h ++++ b/librtmp/dh.h +@@ -253,20 +253,42 @@ + if (!dh) + goto failed; + ++#if !defined(USE_OPENSSL) || 
!defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_new(dh->g); + + if (!dh->g) + goto failed; ++#else ++ BIGNUM *g = NULL; ++ MP_new(g); ++ if (!g) ++ goto failed; ++#endif + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_gethex(dh->p, P1024, res); /* prime P1024, see dhgroups.h */ ++#else ++ BIGNUM* p = NULL; ++ DH_get0_pqg(dh, (BIGNUM const**)&p, NULL, NULL); ++ MP_gethex(p, P1024, res); /* prime P1024, see dhgroups.h */ ++#endif + if (!res) + { + goto failed; + } + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_set_w(dh->g, 2); /* base 2 */ ++#else ++ MP_set_w(g, 2); /* base 2 */ ++ DH_set0_pqg(dh, p, NULL, g); ++#endif + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + dh->length = nKeyBits; ++#else ++ DH_set_length(dh, nKeyBits); ++#endif + return dh; + + failed: +@@ -293,12 +315,24 @@ + MP_gethex(q1, Q1024, res); + assert(res); + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + res = isValidPublicKey(dh->pub_key, dh->p, q1); ++#else ++ BIGNUM const* pub_key = NULL; ++ BIGNUM const* p = NULL; ++ DH_get0_key(dh, &pub_key, NULL); ++ DH_get0_pqg(dh, &p, NULL, NULL); ++ res = isValidPublicKey((BIGNUM*)pub_key, (BIGNUM*)p, q1); ++#endif + if (!res) + { ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_free(dh->pub_key); + MP_free(dh->priv_key); + dh->pub_key = dh->priv_key = 0; ++#else ++ DH_free(dh); ++#endif + } + + MP_free(q1); +@@ -314,15 +348,29 @@ + DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen) + { + int len; ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + if (!dh || !dh->pub_key) ++#else ++ BIGNUM const* pub_key = NULL; ++ DH_get0_key(dh, &pub_key, NULL); ++ if (!dh || !pub_key) ++#endif + return 0; + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + len = MP_bytes(dh->pub_key); ++#else ++ len = MP_bytes(pub_key); ++#endif + if (len <= 0 || len > (int) nPubkeyLen) + return 0; + + memset(pubkey, 0, nPubkeyLen); ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_setbin(dh->pub_key, pubkey + (nPubkeyLen - len), len); ++#else ++ MP_setbin(pub_key, pubkey + (nPubkeyLen - len), len); ++#endif + return 1; + } + +@@ -364,7 +412,13 @@ + MP_gethex(q1, Q1024, len); + assert(len); + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + if (isValidPublicKey(pubkeyBn, dh->p, q1)) ++#else ++ BIGNUM const* p = NULL; ++ DH_get0_pqg(dh, &p, NULL, NULL); ++ if (isValidPublicKey(pubkeyBn, (BIGNUM*)p, q1)) ++#endif + res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh); + else + res = -1; +--- a/librtmp/handshake.h ++++ b/librtmp/handshake.h +@@ -31,9 +31,9 @@ + #define SHA256_DIGEST_LENGTH 32 + #endif + #define HMAC_CTX sha2_context +-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0) +-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig) ++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0) ++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len) ++#define 
HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig) + + typedef arc4_context * RC4_handle; + #define RC4_alloc(h) *h = malloc(sizeof(arc4_context)) +@@ -50,9 +50,9 @@ + #endif + #undef HMAC_CTX + #define HMAC_CTX struct hmac_sha256_ctx +-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key) +-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig) ++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, key) ++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig) + #define HMAC_close(ctx) + + typedef struct arcfour_ctx* RC4_handle; +@@ -64,14 +64,23 @@ + + #else /* USE_OPENSSL */ + #include ++#include + #include + #include + #if OPENSSL_VERSION_NUMBER < 0x0090800 || !defined(SHA256_DIGEST_LENGTH) + #error Your OpenSSL is too old, need 0.9.8 or newer with SHA256 + #endif +-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, key, len, EVP_sha256(), 0) +-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, dig, &dlen); HMAC_CTX_cleanup(&ctx) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#else ++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#endif ++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, buf, len) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_cleanup(ctx) ++#else ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_free(ctx) ++#endif + + typedef RC4_KEY * RC4_handle; + #define RC4_alloc(h) *h = malloc(sizeof(RC4_KEY)) +@@ -117,7 +126,7 @@ + { + uint8_t digest[SHA256_DIGEST_LENGTH]; + unsigned int digestLen = 0; +- HMAC_CTX ctx; ++ HMAC_CTX* ctx = NULL; + + RC4_alloc(rc4keyIn); + RC4_alloc(rc4keyOut); +@@ -266,7 +275,7 @@ + size_t keylen, uint8_t *digest) + { + unsigned int digestLen; +- HMAC_CTX ctx; ++ HMAC_CTX* ctx = NULL; + + HMAC_setup(ctx, key, keylen); + HMAC_crunch(ctx, message, messageLen); +--- a/librtmp/hashswf.c ++++ b/librtmp/hashswf.c +@@ -37,9 +37,9 @@ + #define SHA256_DIGEST_LENGTH 32 + #endif + #define HMAC_CTX sha2_context +-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0) +-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig) ++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0) ++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig) + #define HMAC_close(ctx) + #elif defined(USE_GNUTLS) + #include +@@ -48,19 +48,27 @@ + #endif + #undef HMAC_CTX + #define HMAC_CTX struct hmac_sha256_ctx +-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key) +-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig) ++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, 
key) ++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig) + #define HMAC_close(ctx) + #else /* USE_OPENSSL */ + #include + #include + #include + #include +-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, (unsigned char *)key, len, EVP_sha256(), 0) +-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, (unsigned char *)buf, len) +-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, (unsigned char *)dig, &dlen); +-#define HMAC_close(ctx) HMAC_CTX_cleanup(&ctx) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, (unsigned char *)key, len, EVP_sha256(), 0) ++#else ++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#endif ++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, (unsigned char *)buf, len) ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, (unsigned char *)dig, &dlen); ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_close(ctx) HMAC_CTX_cleanup(ctx) ++#else ++#define HMAC_close(ctx) HMAC_CTX_reset(ctx); HMAC_CTX_free(ctx) ++#endif + #endif + + extern void RTMP_TLS_Init(); +@@ -289,7 +297,7 @@ + struct info + { + z_stream *zs; +- HMAC_CTX ctx; ++ HMAC_CTX *ctx; + int first; + int zlib; + int size; diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch new file mode 100644 index 00000000000000..cc7637d84943af --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch @@ -0,0 +1,14 @@ +https://bugs.gentoo.org/669574 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index 5311a8a..79fefae 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2854,7 +2854,7 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet) + if (nType == 0x1A) + { + RTMP_Log(RTMP_LOGDEBUG, "%s, SWFVerification ping received: ", __FUNCTION__); +- if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01) ++ if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x02) + { + RTMP_Log(RTMP_LOGERROR, + "%s: SWFVerification Type %d request not supported! Patches welcome...", diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch new file mode 100644 index 00000000000000..ade0d9baa79a46 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch @@ -0,0 +1,22 @@ +https://bugs.gentoo.org/669574 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index df2cb27..b72dc64 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2857,14 +2857,14 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet) + if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01) + { + RTMP_Log(RTMP_LOGERROR, +- "%s: SWFVerification Type %d request not supported! Patches welcome...", ++ "%s: SWFVerification Type %d request not supported, attempting to use SWFVerification Type 1! 
Patches welcome...", + __FUNCTION__, packet->m_body[2]); + } + #ifdef CRYPTO + /*RTMP_LogHex(packet.m_body, packet.m_nBodySize); */ + + /* respond with HMAC SHA256 of decompressed SWF, key is the 30byte player key, also the last 30 bytes of the server handshake are applied */ +- else if (r->Link.SWFSize) ++ if (r->Link.SWFSize) + { + RTMP_SendCtrl(r, 0x1B, 0, 0); + } From 396c6a9a5a7e4a91f4a47fbe9005322fa3631457 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 26 Oct 2023 15:25:56 -0500 Subject: [PATCH 335/408] PythonPackage: allow archive_files to be overridden (#40694) --- lib/spack/spack/build_systems/python.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index c370178d7032b6..7f71cbae7058f0 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -425,7 +425,7 @@ class PythonPipBuilder(BaseBuilder): legacy_long_methods = ("install_options", "global_options", "config_settings") #: Names associated with package attributes in the old build-system format - legacy_attributes = ("build_directory", "install_time_test_callbacks") + legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks") #: Callback names for install-time test install_time_test_callbacks = ["test"] From 2092885ab09cbded81ccf4e011226f9d825f1705 Mon Sep 17 00:00:00 2001 From: Daniel Arndt Date: Thu, 26 Oct 2023 18:10:16 -0400 Subject: [PATCH 336/408] dataTransferKit: add v3.1.1, v3.1.0 (#40556) * Update DataTransferKit for 3.1.1 release * Require Trilinos-14 for 3.1.0 and higher --- .../repos/builtin/packages/datatransferkit/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/datatransferkit/package.py b/var/spack/repos/builtin/packages/datatransferkit/package.py index 1829e2414df090..7e0d050069d0cd 100644 --- a/var/spack/repos/builtin/packages/datatransferkit/package.py +++ b/var/spack/repos/builtin/packages/datatransferkit/package.py @@ -11,7 +11,7 @@ class Datatransferkit(CMakePackage): parallel solution transfer services for multiphysics simulations""" homepage = "https://datatransferkit.readthedoc.io" - url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1-rc3.tar.gz" + url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1.1.tar.gz" git = "https://github.com/ORNL-CEES/DataTransferKit.git" tags = ["e4s"] @@ -19,6 +19,8 @@ class Datatransferkit(CMakePackage): maintainers("Rombur") version("master", branch="master", submodules=True) + version("3.1.1", commit="bfb7673cc233c26a6a541cbf096f37f26df1e5fb", submodules=True) + version("3.1.0", commit="60a4cbd0a55505e0450f1ac979e1eef8966dc03f", submodules=True) version("3.1-rc3", commit="691d5a1540f7cd42141a3b3d2a7c8370cbc3560a", submodules=True) version("3.1-rc2", commit="1abc1a43b33dffc7a16d7497b4185d09d865e36a", submodules=True) @@ -37,7 +39,8 @@ class Datatransferkit(CMakePackage): depends_on("trilinos+intrepid2+shards~dtk") depends_on("trilinos+openmp", when="+openmp") depends_on("trilinos+stratimikos+belos", when="@master") - depends_on("trilinos@13:", when="@3.1-rc2:") + depends_on("trilinos@13:13.4.1", when="@3.1-rc2:3.1-rc3") + depends_on("trilinos@14:", when="@3.1.0:") def cmake_args(self): spec = self.spec From f9f306bf577c48ec4858d7fc09e6a41e022a28a9 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Thu, 26 Oct 2023 17:12:10 -0500 Subject: [PATCH 337/408] ldak: 
add v5.2 & add maintainer (#40710) * ldak: update to 5.2, add maintainer * ldak: use compiler.openmp_flag --- .../repos/builtin/packages/ldak/package.py | 73 ++++++++++++++----- 1 file changed, 55 insertions(+), 18 deletions(-) diff --git a/var/spack/repos/builtin/packages/ldak/package.py b/var/spack/repos/builtin/packages/ldak/package.py index 022a3beacbe10e..1fbb7de0900b16 100644 --- a/var/spack/repos/builtin/packages/ldak/package.py +++ b/var/spack/repos/builtin/packages/ldak/package.py @@ -12,31 +12,68 @@ class Ldak(Package): homepage = "https://dougspeed.com/ldak/" url = "https://dougspeed.com/wp-content/uploads/source.zip" - version("5.1", sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e") + maintainers("snehring") - variant("mkl", default=False, description="Use MKL") + version("5.2", sha256="ba3de4eb4f2d664b3c2a54bef2eb66d1a498ac423179e97a5795d010161b1805") + version( + "5.1", + sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e", + deprecated=True, + ) + + variant("glpk", default=False, description="Use glpk instead of vendored qsopt") depends_on("zlib-api") depends_on("blas") depends_on("lapack") - depends_on("mkl", when="+mkl") - - for t in ["aarch64", "arm", "ppc", "ppc64", "ppc64le", "ppcle", "sparc", "sparc64", "x86"]: - conflicts("target={0}:".format(t), msg="libspot is available linux x86_64 only") - - def setup_build_environment(self, env): - env.append_flags("LDLIBS", "-lm") - env.append_flags("LDLIBS", "-lz") - libs = (self.spec["lapack"].libs + self.spec["blas"].libs).ld_flags - env.append_flags("LDLIBS", libs) - if self.spec.platform == "darwin": - env.append_flags("LDLIBS", "libqsopt.mac.a") + depends_on("openblas threads=openmp", when="^openblas") + depends_on("intel-mkl threads=openmp", when="^intel-mkl") + depends_on("intel-oneapi-mkl threads=openmp", when="^intel-oneapi-mkl") + depends_on("glpk", when="+glpk") + + requires("target=x86_64:", when="~glpk", msg="bundled qsopt is only for x86_64") + requires( + "^mkl", + "^openblas", + policy="one_of", + msg="Only mkl or openblas are supported for blas/lapack with ldak", + ) + conflicts("platform=cray", when="~glpk", msg="bundled qsopt only for linux or mac") + + phases = ["build", "install"] + + def build(self, spec, prefix): + libs = [ + "-lm", + (self.spec["lapack"].libs + self.spec["blas"].libs).link_flags, + self.spec["zlib-api"].libs.link_flags, + ] + includes = [ + (self.spec["lapack"].headers + self.spec["blas"].headers).include_flags, + self.spec["zlib-api"].headers.include_flags, + ] + + if self.spec.satisfies("~glpk"): + if self.spec.satisfies("platform=darwin"): + libs.append("libqsopt.mac.a") + else: + libs.append("libqsopt.linux.a") else: - env.append_flags("LDLIBS", "libqsopt.linux.a") + includes.append(self.spec["glpk"].headers.include_flags) + libs.append(self.spec["glpk"].libs.link_flags) + if self.spec.satisfies("^mkl"): + filter_file("#define MKL.*", "#define MKL 1", "ldak.c") + if self.spec.satisfies("^openblas"): + filter_file("#define MKL.*", "#define MKL 2", "ldak.c") + filter_file("#if MKL==2", "#if MKL==2\n#include \n", "ldak.c") + if self.spec.satisfies("+glpk"): + filter_file("#define MET.*", "#define MET 1", "ldak.c") + filter_file('#include"glpk.h"', "#include", "ldak.c") + filter_file(r"weights\[", "tally3[", "weightfuns.c") + cc = Executable(spack_cc) + args = ["ldak.c", self.compiler.openmp_flag, "-o", "ldak"] + includes + libs + cc(*args) def install(self, spec, prefix): - if self.spec.satisfies("~mkl"): - filter_file("#define MKL.*", 
"#define MKL 0", "ldak.c") - make("ldak") mkdirp(prefix.bin) install("ldak", prefix.bin.ldak) From caa7f56a166eac7406b43e66eb6969a324a40bff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Bederi=C3=A1n?= <4043375+zzzoom@users.noreply.github.com> Date: Thu, 26 Oct 2023 19:13:27 -0300 Subject: [PATCH 338/408] itk: misc fixes (#39832) * itk: patch missing include for newer compilers * itk: The package doesn't use MPI * itk: package requires the high-level hdf5 api * itk: patch url with ?full_index=1 * itk: point to 4041 commit in master * itk: don't constrain hdf5 with ~mpi --- var/spack/repos/builtin/packages/itk/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/itk/package.py b/var/spack/repos/builtin/packages/itk/package.py index bd20a28d3dc7c5..d0123b60f6a4ab 100644 --- a/var/spack/repos/builtin/packages/itk/package.py +++ b/var/spack/repos/builtin/packages/itk/package.py @@ -58,13 +58,18 @@ class Itk(CMakePackage): depends_on("expat") depends_on("fftw-api") depends_on("googletest") - depends_on("hdf5+cxx") + depends_on("hdf5+cxx+hl") depends_on("jpeg") depends_on("libpng") depends_on("libtiff") - depends_on("mpi") depends_on("zlib-api") + patch( + "https://github.com/InsightSoftwareConsortium/ITK/commit/9a719a0d2f5f489eeb9351b0ef913c3693147a4f.patch?full_index=1", + sha256="ec1f7fa71f2b7f05d9632c6b0321e7d436fff86fca92c60c12839b13ea79bd70", + when="@5.2.0:5.3.0", + ) + def cmake_args(self): use_mkl = "^mkl" in self.spec args = [ From 44370f39b59ddc62ad38b1c6228b9a62d4f509be Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 26 Oct 2023 17:48:20 -0500 Subject: [PATCH 339/408] plasma: add version 23.8.2 (#40728) --- var/spack/repos/builtin/packages/plasma/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/plasma/package.py b/var/spack/repos/builtin/packages/plasma/package.py index 6b92413fd7f598..5cccfe3ff4e6b5 100644 --- a/var/spack/repos/builtin/packages/plasma/package.py +++ b/var/spack/repos/builtin/packages/plasma/package.py @@ -19,11 +19,13 @@ class Plasma(CMakePackage): homepage = "https://github.com/icl-utk-edu/plasma/" url = "https://github.com/icl-utk-edu/plasma/releases/download/21.8.29/plasma-21.8.29.tar.gz" git = "https://github.com/icl-utk-edu/plasma" + maintainers("luszczek") tags = ["e4s"] version("develop", git=git) + version("23.8.2", sha256="2db34de0575f3e3d16531bdcf1caddef146f68e71335977a3e8ec193003ab943") version("22.9.29", sha256="78827898b7e3830eee2e388823b9180858279f77c5eda5aa1be173765c53ade5") version("21.8.29", sha256="e0bb4d9143c8540f9f46cbccac9ed0cbea12500a864e6954fce2fe94ea057a10") version("20.9.20", sha256="2144a77b739f8dd2f0dbe5b64d94cde0e916f55c4eb170facd168c0db7fc7970") From 43aac04cc75b3d69747c5563dff3d3d8ac07b79d Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 26 Oct 2023 23:12:20 -0700 Subject: [PATCH 340/408] akantu: use f-strings (#40466) Co-authored-by: Nicolas Richart --- var/spack/repos/builtin/packages/akantu/package.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/akantu/package.py b/var/spack/repos/builtin/packages/akantu/package.py index 2952f9bb4a1688..bba92edcc8e396 100644 --- a/var/spack/repos/builtin/packages/akantu/package.py +++ b/var/spack/repos/builtin/packages/akantu/package.py @@ -65,10 +65,8 @@ def cmake_args(self): "-DAKANTU_HEAT_TRANSFER:BOOL=ON", "-DAKANTU_SOLID_MECHANICS:BOOL=ON", "-DAKANTU_STRUCTURAL_MECHANICS:BOOL=OFF", - 
"-DAKANTU_PARALLEL:BOOL={0}".format("ON" if spec.satisfies("+mpi") else "OFF"), - "-DAKANTU_PYTHON_INTERFACE:BOOL={0}".format( - "ON" if spec.satisfies("+python") else "OFF" - ), + f"-DAKANTU_PARALLEL:BOOL={'ON' if spec.satisfies('+mpi') else 'OFF'}", + f"-DAKANTU_PYTHON_INTERFACE:BOOL={'ON' if spec.satisfies('+python') else 'OFF'}", ] if spec.satisfies("@:3.0"): @@ -84,14 +82,14 @@ def cmake_args(self): solvers = [] if spec.satisfies("external_solvers=mumps"): solvers.append("Mumps") - args.append("-DMUMPS_DIR:PATH=${0}".format(spec["mumps"].prefix)) + args.append(f"-DMUMPS_DIR:PATH=${spec['mumps'].prefix}") if spec.satisfies("external_solvers=petsc"): solvers.append("PETSc") if len(solvers) > 0: args.extend( [ - "-DAKANTU_IMPLICIT_SOLVER:STRING={0}".format("+".join(solvers)), + f"-DAKANTU_IMPLICIT_SOLVER:STRING={'+'.join(solvers)}", "-DAKANTU_IMPLICIT:BOOL=ON", ] ) From 91ab81f2b94ef8b925bfc48021183229fff9c1e6 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 08:29:02 +0200 Subject: [PATCH 341/408] gromacs: fix version branch in intel fftw (#40489) --- var/spack/repos/builtin/packages/gromacs/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index e280234a0e45fa..d516add3779a1f 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -618,7 +618,7 @@ def cmake_args(self): # fftw-api@3 is provided by intel-mkl or intel-parllel-studio # we use the mkl interface of gromacs options.append("-DGMX_FFT_LIBRARY=mkl") - if not self.spec["mkl"].satisfies("@2023:"): + if self.spec.satisfies("@:2022"): options.append( "-DMKL_INCLUDE_DIR={0}".format(self.spec["mkl"].headers.directories[0]) ) From 4f97853eb14f8917b521d1a655f53bfa6e3c6e69 Mon Sep 17 00:00:00 2001 From: dmt4 Date: Fri, 27 Oct 2023 07:55:57 +0100 Subject: [PATCH 342/408] Fixes and options for package spglib (#40684) * Fix cmake_args for spglib v2.1.0+ * Add option to build fortran interface in package spglib * fix style as sugested by ci/prechecks/style * Enable fortran variant from v1.16.4 as suggested Co-authored-by: Rocco Meli --------- Co-authored-by: Rocco Meli --- var/spack/repos/builtin/packages/spglib/package.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/spglib/package.py b/var/spack/repos/builtin/packages/spglib/package.py index 2715f3f3c0295b..1d00091c13158c 100644 --- a/var/spack/repos/builtin/packages/spglib/package.py +++ b/var/spack/repos/builtin/packages/spglib/package.py @@ -48,10 +48,15 @@ class Spglib(CMakePackage): version("1.10.0", sha256="117fff308731784bea2ddaf3d076f0ecbf3981b31ea1c1bfd5ce4f057a5325b1") variant("openmp", default=True, description="Build with OpenMP support", when="@1.16.2:") + variant("fortran", default=True, description="Build Fortran interface", when="@1.16.4:") @property def libs(self): return find_libraries("libsymspg", root=self.prefix, shared=True, recursive=True) def cmake_args(self): - return [self.define_from_variant("USE_OMP", "openmp")] + pfx = "SPGLIB_" if self.spec.satisfies("@2.1.0:") else "" + return [ + self.define_from_variant(pfx + "USE_OMP", "openmp"), + self.define_from_variant(pfx + "WITH_Fortran", "fortran"), + ] From 95bc24d95e3e54c5e97229ab1b71aeeb36bdbc86 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 09:43:01 +0200 Subject: [PATCH 343/408] ci: spack compiler find should 
list extra config scopes (#40727) otherwise it detected pre-configured compilers in an potentially different way. --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 579153bdfdc395..e5475a7bdc6ed5 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -140,8 +140,13 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . - - spack compiler find - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" + - spack + --config-scope "${SPACK_CI_CONFIG_ROOT}" + --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" + --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" + ${CI_STACK_CONFIG_SCOPES} + compiler find - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" From 8d3c141b313600f50f6004f26aa445479eb59ab4 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 09:51:12 +0200 Subject: [PATCH 344/408] gromacs: default to external blas & lapack (#40490) * gromacs: default to external blas & lapack * drop vendored lapack/blas altogether --- .../repos/builtin/packages/gromacs/package.py | 36 +++++-------------- 1 file changed, 8 insertions(+), 28 deletions(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index d516add3779a1f..7a4147a6eecab8 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -142,8 +142,6 @@ class Gromacs(CMakePackage, CudaPackage): msg="GMX_RELAXED_DOUBLE_PRECISION option removed for GROMACS 2021.", ) variant("hwloc", default=True, description="Use the hwloc portable hardware locality library") - variant("lapack", default=False, description="Enables an external LAPACK library") - variant("blas", default=False, description="Enables an external BLAS library") variant("cycle_subcounters", default=False, description="Enables cycle subcounters") variant("cp2k", default=False, description="CP2K QM/MM interface integration") @@ -151,16 +149,6 @@ class Gromacs(CMakePackage, CudaPackage): "+cp2k", when="@:2021", msg="CP2K QM/MM support have been introduced in GROMACS 2022" ) conflicts("+shared", when="+cp2k", msg="Enabling CP2K requires static build") - conflicts( - "~lapack", - when="+cp2k", - msg="GROMACS and CP2K should use the same lapack, please disable bundled lapack", - ) - conflicts( - "~blas", - when="+cp2k", - msg="GROMACS and CP2K should use the same blas, please disable bundled blas", - ) conflicts("%intel", when="@2022:", msg="GROMACS %intel support was removed in version 2022") conflicts("%gcc@:8", when="@2023:", msg="GROMACS requires GCC 9 or later since version 2023") conflicts( @@ -255,8 +243,8 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("cmake@3.16.0:3", type="build", when="%fj") depends_on("cuda", when="+cuda") depends_on("sycl", when="+sycl") - depends_on("lapack", when="+lapack") - depends_on("blas", when="+blas") + depends_on("lapack") + depends_on("blas") depends_on("gcc", when="%oneapi ~intel_provided_gcc") 
depends_on("gcc", when="%intel ~intel_provided_gcc") @@ -504,21 +492,13 @@ def cmake_args(self): if "+cuda" in self.spec: options.append("-DCUDA_TOOLKIT_ROOT_DIR:STRING=" + self.spec["cuda"].prefix) - if "+lapack" in self.spec: - options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON") - if self.spec["lapack"].libs: - options.append( - "-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";")) - ) - else: - options.append("-DGMX_EXTERNAL_LAPACK:BOOL=OFF") + options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON") + if self.spec["lapack"].libs: + options.append("-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";"))) - if "+blas" in self.spec: - options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON") - if self.spec["blas"].libs: - options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";"))) - else: - options.append("-DGMX_EXTERNAL_BLAS:BOOL=OFF") + options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON") + if self.spec["blas"].libs: + options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";"))) if "+cp2k" in self.spec: options.append("-DGMX_CP2K:BOOL=ON") From 8bdd1a850c7ea3980c71e4c32711e969e39900b4 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Fri, 27 Oct 2023 06:29:15 -0500 Subject: [PATCH 345/408] strumpack: add version 7.2.0 (#40732) --- var/spack/repos/builtin/packages/strumpack/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index 15133630a80d59..fce0c4cd175f29 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -29,6 +29,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("master", branch="master") + version("7.2.0", sha256="6988c00c3213f13e53d75fb474102358f4fecf07a4b4304b7123d86fdc784639") version("7.1.3", sha256="c951f38ee7af20da3ff46429e38fcebd57fb6f12619b2c56040d6da5096abcb0") version("7.1.2", sha256="262a0193fa1682d0eaa90363f739e0be7a778d5deeb80e4d4ae12446082a39cc") version("7.1.1", sha256="56481a22955c2eeb40932777233fc227347743c75683d996cb598617dd2a8635") From 734e950697f46bbe11ae53ff26e2922ffbc653f9 Mon Sep 17 00:00:00 2001 From: Ashwin Kumar Karnad <46030335+iamashwin99@users.noreply.github.com> Date: Fri, 27 Oct 2023 14:24:44 +0200 Subject: [PATCH 346/408] octopus: split netcdf-c and netcdf-fortran dependency (#40685) --- var/spack/repos/builtin/packages/octopus/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 5ce974edfa63e0..8a88711dad900e 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -93,13 +93,14 @@ class Octopus(AutotoolsPackage, CudaPackage): depends_on("libxc@2:4", when="@8:9") depends_on("libxc@5.1.0:", when="@10:") depends_on("libxc@5.1.0:", when="@develop") + depends_on("netcdf-fortran", when="+netcdf") # NetCDF fortran lib without mpi variant with when("+mpi"): # list all the parallel dependencies depends_on("fftw@3:+mpi+openmp", when="@8:9") # FFT library depends_on("fftw-api@3:+mpi+openmp", when="@10:") depends_on("libvdwxc+mpi", when="+libvdwxc") depends_on("arpack-ng+mpi", when="+arpack") depends_on("elpa+mpi", when="+elpa") - depends_on("netcdf-fortran ^netcdf-c+mpi", when="+netcdf") + depends_on("netcdf-c+mpi", when="+netcdf") # Link dependency of NetCDF fortran lib 
depends_on("berkeleygw@2.1+mpi", when="+berkeleygw") with when("~mpi"): # list all the serial dependencies @@ -108,7 +109,7 @@ class Octopus(AutotoolsPackage, CudaPackage): depends_on("libvdwxc~mpi", when="+libvdwxc") depends_on("arpack-ng~mpi", when="+arpack") depends_on("elpa~mpi", when="+elpa") - depends_on("netcdf-fortran ^netcdf-c~~mpi", when="+netcdf") + depends_on("netcdf-c~~mpi", when="+netcdf") # Link dependency of NetCDF fortran lib depends_on("berkeleygw@2.1~mpi", when="+berkeleygw") depends_on("etsf-io", when="+etsf-io") From 2ca3ab52b22ebb53a7dfc135509f3c2ed5a4e0ef Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 15:30:04 +0200 Subject: [PATCH 347/408] OCI buildcache (#38358) Credits to @ChristianKniep for advocating the idea of OCI image layers being identical to spack buildcache tarballs. With this you can configure an OCI registry as a buildcache: ```console $ spack mirror add my_registry oci://user/image # Dockerhub $ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR $ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login credentials ``` which should result in this config: ```yaml mirrors: my_registry: url: oci://ghcr.io/haampie/spack-test push: access_pair: [, ] ``` It can be used like any other registry ``` spack buildcache push my_registry [specs...] ``` It will upload the Spack tarballs in parallel, as well as manifest + config files s.t. the binaries are compatible with `docker pull` or `skopeo copy`. In fact, a base image can be added to get a _runnable_ image: ```console $ spack buildcache push --base-image ubuntu:23.04 my_registry python Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack $ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack ``` which should really be a game changer for sharing binaries. Further, all content-addressable blobs that are downloaded and verified will be cached in Spack's download cache. This should make repeated `push` commands faster, as well as `push` followed by a separate `update-index` command. An end to end example of how to use this in Github Actions is here: **https://github.com/haampie/spack-oci-buildcache-example** TODO: - [x] Generate environment modifications in config so PATH is set up - [x] Enrich config with Spack's `spec` json (this is allowed in the OCI specification) - [x] When ^ is done, add logic to create an index in say `:index` by fetching all config files (using OCI distribution discovery API) - [x] Add logic to use object storage in an OCI registry in `spack install`. - [x] Make the user pick the base image for generated OCI images. - [x] Update buildcache install logic to deal with absolute paths in tarballs - [x] Merge with `spack buildcache` command - [x] Merge #37441 (included here) - [x] Merge #39077 (included here) - [x] #39187 + #39285 - [x] #39341 - [x] Not a blocker: #35737 fixes correctness run env for the generated container images NOTE: 1. `oci://` is unfortunately taken, so it's being abused in this PR to mean "oci type mirror". `skopeo` uses `docker://` which I'd like to avoid, given that classical docker v1 registries are not supported. 2. this is currently `https`-only, given that basic auth is used to login. I _could_ be convinced to allow http, but I'd prefer not to, given that for a `spack buildcache push` command multiple domains can be involved (auth server, source of base image, destination registry). 
Right now, no urllib http handler is added, so redirects to https and auth servers with http urls will simply result in a hard failure. CAVEATS: 1. Signing is not implemented in this PR. `gpg --clearsign` is not the nicest solution, since (a) the spec.json is merged into the image config, which must be valid json, and (b) it would be better to sign the manifest (referencing both config/spec file and tarball) using more conventional image signing tools 2. `spack.binary_distribution.push` is not yet implemented for the OCI buildcache, only `spack buildcache push` is. This is because I'd like to always push images + deps to the registry, so that it's `docker pull`-able, whereas in `spack ci` we really wanna push an individual package without its deps to say `pr-xyz`, while its deps reside in some `develop` buildcache. 3. The `push -j ...` flag only works for OCI buildcache, not for others --- lib/spack/docs/binary_caches.rst | 125 ++++ lib/spack/docs/conf.py | 1 + lib/spack/spack/binary_distribution.py | 421 +++++++++--- lib/spack/spack/cmd/buildcache.py | 493 ++++++++++++-- lib/spack/spack/cmd/common/arguments.py | 4 +- lib/spack/spack/cmd/mirror.py | 12 +- lib/spack/spack/fetch_strategy.py | 30 + lib/spack/spack/mirror.py | 58 +- lib/spack/spack/oci/__init__.py | 4 + lib/spack/spack/oci/image.py | 228 +++++++ lib/spack/spack/oci/oci.py | 381 +++++++++++ lib/spack/spack/oci/opener.py | 442 ++++++++++++ lib/spack/spack/parser.py | 21 +- lib/spack/spack/spec.py | 3 +- lib/spack/spack/stage.py | 12 +- lib/spack/spack/test/cmd/buildcache.py | 6 +- lib/spack/spack/test/conftest.py | 19 + lib/spack/spack/test/oci/image.py | 101 +++ lib/spack/spack/test/oci/integration_test.py | 148 ++++ lib/spack/spack/test/oci/mock_registry.py | 410 +++++++++++ lib/spack/spack/test/oci/urlopen.py | 672 +++++++++++++++++++ lib/spack/spack/util/crypto.py | 90 +-- share/spack/spack-completion.bash | 10 +- share/spack/spack-completion.fish | 30 +- 24 files changed, 3479 insertions(+), 242 deletions(-) create mode 100644 lib/spack/spack/oci/__init__.py create mode 100644 lib/spack/spack/oci/image.py create mode 100644 lib/spack/spack/oci/oci.py create mode 100644 lib/spack/spack/oci/opener.py create mode 100644 lib/spack/spack/test/oci/image.py create mode 100644 lib/spack/spack/test/oci/integration_test.py create mode 100644 lib/spack/spack/test/oci/mock_registry.py create mode 100644 lib/spack/spack/test/oci/urlopen.py diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 280d957c086e56..5f11dd6bd6ab55 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -156,6 +156,131 @@ List of popular build caches * `Extreme-scale Scientific Software Stack (E4S) `_: `build cache `_ +----------------------------------------- +OCI / Docker V2 registries as build cache +----------------------------------------- + +Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io, +Github Packages, GitLab Container Registry, JFrog Artifactory, and others +as build caches. This is a convenient way to share binaries using public +infrastructure, or to cache Spack built binaries in Github Actions and +GitLab CI. + +To get started, configure an OCI mirror using ``oci://`` as the scheme, +and optionally specify a username and password (or personal access token): + +.. 
code-block:: console + + $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image + +Spack follows the naming conventions of Docker, with Dockerhub as the default +registry. To use Dockerhub, you can omit the registry domain: + +.. code-block:: console + + $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image + +From here, you can use the mirror as any other build cache: + +.. code-block:: console + + $ spack buildcache push my_registry # push to the registry + $ spack install # install from the registry + +A unique feature of buildcaches on top of OCI registries is that it's incredibly +easy to generate get a runnable container image with the binaries installed. This +is a great way to make applications available to users without requiring them to +install Spack -- all you need is Docker, Podman or any other OCI-compatible container +runtime. + +To produce container images, all you need to do is add the ``--base-image`` flag +when pushing to the build cache: + +.. code-block:: console + + $ spack buildcache push --base-image ubuntu:20.04 my_registry ninja + Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack + + $ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack + root@e4c2b6f6b3f4:/# ninja --version + 1.11.1 + +If ``--base-image`` is not specified, distroless images are produced. In practice, +you won't be able to run these as containers, since they don't come with libc and +other system dependencies. However, they are still compatible with tools like +``skopeo``, ``podman``, and ``docker`` for pulling and pushing. + +.. note:: + The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a + ``max depth exceeded`` error may be produced when pulling the image. There + are `alternative drivers `_. + +------------------------------------ +Using a buildcache in GitHub Actions +------------------------------------ + +GitHub Actions is a popular CI/CD platform for building and testing software, +but each CI job has limited resources, making from source builds too slow for +many applications. Spack build caches can be used to share binaries between CI +runs, speeding up CI significantly. + +A typical workflow is to include a ``spack.yaml`` environment in your repository +that specifies the packages to install: + +.. code-block:: yaml + + spack: + specs: [pkg-x, pkg-y] + packages: + all: + require: target=x86_64_v2 + mirrors: + github_packages: oci://ghcr.io// + +And a GitHub action that sets up Spack, installs packages from the build cache +or from sources, and pushes newly built binaries to the build cache: + +.. code-block:: yaml + + name: Install Spack packages + + on: push + + env: + SPACK_COLOR: always + + jobs: + example: + runs-on: ubuntu-22.04 + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install Spack + run: | + git clone --depth=1 https://github.com/spack/spack.git + echo "$PWD/spack/bin/" >> "$GITHUB_PATH" + + - name: Concretize + run: spack -e . concretize + + - name: Install + run: spack -e . install --no-check-signature --fail-fast + + - name: Push to buildcache + run: | + spack -e . mirror set --oci-username --oci-password "${{ secrets.GITHUB_TOKEN }}" github_packages + spack -e . 
buildcache push --base-image ubuntu:22.04 --unsigned --update-index github_packages + if: always() + +The first time this action runs, it will build the packages from source and +push them to the build cache. Subsequent runs will pull the binaries from the +build cache. The concretizer will ensure that prebuilt binaries are favored +over source builds. + +The build cache entries appear in the GitHub Packages section of your repository, +and contain instructions for pulling and running them with ``docker`` or ``podman``. + ---------- Relocation ---------- diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index f1bde9c9fbdfd0..250a600e7f75ec 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -204,6 +204,7 @@ def setup(sphinx): ("py:class", "clingo.Control"), ("py:class", "six.moves.urllib.parse.ParseResult"), ("py:class", "TextIO"), + ("py:class", "hashlib._Hash"), # Spack classes that are private and we don't want to expose ("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.repo._PrependFileLoader"), diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 7484fee09793c2..af04dfefb07113 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -5,11 +5,13 @@ import codecs import collections +import errno import hashlib import io import itertools import json import os +import pathlib import re import shutil import sys @@ -31,6 +33,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree +import spack.caches import spack.cmd import spack.config as config import spack.database as spack_db @@ -38,6 +41,9 @@ import spack.hooks import spack.hooks.sbang import spack.mirror +import spack.oci.image +import spack.oci.oci +import spack.oci.opener import spack.platforms import spack.relocate as relocate import spack.repo @@ -471,14 +477,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): FetchIndexError """ # TODO: get rid of this request, handle 404 better - if not web_util.url_exists( + scheme = urllib.parse.urlparse(mirror_url).scheme + + if scheme != "oci" and not web_util.url_exists( url_util.join(mirror_url, _build_cache_relative_path, "index.json") ): return False - etag = cache_entry.get("etag", None) - if etag: - fetcher = EtagIndexFetcher(mirror_url, etag) + if scheme == "oci": + # TODO: Actually etag and OCI are not mutually exclusive... 
+ fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None)) + elif cache_entry.get("etag"): + fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"]) else: fetcher = DefaultIndexFetcher( mirror_url, local_hash=cache_entry.get("index_hash", None) @@ -622,21 +632,14 @@ def build_cache_prefix(prefix): def buildinfo_file_name(prefix): - """ - Filename of the binary package meta-data file - """ - return os.path.join(prefix, ".spack/binary_distribution") + """Filename of the binary package meta-data file""" + return os.path.join(prefix, ".spack", "binary_distribution") def read_buildinfo_file(prefix): - """ - Read buildinfo file - """ - filename = buildinfo_file_name(prefix) - with open(filename, "r") as inputfile: - content = inputfile.read() - buildinfo = syaml.load(content) - return buildinfo + """Read buildinfo file""" + with open(buildinfo_file_name(prefix), "r") as f: + return syaml.load(f) class BuildManifestVisitor(BaseDirectoryVisitor): @@ -819,18 +822,6 @@ def tarball_path_name(spec, ext): return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext)) -def checksum_tarball(file): - # calculate sha256 hash of tar file - block_size = 65536 - hasher = hashlib.sha256() - with open(file, "rb") as tfile: - buf = tfile.read(block_size) - while len(buf) > 0: - hasher.update(buf) - buf = tfile.read(block_size) - return hasher.hexdigest() - - def select_signing_key(key=None): if key is None: keys = spack.util.gpg.signing_keys() @@ -1147,14 +1138,17 @@ def gzip_compressed_tarfile(path): # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB # So we follow gzip. - with open(path, "wb") as fileobj, closing( - GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj) - ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar: - yield tar + with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing( + GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum) + ) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile( + name="", mode="w", fileobj=outer_checksum + ) as tar: + yield tar, inner_checksum, outer_checksum -def _tarinfo_name(p: str): - return p.lstrip("/") +def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str: + """Compute tarfile entry name as the relative path from the (system) root.""" + return _path(*_path(absolute_path).parts[1:]).as_posix() def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None: @@ -1234,8 +1228,88 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None: dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical +class ChecksumWriter(io.BufferedIOBase): + """Checksum writer computes a checksum while writing to a file.""" + + myfileobj = None + + def __init__(self, fileobj, algorithm=hashlib.sha256): + self.fileobj = fileobj + self.hasher = algorithm() + self.length = 0 + + def hexdigest(self): + return self.hasher.hexdigest() + + def write(self, data): + if isinstance(data, (bytes, bytearray)): + length = len(data) + else: + data = memoryview(data) + length = data.nbytes + + if length > 0: + self.fileobj.write(data) + self.hasher.update(data) + + self.length += length + + return length + + def read(self, size=-1): + raise OSError(errno.EBADF, "read() on write-only object") + + def read1(self, size=-1): + raise OSError(errno.EBADF, "read1() on write-only object") + + def peek(self, n): + raise 
OSError(errno.EBADF, "peek() on write-only object") + + @property + def closed(self): + return self.fileobj is None + + def close(self): + fileobj = self.fileobj + if fileobj is None: + return + self.fileobj.close() + self.fileobj = None + + def flush(self): + self.fileobj.flush() + + def fileno(self): + return self.fileobj.fileno() + + def rewind(self): + raise OSError("Can't rewind while computing checksum") + + def readable(self): + return False + + def writable(self): + return True + + def seekable(self): + return True + + def tell(self): + return self.fileobj.tell() + + def seek(self, offset, whence=io.SEEK_SET): + # In principle forward seek is possible with b"0" padding, + # but this is not implemented. + if offset == 0 and whence == io.SEEK_CUR: + return + raise OSError("Can't seek while computing checksum") + + def readline(self, size=-1): + raise OSError(errno.EBADF, "readline() on write-only object") + + def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict): - with gzip_compressed_tarfile(tarfile_path) as tar: + with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum): # Tarball the install prefix tarfile_of_spec_prefix(tar, binaries_dir) @@ -1247,6 +1321,8 @@ def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict): tarinfo.mode = 0o644 tar.addfile(tarinfo, io.BytesIO(bstring)) + return inner_checksum.hexdigest(), outer_checksum.hexdigest() + class PushOptions(NamedTuple): #: Overwrite existing tarball/metadata files in buildcache @@ -1322,13 +1398,9 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option # create info for later relocation and create tar buildinfo = get_buildinfo_dict(spec) - _do_create_tarball(tarfile_path, binaries_dir, buildinfo) - - # get the sha256 checksum of the tarball - checksum = checksum_tarball(tarfile_path) + checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo) # add sha256 checksum to spec.json - with open(spec_file, "r") as inputfile: content = inputfile.read() if spec_file.endswith(".json"): @@ -1371,10 +1443,21 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option return None +class NotInstalledError(spack.error.SpackError): + """Raised when a spec is not installed but picked to be packaged.""" + + def __init__(self, specs: List[Spec]): + super().__init__( + "Cannot push non-installed packages", + ", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs), + ) + + def specs_to_be_packaged( specs: List[Spec], root: bool = True, dependencies: bool = True ) -> List[Spec]: """Return the list of nodes to be packaged, given a list of specs. + Raises NotInstalledError if a spec is not installed but picked to be packaged. Args: specs: list of root specs to be processed @@ -1382,19 +1465,35 @@ def specs_to_be_packaged( dependencies: include the dependencies of each spec in the nodes """ + if not root and not dependencies: return [] - elif dependencies: - nodes = traverse.traverse_nodes(specs, root=root, deptype="all") - else: - nodes = set(specs) - # Limit to installed non-externals. 
- packageable = lambda n: not n.external and n.installed - - # Mass install check + # Filter packageable roots with spack.store.STORE.db.read_transaction(): - return list(filter(packageable, nodes)) + if root: + # Error on uninstalled roots, when roots are requested + uninstalled_roots = list(s for s in specs if not s.installed) + if uninstalled_roots: + raise NotInstalledError(uninstalled_roots) + roots = specs + else: + roots = [] + + if dependencies: + # Error on uninstalled deps, when deps are requested + deps = list( + traverse.traverse_nodes( + specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash + ) + ) + uninstalled_deps = list(s for s in deps if not s.installed) + if uninstalled_deps: + raise NotInstalledError(uninstalled_deps) + else: + deps = [] + + return [s for s in itertools.chain(roots, deps) if not s.external] def push(spec: Spec, mirror_url: str, options: PushOptions): @@ -1502,8 +1601,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): tarball = tarball_path_name(spec, ".spack") specfile_prefix = tarball_name(spec, ".spec") - mirrors_to_try = [] - # Note on try_first and try_next: # mirrors_for_spec mostly likely came from spack caching remote # mirror indices locally and adding their specs to a local data @@ -1516,63 +1613,116 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else [] try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first] - for url in try_first + try_next: - mirrors_to_try.append( - { - "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix), - "spackfile": url_util.join(url, _build_cache_relative_path, tarball), - } - ) + mirrors = try_first + try_next tried_to_verify_sigs = [] # Assumes we care more about finding a spec file by preferred ext # than by mirrory priority. This can be made less complicated as # we remove support for deprecated spec formats and buildcache layouts. - for ext in ["json.sig", "json"]: - for mirror_to_try in mirrors_to_try: - specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext) - spackfile_url = mirror_to_try["spackfile"] - local_specfile_stage = try_fetch(specfile_url) - if local_specfile_stage: - local_specfile_path = local_specfile_stage.save_filename - signature_verified = False - - if ext.endswith(".sig") and not unsigned: - # If we found a signed specfile at the root, try to verify - # the signature immediately. We will not download the - # tarball if we could not verify the signature. - tried_to_verify_sigs.append(specfile_url) - signature_verified = try_verify(local_specfile_path) - if not signature_verified: - tty.warn("Failed to verify: {0}".format(specfile_url)) - - if unsigned or signature_verified or not ext.endswith(".sig"): - # We will download the tarball in one of three cases: - # 1. user asked for --no-check-signature - # 2. user didn't ask for --no-check-signature, but we - # found a spec.json.sig and verified the signature already - # 3. neither of the first two cases are true, but this file - # is *not* a signed json (not a spec.json.sig file). That - # means we already looked at all the mirrors and either didn't - # find any .sig files or couldn't verify any of them. But it - # is still possible to find an old style binary package where - # the signature is a detached .asc file in the outer archive - # of the tarball, and in that case, the only way to know is to - # download the tarball. 
This is a deprecated use case, so if - # something goes wrong during the extraction process (can't - # verify signature, checksum doesn't match) we will fail at - # that point instead of trying to download more tarballs from - # the remaining mirrors, looking for one we can use. - tarball_stage = try_fetch(spackfile_url) - if tarball_stage: - return { - "tarball_stage": tarball_stage, - "specfile_stage": local_specfile_stage, - "signature_verified": signature_verified, - } + for try_signed in (True, False): + for mirror in mirrors: + # If it's an OCI index, do things differently, since we cannot compose URLs. + parsed = urllib.parse.urlparse(mirror) + + # TODO: refactor this to some "nice" place. + if parsed.scheme == "oci": + ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag( + spack.oci.image.default_tag(spec) + ) + + # Fetch the manifest + try: + response = spack.oci.opener.urlopen( + urllib.request.Request( + url=ref.manifest_url(), + headers={"Accept": "application/vnd.oci.image.manifest.v1+json"}, + ) + ) + except Exception: + continue + + # Download the config = spec.json and the relevant tarball + try: + manifest = json.loads(response.read()) + spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) + tarball_digest = spack.oci.image.Digest.from_string( + manifest["layers"][-1]["digest"] + ) + except Exception: + continue + + with spack.oci.oci.make_stage( + ref.blob_url(spec_digest), spec_digest, keep=True + ) as local_specfile_stage: + try: + local_specfile_stage.fetch() + local_specfile_stage.check() + except Exception: + continue + local_specfile_stage.cache_local() + + with spack.oci.oci.make_stage( + ref.blob_url(tarball_digest), tarball_digest, keep=True + ) as tarball_stage: + try: + tarball_stage.fetch() + tarball_stage.check() + except Exception: + continue + tarball_stage.cache_local() + + return { + "tarball_stage": tarball_stage, + "specfile_stage": local_specfile_stage, + "signature_verified": False, + } - local_specfile_stage.destroy() + else: + ext = "json.sig" if try_signed else "json" + specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix) + specfile_url = f"{specfile_path}.{ext}" + spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball) + local_specfile_stage = try_fetch(specfile_url) + if local_specfile_stage: + local_specfile_path = local_specfile_stage.save_filename + signature_verified = False + + if try_signed and not unsigned: + # If we found a signed specfile at the root, try to verify + # the signature immediately. We will not download the + # tarball if we could not verify the signature. + tried_to_verify_sigs.append(specfile_url) + signature_verified = try_verify(local_specfile_path) + if not signature_verified: + tty.warn("Failed to verify: {0}".format(specfile_url)) + + if unsigned or signature_verified or not try_signed: + # We will download the tarball in one of three cases: + # 1. user asked for --no-check-signature + # 2. user didn't ask for --no-check-signature, but we + # found a spec.json.sig and verified the signature already + # 3. neither of the first two cases are true, but this file + # is *not* a signed json (not a spec.json.sig file). That + # means we already looked at all the mirrors and either didn't + # find any .sig files or couldn't verify any of them. 
But it + # is still possible to find an old style binary package where + # the signature is a detached .asc file in the outer archive + # of the tarball, and in that case, the only way to know is to + # download the tarball. This is a deprecated use case, so if + # something goes wrong during the extraction process (can't + # verify signature, checksum doesn't match) we will fail at + # that point instead of trying to download more tarballs from + # the remaining mirrors, looking for one we can use. + tarball_stage = try_fetch(spackfile_url) + if tarball_stage: + return { + "tarball_stage": tarball_stage, + "specfile_stage": local_specfile_stage, + "signature_verified": signature_verified, + } + + local_specfile_stage.destroy() # Falling through the nested loops meeans we exhaustively searched # for all known kinds of spec files on all mirrors and did not find @@ -1805,7 +1955,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum ) # compute the sha256 checksum of the tarball - local_checksum = checksum_tarball(tarfile_path) + local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) expected = remote_checksum["hash"] # if the checksums don't match don't install @@ -1866,6 +2016,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti spec_dict = sjson.load(content) bchecksum = spec_dict["binary_cache_checksum"] + filename = download_result["tarball_stage"].save_filename signature_verified = download_result["signature_verified"] tmpdir = None @@ -1898,7 +2049,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti ) # compute the sha256 checksum of the tarball - local_checksum = checksum_tarball(tarfile_path) + local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) expected = bchecksum["hash"] # if the checksums don't match don't install @@ -2457,7 +2608,7 @@ def get_remote_hash(self): return None return remote_hash.decode("utf-8") - def conditional_fetch(self): + def conditional_fetch(self) -> FetchIndexResult: # Do an intermediate fetch for the hash # and a conditional fetch for the contents @@ -2471,12 +2622,12 @@ def conditional_fetch(self): try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) except urllib.error.URLError as e: - raise FetchIndexError("Could not fetch index from {}".format(url_index), e) + raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e try: result = codecs.getreader("utf-8")(response).read() except ValueError as e: - return FetchCacheError("Remote index {} is invalid".format(url_index), e) + raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e computed_hash = compute_hash(result) @@ -2508,7 +2659,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): self.etag = etag self.urlopen = urlopen - def conditional_fetch(self): + def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately url = url_util.join(self.url, _build_cache_relative_path, "index.json") headers = { @@ -2539,3 +2690,59 @@ def conditional_fetch(self): data=result, fresh=False, ) + + +class OCIIndexFetcher: + def __init__(self, url: str, local_hash, urlopen=None) -> None: + self.local_hash = local_hash + + # Remove oci:// prefix + assert url.startswith("oci://") + self.ref = spack.oci.image.ImageReference.from_string(url[6:]) + self.urlopen = urlopen or spack.oci.opener.urlopen + + def conditional_fetch(self) -> FetchIndexResult: + """Download an index 
from an OCI registry type mirror.""" + url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url() + try: + response = self.urlopen( + urllib.request.Request( + url=url_manifest, + headers={"Accept": "application/vnd.oci.image.manifest.v1+json"}, + ) + ) + except urllib.error.URLError as e: + raise FetchIndexError( + "Could not fetch manifest from {}".format(url_manifest), e + ) from e + + try: + manifest = json.loads(response.read()) + except Exception as e: + raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e + + # Get first blob hash, which should be the index.json + try: + index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"]) + except Exception as e: + raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e + + # Fresh? + if index_digest.digest == self.local_hash: + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + + # Otherwise fetch the blob / index.json + response = self.urlopen( + urllib.request.Request( + url=self.ref.blob_url(index_digest), + headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"}, + ) + ) + + result = codecs.getreader("utf-8")(response).read() + + # Make sure the blob we download has the advertised hash + if compute_hash(result) != index_digest.digest: + raise FetchIndexError(f"Remote index {url_manifest} is invalid") + + return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 13e77927add9ad..94cce16030be32 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -3,16 +3,19 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import argparse +import copy import glob +import hashlib import json +import multiprocessing.pool import os import shutil import sys import tempfile -from typing import List +import urllib.request +from typing import Dict, List, Optional, Tuple import llnl.util.tty as tty -import llnl.util.tty.color as clr from llnl.string import plural from llnl.util.lang import elide_list @@ -22,17 +25,37 @@ import spack.config import spack.environment as ev import spack.error +import spack.hash_types as ht import spack.mirror +import spack.oci.oci +import spack.oci.opener import spack.relocate import spack.repo import spack.spec +import spack.stage import spack.store +import spack.user_environment import spack.util.crypto import spack.util.url as url_util import spack.util.web as web_util +from spack.build_environment import determine_number_of_jobs from spack.cmd import display_specs +from spack.oci.image import ( + Digest, + ImageReference, + default_config, + default_index_tag, + default_manifest, + default_tag, + tag_is_spec, +) +from spack.oci.oci import ( + copy_missing_layers_with_retry, + get_manifest_and_config_with_retry, + upload_blob_with_retry, + upload_manifest_with_retry, +) from spack.spec import Spec, save_dependency_specfiles -from spack.stage import Stage description = "create, download and install binary packages" section = "packaging" @@ -58,7 +81,9 @@ def setup_parser(subparser: argparse.ArgumentParser): push_sign.add_argument( "--key", "-k", metavar="key", type=str, default=None, help="key for signing" ) - push.add_argument("mirror", type=str, help="mirror name, path, or URL") + push.add_argument( + "mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL" + ) push.add_argument( "--update-index", "--rebuild-index", @@ 
-84,7 +109,10 @@ def setup_parser(subparser: argparse.ArgumentParser): action="store_true", help="stop pushing on first failure (default is best effort)", ) - arguments.add_common_arguments(push, ["specs"]) + push.add_argument( + "--base-image", default=None, help="specify the base image for the buildcache. " + ) + arguments.add_common_arguments(push, ["specs", "jobs"]) push.set_defaults(func=push_fn) install = subparsers.add_parser("install", help=install_fn.__doc__) @@ -268,7 +296,22 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]: return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs] -def push_fn(args: argparse.Namespace): +def _format_spec(spec: Spec) -> str: + return spec.cformat("{name}{@version}{/hash:7}") + + +def _progress(i: int, total: int): + if total > 1: + digits = len(str(total)) + return f"[{i+1:{digits}}/{total}] " + return "" + + +def _make_pool(): + return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True)) + + +def push_fn(args): """create a binary package and push it to a mirror""" if args.spec_file: tty.warn( @@ -281,63 +324,80 @@ def push_fn(args: argparse.Namespace): else: specs = spack.cmd.require_active_env("buildcache push").all_specs() - mirror = arguments.mirror_name_or_url(args.mirror) - if args.allow_root: tty.warn( "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22" ) - url = mirror.push_url + # Check if this is an OCI image. + try: + image_ref = spack.oci.oci.image_from_mirror(args.mirror) + except ValueError: + image_ref = None + + # For OCI images, we require dependencies to be pushed for now. + if image_ref: + if "dependencies" not in args.things_to_install: + tty.die("Dependencies must be pushed for OCI images.") + if not args.unsigned: + tty.warn( + "Code signing is currently not supported for OCI images. " + "Use --unsigned to silence this warning." + ) + # This is a list of installed, non-external specs. specs = bindist.specs_to_be_packaged( specs, root="package" in args.things_to_install, dependencies="dependencies" in args.things_to_install, ) + url = args.mirror.push_url + # When pushing multiple specs, print the url once ahead of time, as well as how # many specs are being pushed. if len(specs) > 1: tty.info(f"Selected {len(specs)} specs to push to {url}") - skipped = [] failed = [] - # tty printing - color = clr.get_color_when() - format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color) - total_specs = len(specs) - digits = len(str(total_specs)) - - for i, spec in enumerate(specs): - try: - bindist.push_or_raise( - spec, - url, - bindist.PushOptions( - force=args.force, - unsigned=args.unsigned, - key=args.key, - regenerate_index=args.update_index, - ), - ) - - if total_specs > 1: - msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}" - else: - msg = f"Pushed {format_spec(spec)} to {url}" + # TODO: unify this logic in the future. 
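
For reference, the status lines produced by the helpers above look as follows; a minimal standalone sketch of the `_progress` formatting (the spec string is a made-up example):

    # Standalone copy of the _progress logic above, for illustration only.
    def progress(i: int, total: int) -> str:
        if total > 1:
            digits = len(str(total))
            return f"[{i+1:{digits}}/{total}] "
        return ""

    for i in range(2):
        print(f"{progress(i, 12)}Pushed zlib@1.2.13/abcdefg")
    # [ 1/12] Pushed zlib@1.2.13/abcdefg
    # [ 2/12] Pushed zlib@1.2.13/abcdefg
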
+ if image_ref: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + skipped = _push_oci(args, image_ref, specs, tmpdir, pool) + else: + skipped = [] + + for i, spec in enumerate(specs): + try: + bindist.push_or_raise( + spec, + url, + bindist.PushOptions( + force=args.force, + unsigned=args.unsigned, + key=args.key, + regenerate_index=args.update_index, + ), + ) - tty.info(msg) + msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}" + if len(specs) == 1: + msg += f" to {url}" + tty.info(msg) - except bindist.NoOverwriteException: - skipped.append(format_spec(spec)) + except bindist.NoOverwriteException: + skipped.append(_format_spec(spec)) - # Catch any other exception unless the fail fast option is set - except Exception as e: - if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)): - raise - failed.append((format_spec(spec), e)) + # Catch any other exception unless the fail fast option is set + except Exception as e: + if args.fail_fast or isinstance( + e, (bindist.PickKeyException, bindist.NoKeyException) + ): + raise + failed.append((_format_spec(spec), e)) if skipped: if len(specs) == 1: @@ -364,6 +424,341 @@ def push_fn(args: argparse.Namespace): ), ) + # Update the index if requested + # TODO: remove update index logic out of bindist; should be once after all specs are pushed + # not once per spec. + if image_ref and len(skipped) < len(specs) and args.update_index: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + _update_index_oci(image_ref, tmpdir, pool) + + +def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]: + """Get the spack tarball layer digests and size if it exists""" + try: + manifest, config = get_manifest_and_config_with_retry(image_ref) + + return spack.oci.oci.Blob( + compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]), + uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]), + size=manifest["layers"][-1]["size"], + ) + except Exception: + return None + + +def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str): + filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz") + + # Create an oci.image.layer aka tarball of the package + compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename) + + blob = spack.oci.oci.Blob( + Digest.from_sha256(compressed_tarfile_checksum), + Digest.from_sha256(tarfile_checksum), + os.path.getsize(filename), + ) + + # Upload the blob + upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest) + + # delete the file + os.unlink(filename) + + return blob + + +def _retrieve_env_dict_from_config(config: dict) -> dict: + """Retrieve the environment variables from the image config file. + Sets a default value for PATH if it is not present. + + Args: + config (dict): The image config file. + + Returns: + dict: The environment variables. 
+ """ + env = {"PATH": "/bin:/usr/bin"} + + if "Env" in config.get("config", {}): + for entry in config["config"]["Env"]: + key, value = entry.split("=", 1) + env[key] = value + return env + + +def _archspec_to_gooarch(spec: spack.spec.Spec) -> str: + name = spec.target.family.name + name_map = {"aarch64": "arm64", "x86_64": "amd64"} + return name_map.get(name, name) + + +def _put_manifest( + base_images: Dict[str, Tuple[dict, dict]], + checksums: Dict[str, spack.oci.oci.Blob], + spec: spack.spec.Spec, + image_ref: ImageReference, + tmpdir: str, +): + architecture = _archspec_to_gooarch(spec) + + dependencies = list( + reversed( + list( + s + for s in spec.traverse(order="topo", deptype=("link", "run"), root=True) + if not s.external + ) + ) + ) + + base_manifest, base_config = base_images[architecture] + env = _retrieve_env_dict_from_config(base_config) + + spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env) + + # Create an oci.image.config file + config = copy.deepcopy(base_config) + + # Add the diff ids of the dependencies + for s in dependencies: + config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest)) + + # Set the environment variables + config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()] + + # From the OCI v1.0 spec: + # > Any extra fields in the Image JSON struct are considered implementation + # > specific and MUST be ignored by any implementations which are unable to + # > interpret them. + # We use this to store the Spack spec, so we can use it to create an index. + spec_dict = spec.to_dict(hash=ht.dag_hash) + spec_dict["buildcache_layout_version"] = 1 + spec_dict["binary_cache_checksum"] = { + "hash_algorithm": "sha256", + "hash": checksums[spec.dag_hash()].compressed_digest.digest, + } + config.update(spec_dict) + + config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json") + + with open(config_file, "w") as f: + json.dump(config, f, separators=(",", ":")) + + config_file_checksum = Digest.from_sha256( + spack.util.crypto.checksum(hashlib.sha256, config_file) + ) + + # Upload the config file + upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum) + + oci_manifest = { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + "config": { + "mediaType": base_manifest["config"]["mediaType"], + "digest": str(config_file_checksum), + "size": os.path.getsize(config_file), + }, + "layers": [ + *(layer for layer in base_manifest["layers"]), + *( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(checksums[s.dag_hash()].compressed_digest), + "size": checksums[s.dag_hash()].size, + } + for s in dependencies + ), + ], + "annotations": {"org.opencontainers.image.description": spec.format()}, + } + + image_ref_for_spec = image_ref.with_tag(default_tag(spec)) + + # Finally upload the manifest + upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest) + + # delete the config file + os.unlink(config_file) + + return image_ref_for_spec + + +def _push_oci( + args, + image_ref: ImageReference, + installed_specs_with_deps: List[Spec], + tmpdir: str, + pool: multiprocessing.pool.Pool, +) -> List[str]: + """Push specs to an OCI registry + + Args: + args: The command line arguments. + image_ref: The image reference. + installed_specs_with_deps: The installed specs to push, excluding externals, + including deps, ordered from roots to leaves. 
+ + Returns: + List[str]: The list of skipped specs (already in the buildcache). + """ + + # Reverse the order + installed_specs_with_deps = list(reversed(installed_specs_with_deps)) + + # The base image to use for the package. When not set, we use + # the OCI registry only for storage, and do not use any base image. + base_image_ref: Optional[ImageReference] = ( + ImageReference.from_string(args.base_image) if args.base_image else None + ) + + # Spec dag hash -> blob + checksums: Dict[str, spack.oci.oci.Blob] = {} + + # arch -> (manifest, config) + base_images: Dict[str, Tuple[dict, dict]] = {} + + # Specs not uploaded because they already exist + skipped = [] + + if not args.force: + tty.info("Checking for existing specs in the buildcache") + to_be_uploaded = [] + + tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps) + available_blobs = pool.map(_get_spack_binary_blob, tags_to_check) + + for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs): + if maybe_blob is not None: + checksums[spec.dag_hash()] = maybe_blob + skipped.append(_format_spec(spec)) + else: + to_be_uploaded.append(spec) + else: + to_be_uploaded = installed_specs_with_deps + + if not to_be_uploaded: + return skipped + + tty.info( + f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}" + ) + + # Upload blobs + new_blobs = pool.starmap( + _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded) + ) + + # And update the spec to blob mapping + for spec, blob in zip(to_be_uploaded, new_blobs): + checksums[spec.dag_hash()] = blob + + # Copy base image layers, probably fine to do sequentially. + for spec in to_be_uploaded: + architecture = _archspec_to_gooarch(spec) + # Get base image details, if we don't have them yet + if architecture in base_images: + continue + if base_image_ref is None: + base_images[architecture] = (default_manifest(), default_config(architecture, "linux")) + else: + base_images[architecture] = copy_missing_layers_with_retry( + base_image_ref, image_ref, architecture + ) + + # Upload manifests + tty.info("Uploading manifests") + pushed_image_ref = pool.starmap( + _put_manifest, + ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded), + ) + + # Print the image names of the top-level specs + for spec, ref in zip(to_be_uploaded, pushed_image_ref): + tty.info(f"Pushed {_format_spec(spec)} to {ref}") + + return skipped + + +def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]: + # Don't allow recursion here, since Spack itself always uploads + # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json + _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0) + + # Do very basic validation: if "spec" is a key in the config, it + # must be a Spec object too. 
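
For orientation, the per-spec manifest that `_put_manifest` above uploads is a plain OCI image manifest; roughly the following shape, where every digest, size and spec name is a placeholder:

    # Approximate shape of the manifest uploaded per spec (placeholder values).
    example_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        # The config blob also carries the spec.json and its checksum.
        "config": {
            "mediaType": "application/vnd.oci.image.config.v1+json",
            "digest": "sha256:" + "0" * 64,
            "size": 1234,
        },
        # Base image layers (when --base-image is used) come first, followed by
        # one gzipped tarball layer per link/run dependency, the spec itself last.
        "layers": [
            {
                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                "digest": "sha256:" + "1" * 64,
                "size": 56789,
            }
        ],
        "annotations": {"org.opencontainers.image.description": "zlib@1.2.13 ..."},
    }
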
+ return config if "spec" in config else None + + +def _update_index_oci( + image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool +) -> None: + response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url())) + spack.oci.opener.ensure_status(response, 200) + tags = json.load(response)["tags"] + + # Fetch all image config files in parallel + spec_dicts = pool.starmap( + _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag)) + ) + + # Populate the database + db_root_dir = os.path.join(tmpdir, "db_root") + db = bindist.BuildCacheDatabase(db_root_dir) + + for spec_dict in spec_dicts: + spec = Spec.from_dict(spec_dict) + db.add(spec, directory_layout=None) + db.mark(spec, "in_buildcache", True) + + # Create the index.json file + index_json_path = os.path.join(tmpdir, "index.json") + with open(index_json_path, "w") as f: + db._write_to_file(f) + + # Create an empty config.json file + empty_config_json_path = os.path.join(tmpdir, "config.json") + with open(empty_config_json_path, "wb") as f: + f.write(b"{}") + + # Upload the index.json file + index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path)) + upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum) + + # Upload the config.json file + empty_config_digest = Digest.from_sha256( + spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path) + ) + upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest) + + # Push a manifest file that references the index.json file as a layer + # Notice that we push this as if it is an image, which it of course is not. + # When the ORAS spec becomes official, we can use that instead of a fake image. + # For now we just use the OCI image spec, so that we don't run into issues with + # automatic garbage collection of blobs that are not referenced by any image manifest. + oci_manifest = { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + # Config is just an empty {} file for now, and irrelevant + "config": { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(empty_config_digest), + "size": os.path.getsize(empty_config_json_path), + }, + # The buildcache index is the only layer, and is not a tarball, we lie here. + "layers": [ + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(index_shasum), + "size": os.path.getsize(index_json_path), + } + ], + } + + upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest) + def install_fn(args): """install from a binary package""" @@ -522,7 +917,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None): local_path = os.path.join(tmpdir, os.path.basename(src_url)) try: - temp_stage = Stage(src_url, path=os.path.dirname(local_path)) + temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path)) try: temp_stage.create() temp_stage.fetch() @@ -616,6 +1011,20 @@ def manifest_copy(manifest_file_list): def update_index(mirror: spack.mirror.Mirror, update_keys=False): + # Special case OCI images for now. + try: + image_ref = spack.oci.oci.image_from_mirror(mirror) + except ValueError: + image_ref = None + + if image_ref: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + _update_index_oci(image_ref, tmpdir, pool) + return + + # Otherwise, assume a normal mirror. 
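
`_update_index_oci` above discovers binaries by listing tags; per the OCI distribution API the tag listing is a small JSON document, roughly like this (repository and tag names are invented):

    # Hypothetical reply from GET https://<registry>/v2/<name>/tags/list.
    tags_response = {
        "name": "myorg/spack-buildcache",
        "tags": [
            # spec tags follow default_tag(): "<name>-<version>-<dag hash>.spack"
            "zlib-1.2.13-abcdefabcdefabcdefabcdefabcdefab.spack",
            # the buildcache index itself; filtered out by tag_is_spec()
            "index.spack",
        ],
    }
    spec_tags = [t for t in tags_response["tags"] if t.endswith(".spack") and t != "index.spack"]
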
url = mirror.push_url bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path())) diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index 2b343923c5f9e9..9aa3edac479f50 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -543,7 +543,7 @@ def add_concretizer_args(subparser): ) -def add_s3_connection_args(subparser, add_help): +def add_connection_args(subparser, add_help): subparser.add_argument( "--s3-access-key-id", help="ID string to use to connect to this S3 mirror" ) @@ -559,6 +559,8 @@ def add_s3_connection_args(subparser, add_help): subparser.add_argument( "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror" ) + subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror") + subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror") def use_buildcache(cli_arg_value): diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 6edae785a01769..1036dcbe917e35 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -111,7 +111,7 @@ def setup_parser(subparser): "and source use `--type binary --type source` (default)" ), ) - arguments.add_s3_connection_args(add_parser, False) + arguments.add_connection_args(add_parser, False) # Remove remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__) remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror") @@ -141,7 +141,7 @@ def setup_parser(subparser): default=spack.config.default_modify_scope(), help="configuration scope to modify", ) - arguments.add_s3_connection_args(set_url_parser, False) + arguments.add_connection_args(set_url_parser, False) # Set set_parser = sp.add_parser("set", help=mirror_set.__doc__) @@ -170,7 +170,7 @@ def setup_parser(subparser): default=spack.config.default_modify_scope(), help="configuration scope to modify", ) - arguments.add_s3_connection_args(set_parser, False) + arguments.add_connection_args(set_parser, False) # List list_parser = sp.add_parser("list", help=mirror_list.__doc__) @@ -192,6 +192,8 @@ def mirror_add(args): or args.s3_profile or args.s3_endpoint_url or args.type + or args.oci_username + or args.oci_password ): connection = {"url": args.url} if args.s3_access_key_id and args.s3_access_key_secret: @@ -202,6 +204,8 @@ def mirror_add(args): connection["profile"] = args.s3_profile if args.s3_endpoint_url: connection["endpoint_url"] = args.s3_endpoint_url + if args.oci_username and args.oci_password: + connection["access_pair"] = [args.oci_username, args.oci_password] if args.type: connection["binary"] = "binary" in args.type connection["source"] = "source" in args.type @@ -235,6 +239,8 @@ def _configure_mirror(args): changes["profile"] = args.s3_profile if args.s3_endpoint_url: changes["endpoint_url"] = args.s3_endpoint_url + if args.oci_username and args.oci_password: + changes["access_pair"] = [args.oci_username, args.oci_password] # argparse cannot distinguish between --binary and --no-binary when same dest :( # notice that set-url does not have these args, so getattr diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index aa96bbbe5106d9..a7b3d25043e5b9 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -28,6 +28,7 @@ import os.path import re import shutil +import urllib.error import urllib.parse from typing import List, 
Optional @@ -41,6 +42,7 @@ import spack.config import spack.error +import spack.oci.opener import spack.url import spack.util.crypto as crypto import spack.util.git @@ -537,6 +539,34 @@ def fetch(self): tty.msg("Using cached archive: {0}".format(path)) +class OCIRegistryFetchStrategy(URLFetchStrategy): + def __init__(self, url=None, checksum=None, **kwargs): + super().__init__(url, checksum, **kwargs) + + self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen) + + @_needs_stage + def fetch(self): + file = self.stage.save_filename + tty.msg(f"Fetching {self.url}") + + try: + response = self._urlopen(self.url) + except urllib.error.URLError as e: + # clean up archive on failure. + if self.archive_file: + os.remove(self.archive_file) + if os.path.lexists(file): + os.remove(file) + raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e + + if os.path.lexists(file): + os.remove(file) + + with open(file, "wb") as f: + shutil.copyfileobj(response, f) + + class VCSFetchStrategy(FetchStrategy): """Superclass for version control system fetch strategies. diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 32037502c580a1..d5425772cdd3be 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -18,7 +18,7 @@ import sys import traceback import urllib.parse -from typing import Optional, Union +from typing import List, Optional, Union import llnl.url import llnl.util.tty as tty @@ -27,18 +27,18 @@ import spack.caches import spack.config import spack.error -import spack.fetch_strategy as fs +import spack.fetch_strategy import spack.mirror +import spack.oci.image import spack.spec import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml import spack.util.url as url_util -from spack.util.spack_yaml import syaml_dict -from spack.version import VersionList +import spack.version #: What schemes do we support -supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs") +supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci") def _url_or_path_to_url(url_or_path: str) -> str: @@ -230,12 +230,12 @@ def _get_value(self, attribute: str, direction: str): value = self._data.get(direction, {}) # Return top-level entry if only a URL was set. 
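
With the new `--oci-username`/`--oci-password` flags above, credentials end up stored as an `access_pair` on the mirror entry; a rough sketch of the resulting configuration (registry, user and token are invented):

    # Roughly what
    #   spack mirror add --oci-username myuser --oci-password mytoken \
    #       my-cache oci://ghcr.io/myorg/spack-cache
    # stores under the "mirrors" config section (values are made up):
    mirrors = {
        "my-cache": {
            "url": "oci://ghcr.io/myorg/spack-cache",
            "access_pair": ["myuser", "mytoken"],
        }
    }
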
- if isinstance(value, str): - return self._data.get(attribute, None) + if isinstance(value, str) or attribute not in value: + return self._data.get(attribute) - return self._data.get(direction, {}).get(attribute, None) + return value[attribute] - def get_url(self, direction: str): + def get_url(self, direction: str) -> str: if direction not in ("fetch", "push"): raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}") @@ -255,18 +255,21 @@ def get_url(self, direction: str): elif "url" in info: url = info["url"] - return _url_or_path_to_url(url) if url else None + if not url: + raise ValueError(f"Mirror {self.name} has no URL configured") - def get_access_token(self, direction: str): + return _url_or_path_to_url(url) + + def get_access_token(self, direction: str) -> Optional[str]: return self._get_value("access_token", direction) - def get_access_pair(self, direction: str): + def get_access_pair(self, direction: str) -> Optional[List]: return self._get_value("access_pair", direction) - def get_profile(self, direction: str): + def get_profile(self, direction: str) -> Optional[str]: return self._get_value("profile", direction) - def get_endpoint_url(self, direction: str): + def get_endpoint_url(self, direction: str) -> Optional[str]: return self._get_value("endpoint_url", direction) @@ -330,7 +333,7 @@ def from_json(stream, name=None): raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e def to_dict(self, recursive=False): - return syaml_dict( + return syaml.syaml_dict( sorted( ((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()), key=operator.itemgetter(0), @@ -372,7 +375,7 @@ def __len__(self): def _determine_extension(fetcher): - if isinstance(fetcher, fs.URLFetchStrategy): + if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy): if fetcher.expand_archive: # If we fetch with a URLFetchStrategy, use URL's archive type ext = llnl.url.determine_url_file_extension(fetcher.url) @@ -437,6 +440,19 @@ def __iter__(self): yield self.cosmetic_path +class OCIImageLayout: + """Follow the OCI Image Layout Specification to archive blobs + + Paths are of the form `blobs//` + """ + + def __init__(self, digest: spack.oci.image.Digest) -> None: + self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest) + + def __iter__(self): + yield self.storage_path + + def mirror_archive_paths(fetcher, per_package_ref, spec=None): """Returns a ``MirrorReference`` object which keeps track of the relative storage path of the resource associated with the specified ``fetcher``.""" @@ -482,7 +498,7 @@ def get_all_versions(specs): for version in pkg_cls.versions: version_spec = spack.spec.Spec(pkg_cls.name) - version_spec.versions = VersionList([version]) + version_spec.versions = spack.version.VersionList([version]) version_specs.append(version_spec) return version_specs @@ -521,7 +537,7 @@ def get_matching_versions(specs, num_versions=1): # Generate only versions that satisfy the spec. 
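
The `OCIImageLayout` class above stores registry blobs under content-addressed paths, following the OCI Image Layout convention; a small sketch (the digest is a placeholder):

    # Where a fetched OCI blob is cached inside the mirror (placeholder digest).
    import spack.mirror
    import spack.oci.image

    digest = spack.oci.image.Digest.from_sha256("0" * 64)
    layout = spack.mirror.OCIImageLayout(digest)
    print(list(layout))  # ['blobs/sha256/000...0'] with POSIX path separators
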
if spec.concrete or v.intersects(spec.versions): s = spack.spec.Spec(pkg.name) - s.versions = VersionList([v]) + s.versions = spack.version.VersionList([v]) s.variants = spec.variants.copy() # This is needed to avoid hanging references during the # concretization phase @@ -591,14 +607,14 @@ def add(mirror: Mirror, scope=None): """Add a named mirror in the given scope""" mirrors = spack.config.get("mirrors", scope=scope) if not mirrors: - mirrors = syaml_dict() + mirrors = syaml.syaml_dict() if mirror.name in mirrors: tty.die("Mirror with name {} already exists.".format(mirror.name)) items = [(n, u) for n, u in mirrors.items()] items.insert(0, (mirror.name, mirror.to_dict())) - mirrors = syaml_dict(items) + mirrors = syaml.syaml_dict(items) spack.config.set("mirrors", mirrors, scope=scope) @@ -606,7 +622,7 @@ def remove(name, scope): """Remove the named mirror in the given scope""" mirrors = spack.config.get("mirrors", scope=scope) if not mirrors: - mirrors = syaml_dict() + mirrors = syaml.syaml_dict() if name not in mirrors: tty.die("No mirror with name %s" % name) diff --git a/lib/spack/spack/oci/__init__.py b/lib/spack/spack/oci/__init__.py new file mode 100644 index 00000000000000..af304aecb70f37 --- /dev/null +++ b/lib/spack/spack/oci/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/spack/oci/image.py b/lib/spack/spack/oci/image.py new file mode 100644 index 00000000000000..1954bf013d6142 --- /dev/null +++ b/lib/spack/spack/oci/image.py @@ -0,0 +1,228 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import re +import urllib.parse +from typing import Optional, Union + +import spack.spec + +# all the building blocks +alphanumeric = r"[a-z0-9]+" +separator = r"(?:[._]|__|[-]+)" +localhost = r"localhost" +domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])" +optionalPort = r"(?::[0-9]+)?" 
+tag = r"[\w][\w.-]{0,127}" +digestPat = r"[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][0-9a-fA-F]{32,}" +ipv6address = r"\[(?:[a-fA-F0-9:]+)\]" + +# domain name +domainName = rf"{domainNameComponent}(?:\.{domainNameComponent})*" +host = rf"(?:{domainName}|{ipv6address})" +domainAndPort = rf"{host}{optionalPort}" + +# image name +pathComponent = rf"{alphanumeric}(?:{separator}{alphanumeric})*" +remoteName = rf"{pathComponent}(?:\/{pathComponent})*" +namePat = rf"(?:{domainAndPort}\/)?{remoteName}" + +# Regex for a full image reference, with 3 groups: name, tag, digest +referencePat = re.compile(rf"^({namePat})(?::({tag}))?(?:@({digestPat}))?$") + +# Regex for splitting the name into domain and path components +anchoredNameRegexp = re.compile(rf"^(?:({domainAndPort})\/)?({remoteName})$") + + +def ensure_sha256_checksum(oci_blob: str): + """Validate that the reference is of the format sha256: + Return the checksum if valid, raise ValueError otherwise.""" + if ":" not in oci_blob: + raise ValueError(f"Invalid OCI blob format: {oci_blob}") + alg, checksum = oci_blob.split(":", 1) + if alg != "sha256": + raise ValueError(f"Unsupported OCI blob checksum algorithm: {alg}") + if len(checksum) != 64: + raise ValueError(f"Invalid OCI blob checksum length: {len(checksum)}") + return checksum + + +class Digest: + """Represents a digest in the format :. + Currently only supports sha256 digests.""" + + __slots__ = ["algorithm", "digest"] + + def __init__(self, *, algorithm: str, digest: str) -> None: + self.algorithm = algorithm + self.digest = digest + + def __eq__(self, __value: object) -> bool: + if not isinstance(__value, Digest): + return NotImplemented + return self.algorithm == __value.algorithm and self.digest == __value.digest + + @classmethod + def from_string(cls, string: str) -> "Digest": + return cls(algorithm="sha256", digest=ensure_sha256_checksum(string)) + + @classmethod + def from_sha256(cls, digest: str) -> "Digest": + return cls(algorithm="sha256", digest=digest) + + def __str__(self) -> str: + return f"{self.algorithm}:{self.digest}" + + +class ImageReference: + """A parsed image of the form domain/name:tag[@digest]. + The digest is optional, and domain and tag are automatically + filled out with defaults when parsed from string.""" + + __slots__ = ["domain", "name", "tag", "digest"] + + def __init__( + self, *, domain: str, name: str, tag: str = "latest", digest: Optional[Digest] = None + ): + self.domain = domain + self.name = name + self.tag = tag + self.digest = digest + + @classmethod + def from_string(cls, string) -> "ImageReference": + match = referencePat.match(string) + if not match: + raise ValueError(f"Invalid image reference: {string}") + + image, tag, digest = match.groups() + + assert isinstance(image, str) + assert isinstance(tag, (str, type(None))) + assert isinstance(digest, (str, type(None))) + + match = anchoredNameRegexp.match(image) + + # This can never happen, since the regex is implied + # by the regex above. It's just here to make mypy happy. + assert match, f"Invalid image reference: {string}" + + domain, name = match.groups() + + assert isinstance(domain, (str, type(None))) + assert isinstance(name, str) + + # Fill out defaults like docker would do... + # Based on github.com/distribution/distribution: allow short names like "ubuntu" + # and "user/repo" to be interpreted as "library/ubuntu" and "user/repo:latest + # Not sure if Spack should follow Docker, but it's what people expect... 
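
To make the defaulting below concrete, a few illustrative parses (all image names are made-up examples):

    # Expected normalizations, assuming the rules implemented below:
    from spack.oci.image import ImageReference

    ImageReference.from_string("ubuntu")
    # -> domain="index.docker.io", name="library/ubuntu", tag="latest"

    ImageReference.from_string("myorg/my-cache:develop")
    # -> domain="index.docker.io", name="myorg/my-cache", tag="develop"

    ImageReference.from_string("ghcr.io/myorg/my-cache:develop")
    # -> domain="ghcr.io", name="myorg/my-cache", tag="develop"
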
+ if not domain: + domain = "index.docker.io" + name = f"library/{name}" + elif ( + "." not in domain + and ":" not in domain + and domain != "localhost" + and domain == domain.lower() + ): + name = f"{domain}/{name}" + domain = "index.docker.io" + + if not tag: + tag = "latest" + + # sha256 is currently the only algorithm that + # we implement, even though the spec allows for more + if isinstance(digest, str): + digest = Digest.from_string(digest) + + return cls(domain=domain, name=name, tag=tag, digest=digest) + + def manifest_url(self) -> str: + digest_or_tag = self.digest or self.tag + return f"https://{self.domain}/v2/{self.name}/manifests/{digest_or_tag}" + + def blob_url(self, digest: Union[str, Digest]) -> str: + if isinstance(digest, str): + digest = Digest.from_string(digest) + return f"https://{self.domain}/v2/{self.name}/blobs/{digest}" + + def with_digest(self, digest: Union[str, Digest]) -> "ImageReference": + if isinstance(digest, str): + digest = Digest.from_string(digest) + return ImageReference(domain=self.domain, name=self.name, tag=self.tag, digest=digest) + + def with_tag(self, tag: str) -> "ImageReference": + return ImageReference(domain=self.domain, name=self.name, tag=tag, digest=self.digest) + + def uploads_url(self, digest: Optional[Digest] = None) -> str: + url = f"https://{self.domain}/v2/{self.name}/blobs/uploads/" + if digest: + url += f"?digest={digest}" + return url + + def tags_url(self) -> str: + return f"https://{self.domain}/v2/{self.name}/tags/list" + + def endpoint(self, path: str = "") -> str: + return urllib.parse.urljoin(f"https://{self.domain}/v2/", path) + + def __str__(self) -> str: + s = f"{self.domain}/{self.name}" + if self.tag: + s += f":{self.tag}" + if self.digest: + s += f"@{self.digest}" + return s + + def __eq__(self, __value: object) -> bool: + if not isinstance(__value, ImageReference): + return NotImplemented + return ( + self.domain == __value.domain + and self.name == __value.name + and self.tag == __value.tag + and self.digest == __value.digest + ) + + +def _ensure_valid_tag(tag: str) -> str: + """Ensure a tag is valid for an OCI registry.""" + sanitized = re.sub(r"[^\w.-]", "_", tag) + if len(sanitized) > 128: + return sanitized[:64] + sanitized[-64:] + return sanitized + + +def default_tag(spec: "spack.spec.Spec") -> str: + """Return a valid, default image tag for a spec.""" + return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack") + + +#: Default OCI index tag +default_index_tag = "index.spack" + + +def tag_is_spec(tag: str) -> bool: + """Check if a tag is likely a Spec""" + return tag.endswith(".spack") and tag != default_index_tag + + +def default_config(architecture: str, os: str): + return { + "architecture": architecture, + "os": os, + "rootfs": {"type": "layers", "diff_ids": []}, + "config": {"Env": []}, + } + + +def default_manifest(): + return { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + "config": {"mediaType": "application/vnd.oci.image.config.v1+json"}, + "layers": [], + } diff --git a/lib/spack/spack/oci/oci.py b/lib/spack/spack/oci/oci.py new file mode 100644 index 00000000000000..4e5e196cd10db9 --- /dev/null +++ b/lib/spack/spack/oci/oci.py @@ -0,0 +1,381 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import hashlib +import json +import os +import time +import urllib.error +import urllib.parse +import urllib.request +from http.client import HTTPResponse +from typing import NamedTuple, Tuple +from urllib.request import Request + +import llnl.util.tty as tty + +import spack.binary_distribution +import spack.config +import spack.error +import spack.fetch_strategy +import spack.mirror +import spack.oci.opener +import spack.repo +import spack.spec +import spack.stage +import spack.traverse +import spack.util.crypto + +from .image import Digest, ImageReference + + +class Blob(NamedTuple): + compressed_digest: Digest + uncompressed_digest: Digest + size: int + + +def create_tarball(spec: spack.spec.Spec, tarfile_path): + buildinfo = spack.binary_distribution.get_buildinfo_dict(spec) + return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo) + + +def _log_upload_progress(digest: Digest, size: int, elapsed: float): + elapsed = max(elapsed, 0.001) # guard against division by zero + tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)") + + +def with_query_param(url: str, param: str, value: str) -> str: + """Add a query parameter to a URL + + Args: + url: The URL to add the parameter to. + param: The parameter name. + value: The parameter value. + + Returns: + The URL with the parameter added. + """ + parsed = urllib.parse.urlparse(url) + query = urllib.parse.parse_qs(parsed.query) + if param in query: + query[param].append(value) + else: + query[param] = [value] + return urllib.parse.urlunparse( + parsed._replace(query=urllib.parse.urlencode(query, doseq=True)) + ) + + +def upload_blob( + ref: ImageReference, + file: str, + digest: Digest, + force: bool = False, + small_file_size: int = 0, + _urlopen: spack.oci.opener.MaybeOpen = None, +) -> bool: + """Uploads a blob to an OCI registry + + We only do monolithic uploads, even though it's very simple to do chunked. + Observed problems with chunked uploads: + (1) it's slow, many sequential requests, (2) some registries set an *unknown* + max chunk size, and the spec doesn't say how to obtain it + + Args: + ref: The image reference. + file: The file to upload. + digest: The digest of the file. + force: Whether to force upload the blob, even if it already exists. + small_file_size: For files at most this size, attempt + to do a single POST request instead of POST + PUT. + Some registries do no support single requests, and others + do not specify what size they support in single POST. + For now this feature is disabled by default (0KB) + + Returns: + True if the blob was uploaded, False if it already existed. + """ + _urlopen = _urlopen or spack.oci.opener.urlopen + + # Test if the blob already exists, if so, early exit. + if not force and blob_exists(ref, digest, _urlopen): + return False + + start = time.time() + + with open(file, "rb") as f: + file_size = os.fstat(f.fileno()).st_size + + # For small blobs, do a single POST request. + # The spec says that registries MAY support this + if file_size <= small_file_size: + request = Request( + url=ref.uploads_url(digest), + method="POST", + data=f, + headers={ + "Content-Type": "application/octet-stream", + "Content-Length": str(file_size), + }, + ) + else: + request = Request( + url=ref.uploads_url(), method="POST", headers={"Content-Length": "0"} + ) + + response = _urlopen(request) + + # Created the blob in one go. 
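
`with_query_param` above is used further down to append the `digest=` parameter to the upload `Location` returned by the registry; a quick sketch of its behaviour (the URL is a made-up example):

    from spack.oci.oci import with_query_param

    url = "https://registry.example.com/v2/myorg/cache/blobs/uploads/abc123"
    print(with_query_param(url, "digest", "sha256:" + "0" * 64))
    # https://registry.example.com/v2/myorg/cache/blobs/uploads/abc123?digest=sha256%3A000...0
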
+ if response.status == 201: + _log_upload_progress(digest, file_size, time.time() - start) + return True + + # Otherwise, do another PUT request. + spack.oci.opener.ensure_status(response, 202) + assert "Location" in response.headers + + # Can be absolute or relative, joining handles both + upload_url = with_query_param( + ref.endpoint(response.headers["Location"]), "digest", str(digest) + ) + f.seek(0) + + response = _urlopen( + Request( + url=upload_url, + method="PUT", + data=f, + headers={ + "Content-Type": "application/octet-stream", + "Content-Length": str(file_size), + }, + ) + ) + + spack.oci.opener.ensure_status(response, 201) + + # print elapsed time and # MB/s + _log_upload_progress(digest, file_size, time.time() - start) + return True + + +def upload_manifest( + ref: ImageReference, + oci_manifest: dict, + tag: bool = True, + _urlopen: spack.oci.opener.MaybeOpen = None, +): + """Uploads a manifest/index to a registry + + Args: + ref: The image reference. + oci_manifest: The OCI manifest or index. + tag: When true, use the tag, otherwise use the digest, + this is relevant for multi-arch images, where the + tag is an index, referencing the manifests by digest. + + Returns: + The digest and size of the uploaded manifest. + """ + _urlopen = _urlopen or spack.oci.opener.urlopen + + data = json.dumps(oci_manifest, separators=(",", ":")).encode() + digest = Digest.from_sha256(hashlib.sha256(data).hexdigest()) + size = len(data) + + if not tag: + ref = ref.with_digest(digest) + + response = _urlopen( + Request( + url=ref.manifest_url(), + method="PUT", + data=data, + headers={"Content-Type": oci_manifest["mediaType"]}, + ) + ) + + spack.oci.opener.ensure_status(response, 201) + return digest, size + + +def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference: + """Given an OCI based mirror, extract the URL and image name from it""" + url = mirror.push_url + if not url.startswith("oci://"): + raise ValueError(f"Mirror {mirror} is not an OCI mirror") + return ImageReference.from_string(url[6:]) + + +def blob_exists( + ref: ImageReference, digest: Digest, _urlopen: spack.oci.opener.MaybeOpen = None +) -> bool: + """Checks if a blob exists in an OCI registry""" + try: + _urlopen = _urlopen or spack.oci.opener.urlopen + response = _urlopen(Request(url=ref.blob_url(digest), method="HEAD")) + return response.status == 200 + except urllib.error.HTTPError as e: + if e.getcode() == 404: + return False + raise + + +def copy_missing_layers( + src: ImageReference, + dst: ImageReference, + architecture: str, + _urlopen: spack.oci.opener.MaybeOpen = None, +) -> Tuple[dict, dict]: + """Copy image layers from src to dst for given architecture. + + Args: + src: The source image reference. + dst: The destination image reference. + architecture: The architecture (when referencing an index) + + Returns: + Tuple of manifest and config of the base image. 
+ """ + _urlopen = _urlopen or spack.oci.opener.urlopen + manifest, config = get_manifest_and_config(src, architecture, _urlopen=_urlopen) + + # Get layer digests + digests = [Digest.from_string(layer["digest"]) for layer in manifest["layers"]] + + # Filter digests that are don't exist in the registry + missing_digests = [ + digest for digest in digests if not blob_exists(dst, digest, _urlopen=_urlopen) + ] + + if not missing_digests: + return manifest, config + + # Pull missing blobs, push them to the registry + with spack.stage.StageComposite.from_iterable( + make_stage(url=src.blob_url(digest), digest=digest, _urlopen=_urlopen) + for digest in missing_digests + ) as stages: + stages.fetch() + stages.check() + stages.cache_local() + + for stage, digest in zip(stages, missing_digests): + # No need to check existince again, force=True. + upload_blob( + dst, file=stage.save_filename, force=True, digest=digest, _urlopen=_urlopen + ) + + return manifest, config + + +#: OCI manifest content types (including docker type) +manifest_content_type = [ + "application/vnd.oci.image.manifest.v1+json", + "application/vnd.docker.distribution.manifest.v2+json", +] + +#: OCI index content types (including docker type) +index_content_type = [ + "application/vnd.oci.image.index.v1+json", + "application/vnd.docker.distribution.manifest.list.v2+json", +] + +#: All OCI manifest / index content types +all_content_type = manifest_content_type + index_content_type + + +def get_manifest_and_config( + ref: ImageReference, + architecture="amd64", + recurse=3, + _urlopen: spack.oci.opener.MaybeOpen = None, +) -> Tuple[dict, dict]: + """Recursively fetch manifest and config for a given image reference + with a given architecture. + + Args: + ref: The image reference. + architecture: The architecture (when referencing an index) + recurse: How many levels of index to recurse into. 
+ + Returns: + A tuple of (manifest, config)""" + + _urlopen = _urlopen or spack.oci.opener.urlopen + + # Get manifest + response: HTTPResponse = _urlopen( + Request(url=ref.manifest_url(), headers={"Accept": ", ".join(all_content_type)}) + ) + + # Recurse when we find an index + if response.headers["Content-Type"] in index_content_type: + if recurse == 0: + raise Exception("Maximum recursion depth reached while fetching OCI manifest") + + index = json.load(response) + manifest_meta = next( + manifest + for manifest in index["manifests"] + if manifest["platform"]["architecture"] == architecture + ) + + return get_manifest_and_config( + ref.with_digest(manifest_meta["digest"]), + architecture=architecture, + recurse=recurse - 1, + _urlopen=_urlopen, + ) + + # Otherwise, require a manifest + if response.headers["Content-Type"] not in manifest_content_type: + raise Exception(f"Unknown content type {response.headers['Content-Type']}") + + manifest = json.load(response) + + # Download, verify and cache config file + config_digest = Digest.from_string(manifest["config"]["digest"]) + with make_stage(ref.blob_url(config_digest), config_digest, _urlopen=_urlopen) as stage: + stage.fetch() + stage.check() + stage.cache_local() + with open(stage.save_filename, "rb") as f: + config = json.load(f) + + return manifest, config + + +#: Same as upload_manifest, but with retry wrapper +upload_manifest_with_retry = spack.oci.opener.default_retry(upload_manifest) + +#: Same as upload_blob, but with retry wrapper +upload_blob_with_retry = spack.oci.opener.default_retry(upload_blob) + +#: Same as get_manifest_and_config, but with retry wrapper +get_manifest_and_config_with_retry = spack.oci.opener.default_retry(get_manifest_and_config) + +#: Same as copy_missing_layers, but with retry wrapper +copy_missing_layers_with_retry = spack.oci.opener.default_retry(copy_missing_layers) + + +def make_stage( + url: str, digest: Digest, keep: bool = False, _urlopen: spack.oci.opener.MaybeOpen = None +) -> spack.stage.Stage: + _urlopen = _urlopen or spack.oci.opener.urlopen + fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy( + url, checksum=digest.digest, _urlopen=_urlopen + ) + # Use blobs// as the cache path, which follows + # the OCI Image Layout Specification. What's missing though, + # is the `oci-layout` and `index.json` files, which are + # required by the spec. + return spack.stage.Stage( + fetch_strategy, + mirror_paths=spack.mirror.OCIImageLayout(digest), + name=digest.digest, + keep=keep, + ) diff --git a/lib/spack/spack/oci/opener.py b/lib/spack/spack/oci/opener.py new file mode 100644 index 00000000000000..792598578d3204 --- /dev/null +++ b/lib/spack/spack/oci/opener.py @@ -0,0 +1,442 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""All the logic for OCI fetching and authentication""" + +import base64 +import json +import re +import time +import urllib.error +import urllib.parse +import urllib.request +from enum import Enum, auto +from http.client import HTTPResponse +from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple +from urllib.request import Request + +import llnl.util.lang + +import spack.config +import spack.mirror +import spack.parser +import spack.repo +import spack.util.web + +from .image import ImageReference + + +def _urlopen(): + opener = create_opener() + + def dispatch_open(fullurl, data=None, timeout=None): + timeout = timeout or spack.config.get("config:connect_timeout", 10) + return opener.open(fullurl, data, timeout) + + return dispatch_open + + +OpenType = Callable[..., HTTPResponse] +MaybeOpen = Optional[OpenType] + +#: Opener that automatically uses OCI authentication based on mirror config +urlopen: OpenType = llnl.util.lang.Singleton(_urlopen) + + +SP = r" " +OWS = r"[ \t]*" +BWS = OWS +HTAB = r"\t" +VCHAR = r"\x21-\x7E" +tchar = r"[!#$%&'*+\-.^_`|~0-9A-Za-z]" +token = rf"{tchar}+" +obs_text = r"\x80-\xFF" +qdtext = rf"[{HTAB}{SP}\x21\x23-\x5B\x5D-\x7E{obs_text}]" +quoted_pair = rf"\\([{HTAB}{SP}{VCHAR}{obs_text}])" +quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"' + + +class TokenType(spack.parser.TokenBase): + AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})" + # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)" # todo... support this? + TOKEN = rf"{tchar}+" + EQUALS = rf"{BWS}={BWS}" + COMMA = rf"{OWS},{OWS}" + SPACE = r" +" + EOF = r"$" + ANY = r"." + + +TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType] + +ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES)) + + +class State(Enum): + CHALLENGE = auto() + AUTH_PARAM_LIST_START = auto() + AUTH_PARAM = auto() + NEXT_IN_LIST = auto() + AUTH_PARAM_OR_SCHEME = auto() + + +def tokenize(input: str): + scanner = ALL_TOKENS.scanner(input) # type: ignore[attr-defined] + + for match in iter(scanner.match, None): # type: ignore[var-annotated] + yield spack.parser.Token( + TokenType.__members__[match.lastgroup], # type: ignore[attr-defined] + match.group(), # type: ignore[attr-defined] + match.start(), # type: ignore[attr-defined] + match.end(), # type: ignore[attr-defined] + ) + + +class Challenge: + __slots__ = ["scheme", "params"] + + def __init__( + self, scheme: Optional[str] = None, params: Optional[List[Tuple[str, str]]] = None + ) -> None: + self.scheme = scheme or "" + self.params = params or [] + + def __repr__(self) -> str: + return f"Challenge({self.scheme}, {self.params})" + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, Challenge) + and self.scheme == other.scheme + and self.params == other.params + ) + + +def parse_www_authenticate(input: str): + """Very basic parsing of www-authenticate parsing (RFC7235 section 4.1) + Notice: this omits token68 support.""" + + # auth-scheme = token + # auth-param = token BWS "=" BWS ( token / quoted-string ) + # challenge = auth-scheme [ 1*SP ( token68 / #auth-param ) ] + # WWW-Authenticate = 1#challenge + + challenges: List[Challenge] = [] + + _unquote = re.compile(quoted_pair).sub + unquote = lambda s: _unquote(r"\1", s[1:-1]) + + mode: State = State.CHALLENGE + tokens = tokenize(input) + + current_challenge = Challenge() + + def extract_auth_param(input: str) -> Tuple[str, str]: + key, value = input.split("=", 1) + key = key.rstrip() + value = value.lstrip() + if 
value.startswith('"'): + value = unquote(value) + return key, value + + while True: + token: spack.parser.Token = next(tokens) + + if mode == State.CHALLENGE: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.TOKEN: + current_challenge.scheme = token.value + mode = State.AUTH_PARAM_LIST_START + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM_LIST_START: + if token.kind == TokenType.EOF: + challenges.append(current_challenge) + break + elif token.kind == TokenType.COMMA: + # Challenge without param list, followed by another challenge. + challenges.append(current_challenge) + current_challenge = Challenge() + mode = State.CHALLENGE + elif token.kind == TokenType.SPACE: + # A space means it must be followed by param list + mode = State.AUTH_PARAM + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.AUTH_PARAM: + key, value = extract_auth_param(token.value) + current_challenge.params.append((key, value)) + mode = State.NEXT_IN_LIST + else: + raise ValueError(token) + + elif mode == State.NEXT_IN_LIST: + if token.kind == TokenType.EOF: + challenges.append(current_challenge) + break + elif token.kind == TokenType.COMMA: + mode = State.AUTH_PARAM_OR_SCHEME + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM_OR_SCHEME: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.TOKEN: + challenges.append(current_challenge) + current_challenge = Challenge(token.value) + mode = State.AUTH_PARAM_LIST_START + elif token.kind == TokenType.AUTH_PARAM: + key, value = extract_auth_param(token.value) + current_challenge.params.append((key, value)) + mode = State.NEXT_IN_LIST + + return challenges + + +class RealmServiceScope(NamedTuple): + realm: str + service: str + scope: str + + +class UsernamePassword(NamedTuple): + username: str + password: str + + +def get_bearer_challenge(challenges: List[Challenge]) -> Optional[RealmServiceScope]: + # Find a challenge that we can handle (currently only Bearer) + challenge = next((c for c in challenges if c.scheme == "Bearer"), None) + + if challenge is None: + return None + + # Get realm / service / scope from challenge + realm = next((v for k, v in challenge.params if k == "realm"), None) + service = next((v for k, v in challenge.params if k == "service"), None) + scope = next((v for k, v in challenge.params if k == "scope"), None) + + if realm is None or service is None or scope is None: + return None + + return RealmServiceScope(realm, service, scope) + + +class OCIAuthHandler(urllib.request.BaseHandler): + def __init__(self, credentials_provider: Callable[[str], Optional[UsernamePassword]]): + """ + Args: + credentials_provider: A function that takes a domain and may return a UsernamePassword. + """ + self.credentials_provider = credentials_provider + + # Cached bearer tokens for a given domain. + self.cached_tokens: Dict[str, str] = {} + + def obtain_bearer_token(self, registry: str, challenge: RealmServiceScope, timeout) -> str: + # See https://docs.docker.com/registry/spec/auth/token/ + + query = urllib.parse.urlencode( + {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"} + ) + + parsed = urllib.parse.urlparse(challenge.realm)._replace( + query=query, fragment="", params="" + ) + + # Don't send credentials over insecure transport. 
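
Putting the pieces together, the 401 handling boils down to: parse the `WWW-Authenticate` header, pick the Bearer challenge, and request a token from the realm. A sketch with invented header values:

    # From a 401's WWW-Authenticate header to the token request (invented values).
    import urllib.parse
    from spack.oci.opener import get_bearer_challenge, parse_www_authenticate

    header = (
        'Bearer realm="https://ghcr.io/token",'
        'service="ghcr.io",scope="repository:myorg/cache:pull,push"'
    )
    challenge = get_bearer_challenge(parse_www_authenticate(header))
    assert challenge is not None
    # challenge.realm   == "https://ghcr.io/token"
    # challenge.service == "ghcr.io"
    # challenge.scope   == "repository:myorg/cache:pull,push"

    query = urllib.parse.urlencode(
        {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"}
    )
    token_request_url = f"{challenge.realm}?{query}"
    # The handler GETs this URL (adding Basic auth when mirror credentials are
    # configured) and reads the bearer token from the JSON response.
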
+ if parsed.scheme != "https": + raise ValueError( + f"Cannot login to {registry} over insecure {parsed.scheme} connection" + ) + + request = Request(urllib.parse.urlunparse(parsed)) + + # I guess we shouldn't cache this, since we don't know + # the context in which it's used (may depend on config) + pair = self.credentials_provider(registry) + + if pair is not None: + encoded = base64.b64encode(f"{pair.username}:{pair.password}".encode("utf-8")).decode( + "utf-8" + ) + request.add_unredirected_header("Authorization", f"Basic {encoded}") + + # Do a GET request. + response = self.parent.open(request, timeout=timeout) + + # Read the response and parse the JSON + response_json = json.load(response) + + # Get the token from the response + token = response_json["token"] + + # Remember the last obtained token for this registry + # Note: we should probably take into account realm, service and scope + # so we can store multiple tokens for the same registry. + self.cached_tokens[registry] = token + + return token + + def https_request(self, req: Request): + # Eagerly add the bearer token to the request if no + # auth header is set yet, to avoid 401s in multiple + # requests to the same registry. + + # Use has_header, not .headers, since there are two + # types of headers (redirected and unredirected) + if req.has_header("Authorization"): + return req + + parsed = urllib.parse.urlparse(req.full_url) + token = self.cached_tokens.get(parsed.netloc) + + if not token: + return req + + req.add_unredirected_header("Authorization", f"Bearer {token}") + return req + + def http_error_401(self, req: Request, fp, code, msg, headers): + # Login failed, avoid infinite recursion where we go back and + # forth between auth server and registry + if hasattr(req, "login_attempted"): + raise urllib.error.HTTPError( + req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp + ) + + # On 401 Unauthorized, parse the WWW-Authenticate header + # to determine what authentication is required + if "WWW-Authenticate" not in headers: + raise urllib.error.HTTPError( + req.full_url, + code, + "Cannot login to registry, missing WWW-Authenticate header", + headers, + fp, + ) + + header_value = headers["WWW-Authenticate"] + + try: + challenge = get_bearer_challenge(parse_www_authenticate(header_value)) + except ValueError as e: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}", + headers, + fp, + ) from e + + # If there is no bearer challenge, we can't handle it + if not challenge: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, unsupported authentication scheme: {header_value}", + headers, + fp, + ) + + # Get the token from the auth handler + try: + token = self.obtain_bearer_token( + registry=urllib.parse.urlparse(req.get_full_url()).netloc, + challenge=challenge, + timeout=req.timeout, + ) + except ValueError as e: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, failed to obtain bearer token: {e}", + headers, + fp, + ) from e + + # Add the token to the request + req.add_unredirected_header("Authorization", f"Bearer {token}") + setattr(req, "login_attempted", True) + + return self.parent.open(req, timeout=req.timeout) + + +def credentials_from_mirrors( + domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None +) -> Optional[UsernamePassword]: + """Filter out OCI registry credentials from a list of mirrors.""" + + mirrors = mirrors or 
spack.mirror.MirrorCollection().values() + + for mirror in mirrors: + # Prefer push credentials over fetch. Unlikely that those are different + # but our config format allows it. + for direction in ("push", "fetch"): + pair = mirror.get_access_pair(direction) + if pair is None: + continue + url = mirror.get_url(direction) + if not url.startswith("oci://"): + continue + try: + parsed = ImageReference.from_string(url[6:]) + except ValueError: + continue + if parsed.domain == domain: + return UsernamePassword(*pair) + return None + + +def create_opener(): + """Create an opener that can handle OCI authentication.""" + opener = urllib.request.OpenerDirector() + for handler in [ + urllib.request.UnknownHandler(), + urllib.request.HTTPSHandler(), + spack.util.web.SpackHTTPDefaultErrorHandler(), + urllib.request.HTTPRedirectHandler(), + urllib.request.HTTPErrorProcessor(), + OCIAuthHandler(credentials_from_mirrors), + ]: + opener.add_handler(handler) + return opener + + +def ensure_status(response: HTTPResponse, status: int): + """Raise an error if the response status is not the expected one.""" + if response.status == status: + return + + raise urllib.error.HTTPError( + response.geturl(), response.status, response.reason, response.info(), None + ) + + +def default_retry(f, retries: int = 3, sleep=None): + sleep = sleep or time.sleep + + def wrapper(*args, **kwargs): + for i in range(retries): + try: + return f(*args, **kwargs) + except urllib.error.HTTPError as e: + # Retry on internal server errors, and rate limit errors + # Potentially this could take into account the Retry-After header + # if registries support it + if i + 1 != retries and (500 <= e.code < 600 or e.code == 429): + # Exponential backoff + sleep(2**i) + continue + raise + + return wrapper diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index 7e3532e9488ea2..55eee4f1544586 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -66,7 +66,6 @@ import spack.error import spack.spec -import spack.variant import spack.version IS_WINDOWS = sys.platform == "win32" @@ -164,7 +163,7 @@ class Token: __slots__ = "kind", "value", "start", "end" def __init__( - self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None + self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None ): self.kind = kind self.value = value @@ -264,8 +263,8 @@ def tokens(self) -> List[Token]: return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str))) def next_spec( - self, initial_spec: Optional[spack.spec.Spec] = None - ) -> Optional[spack.spec.Spec]: + self, initial_spec: Optional["spack.spec.Spec"] = None + ) -> Optional["spack.spec.Spec"]: """Return the next spec parsed from text. 
Args: @@ -298,7 +297,7 @@ def next_spec( return root_spec - def all_specs(self) -> List[spack.spec.Spec]: + def all_specs(self) -> List["spack.spec.Spec"]: """Return all the specs that remain to be parsed""" return list(iter(self.next_spec, None)) @@ -313,7 +312,9 @@ def __init__(self, ctx): self.has_compiler = False self.has_version = False - def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]: + def parse( + self, initial_spec: Optional["spack.spec.Spec"] = None + ) -> Optional["spack.spec.Spec"]: """Parse a single spec node from a stream of tokens Args: @@ -414,7 +415,7 @@ class FileParser: def __init__(self, ctx): self.ctx = ctx - def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: + def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec": """Parse a spec tree from a specfile. Args: @@ -437,7 +438,7 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: return initial_spec -def parse(text: str) -> List[spack.spec.Spec]: +def parse(text: str) -> List["spack.spec.Spec"]: """Parse text into a list of strings Args: @@ -450,8 +451,8 @@ def parse(text: str) -> List[spack.spec.Spec]: def parse_one_or_raise( - text: str, initial_spec: Optional[spack.spec.Spec] = None -) -> spack.spec.Spec: + text: str, initial_spec: Optional["spack.spec.Spec"] = None +) -> "spack.spec.Spec": """Parse exactly one spec from text and return it, or raise Args: diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 5345c1c03ef5a6..bfe453fa36e950 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -75,6 +75,7 @@ import spack.deptypes as dt import spack.error import spack.hash_types as ht +import spack.parser import spack.patch import spack.paths import spack.platforms @@ -1321,8 +1322,6 @@ def __init__( self.external_path = external_path self.external_module = external_module """ - import spack.parser - # Copy if spec_like is a Spec. 
if isinstance(spec_like, Spec): self._dup(spec_like) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 690a45e7c5106e..03689c39bacfd6 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -37,6 +37,7 @@ import spack.fetch_strategy as fs import spack.mirror import spack.paths +import spack.resource import spack.spec import spack.stage import spack.util.lock @@ -455,6 +456,7 @@ def fetch(self, mirror_only=False, err_msg=None): mirror_urls = [ url_util.join(mirror.fetch_url, rel_path) for mirror in spack.mirror.MirrorCollection(source=True).values() + if not mirror.fetch_url.startswith("oci://") for rel_path in self.mirror_paths ] @@ -658,8 +660,14 @@ def destroy(self): class ResourceStage(Stage): - def __init__(self, url_or_fetch_strategy, root, resource, **kwargs): - super().__init__(url_or_fetch_strategy, **kwargs) + def __init__( + self, + fetch_strategy: fs.FetchStrategy, + root: Stage, + resource: spack.resource.Resource, + **kwargs, + ): + super().__init__(fetch_strategy, **kwargs) self.root_stage = root self.resource = resource diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index 6c9b8c4cf552cb..55ec605913b3f4 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -326,4 +326,8 @@ def fake_push(node, push_url, options): buildcache(*buildcache_create_args) - assert packages_to_push == expected + # Order is not guaranteed, so we can't just compare lists + assert set(packages_to_push) == set(expected) + + # Ensure no duplicates + assert len(set(packages_to_push)) == len(packages_to_push) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index c4b3df92edf17f..3505d7213046f2 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -31,6 +31,7 @@ import spack.binary_distribution import spack.caches +import spack.cmd.buildcache import spack.compilers import spack.config import spack.database @@ -1948,3 +1949,21 @@ def pytest_runtest_setup(item): not_on_windows_marker = item.get_closest_marker(name="not_on_windows") if not_on_windows_marker and sys.platform == "win32": pytest.skip(*not_on_windows_marker.args) + + +@pytest.fixture(scope="function") +def disable_parallel_buildcache_push(monkeypatch): + class MockPool: + def map(self, func, args): + return [func(a) for a in args] + + def starmap(self, func, args): + return [func(*a) for a in args] + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool) diff --git a/lib/spack/spack/test/oci/image.py b/lib/spack/spack/test/oci/image.py new file mode 100644 index 00000000000000..17899d1f4385f7 --- /dev/null +++ b/lib/spack/spack/test/oci/image.py @@ -0,0 +1,101 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import re + +import pytest + +import spack.spec +from spack.oci.image import Digest, ImageReference, default_tag, tag + + +@pytest.mark.parametrize( + "image_ref, expected", + [ + ( + f"example.com:1234/a/b/c:tag@sha256:{'a'*64}", + ("example.com:1234", "a/b/c", "tag", Digest.from_sha256("a" * 64)), + ), + ("example.com:1234/a/b/c:tag", ("example.com:1234", "a/b/c", "tag", None)), + ("example.com:1234/a/b/c", ("example.com:1234", "a/b/c", "latest", None)), + ( + f"example.com:1234/a/b/c@sha256:{'a'*64}", + ("example.com:1234", "a/b/c", "latest", Digest.from_sha256("a" * 64)), + ), + # ipv4 + ("1.2.3.4:1234/a/b/c:tag", ("1.2.3.4:1234", "a/b/c", "tag", None)), + # ipv6 + ("[2001:db8::1]:1234/a/b/c:tag", ("[2001:db8::1]:1234", "a/b/c", "tag", None)), + # Follow docker rules for parsing + ("ubuntu:22.04", ("index.docker.io", "library/ubuntu", "22.04", None)), + ("myname/myimage:abc", ("index.docker.io", "myname/myimage", "abc", None)), + ("myname:1234/myimage:abc", ("myname:1234", "myimage", "abc", None)), + ("localhost/myimage:abc", ("localhost", "myimage", "abc", None)), + ("localhost:1234/myimage:abc", ("localhost:1234", "myimage", "abc", None)), + ], +) +def test_name_parsing(image_ref, expected): + x = ImageReference.from_string(image_ref) + assert (x.domain, x.name, x.tag, x.digest) == expected + + +@pytest.mark.parametrize( + "image_ref", + [ + # wrong order of tag and sha + f"example.com:1234/a/b/c@sha256:{'a'*64}:tag", + # double tag + "example.com:1234/a/b/c:tag:tag", + # empty tag + "example.com:1234/a/b/c:", + # empty digest + "example.com:1234/a/b/c@sha256:", + # unsupport digest algorithm + f"example.com:1234/a/b/c@sha512:{'a'*128}", + # invalid digest length + f"example.com:1234/a/b/c@sha256:{'a'*63}", + # whitespace + "example.com:1234/a/b/c :tag", + "example.com:1234/a/b/c: tag", + "example.com:1234/a/b/c:tag ", + " example.com:1234/a/b/c:tag", + # broken ipv4 + "1.2..3:1234/a/b/c:tag", + ], +) +def test_parsing_failure(image_ref): + with pytest.raises(ValueError): + ImageReference.from_string(image_ref) + + +def test_digest(): + valid_digest = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + + # Test string roundtrip + assert str(Digest.from_string(f"sha256:{valid_digest}")) == f"sha256:{valid_digest}" + + # Invalid digest length + with pytest.raises(ValueError): + Digest.from_string("sha256:abcdef") + + # Missing algorithm + with pytest.raises(ValueError): + Digest.from_string(valid_digest) + + +@pytest.mark.parametrize( + "spec", + [ + # Standard case + "short-name@=1.2.3", + # Unsupported characters in git version + f"git-version@{1:040x}=develop", + # Too long of a name + f"{'too-long':x<256}@=1.2.3", + ], +) +def test_default_tag(spec: str): + """Make sure that computed image tags are valid.""" + assert re.fullmatch(tag, default_tag(spack.spec.Spec(spec))) diff --git a/lib/spack/spack/test/oci/integration_test.py b/lib/spack/spack/test/oci/integration_test.py new file mode 100644 index 00000000000000..b2f9366c3a5fbf --- /dev/null +++ b/lib/spack/spack/test/oci/integration_test.py @@ -0,0 +1,148 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +# These are slow integration tests that do concretization, install, tarballing +# and compression. They still use an in-memory OCI registry. 
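+# The in-memory registry is wired in by temporarily replacing spack.oci.opener.urlopen
+# with a mock opener (see the oci_servers context manager below), so no real network
+# access is performed.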
+ +import hashlib +import json +import os +from contextlib import contextmanager + +import spack.oci.opener +from spack.binary_distribution import gzip_compressed_tarfile +from spack.main import SpackCommand +from spack.oci.image import Digest, ImageReference, default_config, default_manifest +from spack.oci.oci import blob_exists, get_manifest_and_config, upload_blob, upload_manifest +from spack.test.oci.mock_registry import DummyServer, InMemoryOCIRegistry, create_opener + +buildcache = SpackCommand("buildcache") +mirror = SpackCommand("mirror") + + +@contextmanager +def oci_servers(*servers: DummyServer): + old_opener = spack.oci.opener.urlopen + spack.oci.opener.urlopen = create_opener(*servers).open + yield + spack.oci.opener.urlopen = old_opener + + +def test_buildcache_push_command(mutable_database, disable_parallel_buildcache_push): + with oci_servers(InMemoryOCIRegistry("example.com")): + mirror("add", "oci-test", "oci://example.com/image") + + # Push the package(s) to the OCI registry + buildcache("push", "--update-index", "oci-test", "mpileaks^mpich") + + # Remove mpileaks from the database + matches = mutable_database.query_local("mpileaks^mpich") + assert len(matches) == 1 + spec = matches[0] + spec.package.do_uninstall() + + # Reinstall mpileaks from the OCI registry + buildcache("install", "--unsigned", "mpileaks^mpich") + + # Now it should be installed again + assert spec.installed + + # And let's check that the bin/mpileaks executable is there + assert os.path.exists(os.path.join(spec.prefix, "bin", "mpileaks")) + + +def test_buildcache_push_with_base_image_command( + mutable_database, tmpdir, disable_parallel_buildcache_push +): + """Test that we can push a package with a base image to an OCI registry. + + This test is a bit involved, cause we have to create a small base image.""" + + registry_src = InMemoryOCIRegistry("src.example.com") + registry_dst = InMemoryOCIRegistry("dst.example.com") + + base_image = ImageReference.from_string("src.example.com/my-base-image:latest") + + with oci_servers(registry_src, registry_dst): + mirror("add", "oci-test", "oci://dst.example.com/image") + + # TODO: simplify creation of images... + # We create a rootfs.tar.gz, a config file and a manifest file, + # and upload those. + + config, manifest = default_config(architecture="amd64", os="linux"), default_manifest() + + # Create a small rootfs + rootfs = tmpdir.join("rootfs") + rootfs.ensure(dir=True) + rootfs.join("bin").ensure(dir=True) + rootfs.join("bin", "sh").ensure(file=True) + + # Create a tarball of it. 
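+        # (gzip_compressed_tarfile appears to yield the tar object plus running checksums
+        # of the compressed and uncompressed streams, which become the layer digest and
+        # the config's diff_id below)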
+ tarball = tmpdir.join("base.tar.gz") + with gzip_compressed_tarfile(tarball) as (tar, tar_gz_checksum, tar_checksum): + tar.add(rootfs, arcname=".") + + tar_gz_digest = Digest.from_sha256(tar_gz_checksum.hexdigest()) + tar_digest = Digest.from_sha256(tar_checksum.hexdigest()) + + # Save the config file + config["rootfs"]["diff_ids"] = [str(tar_digest)] + config_file = tmpdir.join("config.json") + with open(config_file, "w") as f: + f.write(json.dumps(config)) + + config_digest = Digest.from_sha256( + hashlib.sha256(open(config_file, "rb").read()).hexdigest() + ) + + # Register the layer in the manifest + manifest["layers"].append( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(tar_gz_digest), + "size": tarball.size(), + } + ) + manifest["config"]["digest"] = str(config_digest) + manifest["config"]["size"] = config_file.size() + + # Upload the layer and config file + upload_blob(base_image, tarball, tar_gz_digest) + upload_blob(base_image, config_file, config_digest) + + # Upload the manifest + upload_manifest(base_image, manifest) + + # END TODO + + # Finally... use it as a base image + buildcache("push", "--base-image", str(base_image), "oci-test", "mpileaks^mpich") + + # Figure out what tag was produced + tag = next(tag for _, tag in registry_dst.manifests.keys() if tag.startswith("mpileaks-")) + assert tag is not None + + # Fetch the manifest and config + dst_image = ImageReference.from_string(f"dst.example.com/image:{tag}") + retrieved_manifest, retrieved_config = get_manifest_and_config(dst_image) + + # Check that the base image layer is first. + assert retrieved_manifest["layers"][0]["digest"] == str(tar_gz_digest) + assert retrieved_config["rootfs"]["diff_ids"][0] == str(tar_digest) + + # And also check that we have layers for each link-run dependency + matches = mutable_database.query_local("mpileaks^mpich") + assert len(matches) == 1 + spec = matches[0] + + num_runtime_deps = len(list(spec.traverse(root=True, deptype=("link", "run")))) + + # One base layer + num_runtime_deps + assert len(retrieved_manifest["layers"]) == 1 + num_runtime_deps + + # And verify that all layers including the base layer are present + for layer in retrieved_manifest["layers"]: + assert blob_exists(dst_image, digest=Digest.from_string(layer["digest"])) diff --git a/lib/spack/spack/test/oci/mock_registry.py b/lib/spack/spack/test/oci/mock_registry.py new file mode 100644 index 00000000000000..ec3e85c333ab7e --- /dev/null +++ b/lib/spack/spack/test/oci/mock_registry.py @@ -0,0 +1,410 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import base64 +import email.message +import hashlib +import io +import json +import re +import urllib.error +import urllib.parse +import urllib.request +import uuid +from typing import Callable, Dict, List, Optional, Pattern, Tuple +from urllib.request import Request + +from spack.oci.image import Digest +from spack.oci.opener import OCIAuthHandler + + +class MockHTTPResponse(io.IOBase): + """This is a mock HTTP response, which implements part of http.client.HTTPResponse""" + + def __init__(self, status, reason, headers=None, body=None): + self.msg = None + self.version = 11 + self.url = None + self.headers = email.message.EmailMessage() + self.status = status + self.code = status + self.reason = reason + self.debuglevel = 0 + self._body = body + + if headers is not None: + for key, value in headers.items(): + self.headers[key] = value + + @classmethod + def with_json(cls, status, reason, headers=None, body=None): + """Create a mock HTTP response with JSON string as body""" + body = io.BytesIO(json.dumps(body).encode("utf-8")) + return cls(status, reason, headers, body) + + def read(self, *args, **kwargs): + return self._body.read(*args, **kwargs) + + def getheader(self, name, default=None): + self.headers.get(name, default) + + def getheaders(self): + return self.headers.items() + + def fileno(self): + return 0 + + def getcode(self): + return self.status + + def info(self): + return self.headers + + +class MiddlewareError(Exception): + """Thrown in a handler to return a response early.""" + + def __init__(self, response: MockHTTPResponse): + self.response = response + + +class Router: + """This class is a small router for requests to the OCI registry. + + It is used to dispatch requests to a handler, and middleware can be + used to transform requests, as well as return responses early + (e.g. for authentication).""" + + def __init__(self) -> None: + self.routes: List[Tuple[str, Pattern, Callable]] = [] + self.middleware: List[Callable[[Request], Request]] = [] + + def handle(self, req: Request) -> MockHTTPResponse: + """Dispatch a request to a handler.""" + result = urllib.parse.urlparse(req.full_url) + + # Apply middleware + try: + for handler in self.middleware: + req = handler(req) + except MiddlewareError as e: + return e.response + + for method, path_regex, handler in self.routes: + if method != req.get_method(): + continue + match = re.fullmatch(path_regex, result.path) + if not match: + continue + + return handler(req, **match.groupdict()) + + return MockHTTPResponse(404, "Not found") + + def register(self, method, path: str, handler: Callable): + self.routes.append((method, re.compile(path), handler)) + + def add_middleware(self, handler: Callable[[Request], Request]): + self.middleware.append(handler) + + +class DummyServer: + def __init__(self, domain: str) -> None: + # The domain of the server, e.g. 
"registry.example.com" + self.domain = domain + + # List of (method, url) tuples + self.requests: List[Tuple[str, str]] = [] + + # Dispatches requests to handlers + self.router = Router() + + # Always install a request logger + self.router.add_middleware(self.log_request) + + def handle(self, req: Request) -> MockHTTPResponse: + return self.router.handle(req) + + def log_request(self, req: Request): + path = urllib.parse.urlparse(req.full_url).path + self.requests.append((req.get_method(), path)) + return req + + def clear_log(self): + self.requests = [] + + +class InMemoryOCIRegistry(DummyServer): + """This implements the basic OCI registry API, but in memory. + + It supports two types of blob uploads: + 1. POST + PUT: the client first starts a session with POST, then does a large PUT request + 2. POST: the client does a single POST request with the whole blob + + Option 2 is not supported by all registries, so we allow to disable it, + with allow_single_post=False. + + A third option is to use the chunked upload, but this is not implemented here, because + it's typically a major performance hit in upload speed, so we're not using it in Spack.""" + + def __init__(self, domain: str, allow_single_post: bool = True) -> None: + super().__init__(domain) + self.router.register("GET", r"/v2/", self.index) + self.router.register("HEAD", r"/v2/(?P.+)/blobs/(?P.+)", self.head_blob) + self.router.register("POST", r"/v2/(?P.+)/blobs/uploads/", self.start_session) + self.router.register("PUT", r"/upload", self.put_session) + self.router.register("PUT", r"/v2/(?P.+)/manifests/(?P.+)", self.put_manifest) + self.router.register("GET", r"/v2/(?P.+)/manifests/(?P.+)", self.get_manifest) + self.router.register("GET", r"/v2/(?P.+)/blobs/(?P.+)", self.get_blob) + self.router.register("GET", r"/v2/(?P.+)/tags/list", self.list_tags) + + # If True, allow single POST upload, not all registries support this + self.allow_single_post = allow_single_post + + # Used for POST + PUT upload. This is a map from session ID to image name + self.sessions: Dict[str, str] = {} + + # Set of sha256:... digests that are known to the registry + self.blobs: Dict[str, bytes] = {} + + # Map from (name, tag) to manifest + self.manifests: Dict[Tuple[str, str], Dict] = {} + + def index(self, req: Request): + return MockHTTPResponse.with_json(200, "OK", body={}) + + def head_blob(self, req: Request, name: str, digest: str): + if digest in self.blobs: + return MockHTTPResponse(200, "OK", headers={"Content-Length": "1234"}) + return MockHTTPResponse(404, "Not found") + + def get_blob(self, req: Request, name: str, digest: str): + if digest in self.blobs: + return MockHTTPResponse(200, "OK", body=io.BytesIO(self.blobs[digest])) + return MockHTTPResponse(404, "Not found") + + def start_session(self, req: Request, name: str): + id = str(uuid.uuid4()) + self.sessions[id] = name + + # Check if digest is present (single monolithic upload) + result = urllib.parse.urlparse(req.full_url) + query = urllib.parse.parse_qs(result.query) + + if self.allow_single_post and "digest" in query: + return self.handle_upload( + req, name=name, digest=Digest.from_string(query["digest"][0]) + ) + + return MockHTTPResponse(202, "Accepted", headers={"Location": f"/upload?uuid={id}"}) + + def put_session(self, req: Request): + # Do the upload. 
+ result = urllib.parse.urlparse(req.full_url) + query = urllib.parse.parse_qs(result.query) + + # uuid param should be preserved, and digest should be present + assert "uuid" in query and len(query["uuid"]) == 1 + assert "digest" in query and len(query["digest"]) == 1 + + id = query["uuid"][0] + assert id in self.sessions + + name, digest = self.sessions[id], Digest.from_string(query["digest"][0]) + + response = self.handle_upload(req, name=name, digest=digest) + + # End the session + del self.sessions[id] + + return response + + def put_manifest(self, req: Request, name: str, ref: str): + # In requests, Python runs header.capitalize(). + content_type = req.get_header("Content-type") + assert content_type in ( + "application/vnd.oci.image.manifest.v1+json", + "application/vnd.oci.image.index.v1+json", + ) + + index_or_manifest = json.loads(self._require_data(req)) + + # Verify that we have all blobs (layers for manifest, manifests for index) + if content_type == "application/vnd.oci.image.manifest.v1+json": + for layer in index_or_manifest["layers"]: + assert layer["digest"] in self.blobs, "Missing blob while uploading manifest" + + else: + for manifest in index_or_manifest["manifests"]: + assert ( + name, + manifest["digest"], + ) in self.manifests, "Missing manifest while uploading index" + + self.manifests[(name, ref)] = index_or_manifest + + return MockHTTPResponse( + 201, "Created", headers={"Location": f"/v2/{name}/manifests/{ref}"} + ) + + def get_manifest(self, req: Request, name: str, ref: str): + if (name, ref) not in self.manifests: + return MockHTTPResponse(404, "Not found") + + manifest_or_index = self.manifests[(name, ref)] + + return MockHTTPResponse.with_json( + 200, + "OK", + headers={"Content-type": manifest_or_index["mediaType"]}, + body=manifest_or_index, + ) + + def _require_data(self, req: Request) -> bytes: + """Extract request.data, it's type remains a mystery""" + assert req.data is not None + + if hasattr(req.data, "read"): + return req.data.read() + elif isinstance(req.data, bytes): + return req.data + + raise ValueError("req.data should be bytes or have a read() method") + + def handle_upload(self, req: Request, name: str, digest: Digest): + """Verify the digest, save the blob, return created status""" + data = self._require_data(req) + assert hashlib.sha256(data).hexdigest() == digest.digest + self.blobs[str(digest)] = data + return MockHTTPResponse(201, "Created", headers={"Location": f"/v2/{name}/blobs/{digest}"}) + + def list_tags(self, req: Request, name: str): + # List all tags, exclude digests. 
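+        # Manifests pushed by digest rather than by tag are keyed by "sha256:..." strings,
+        # and ":" cannot occur in a valid tag, so filtering on ":" drops them.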
+ tags = [_tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag] + tags.sort() + return MockHTTPResponse.with_json(200, "OK", body={"tags": tags}) + + +class DummyServerUrllibHandler(urllib.request.BaseHandler): + """Glue between urllib and DummyServer, routing requests to + the correct mock server for a given domain.""" + + def __init__(self) -> None: + self.servers: Dict[str, DummyServer] = {} + + def add_server(self, domain: str, api: DummyServer): + self.servers[domain] = api + return self + + def https_open(self, req: Request): + domain = urllib.parse.urlparse(req.full_url).netloc + + if domain not in self.servers: + return MockHTTPResponse(404, "Not found") + + return self.servers[domain].handle(req) + + +class InMemoryOCIRegistryWithAuth(InMemoryOCIRegistry): + """This is another in-memory OCI registry, but it requires authentication.""" + + def __init__( + self, domain, token: Optional[str], realm: str, allow_single_post: bool = True + ) -> None: + super().__init__(domain, allow_single_post) + self.token = token # token to accept + self.realm = realm # url to the authorization server + self.router.add_middleware(self.authenticate) + + def authenticate(self, req: Request): + # Any request needs an Authorization header + authorization = req.get_header("Authorization") + + if authorization is None: + raise MiddlewareError(self.unauthorized()) + + # Ensure that the token is correct + assert authorization.startswith("Bearer ") + token = authorization[7:] + + if token != self.token: + raise MiddlewareError(self.unauthorized()) + + return req + + def unauthorized(self): + return MockHTTPResponse( + 401, + "Unauthorized", + { + "www-authenticate": f'Bearer realm="{self.realm}",' + f'service="{self.domain}",' + 'scope="repository:spack-registry:pull,push"' + }, + ) + + +class MockBearerTokenServer(DummyServer): + """Simulates a basic server that hands out bearer tokens + at the /login endpoint for the following services: + public.example.com, which doesn't require Basic Auth + private.example.com, which requires Basic Auth, with user:pass + """ + + def __init__(self, domain: str) -> None: + super().__init__(domain) + self.router.register("GET", "/login", self.login) + + def login(self, req: Request): + url = urllib.parse.urlparse(req.full_url) + query_params = urllib.parse.parse_qs(url.query) + + # Verify query params, from the www-authenticate header + assert query_params["client_id"] == ["spack"] + assert len(query_params["service"]) == 1 + assert query_params["scope"] == ["repository:spack-registry:pull,push"] + + service = query_params["service"][0] + + if service == "public.example.com": + return self.public_auth(req) + elif service == "private.example.com": + return self.private_auth(req) + + return MockHTTPResponse(404, "Not found") + + def public_auth(self, req: Request): + # No need to login with username and password for the public registry + assert req.get_header("Authorization") is None + return MockHTTPResponse.with_json(200, "OK", body={"token": "public_token"}) + + def private_auth(self, req: Request): + # For the private registry we need to login with username and password + auth_value = req.get_header("Authorization") + + if ( + auth_value is None + or not auth_value.startswith("Basic ") + or base64.b64decode(auth_value[6:]) != b"user:pass" + ): + return MockHTTPResponse(401, "Unauthorized") + + return MockHTTPResponse.with_json(200, "OK", body={"token": "private_token"}) + + +def create_opener(*servers: DummyServer, credentials_provider=None): 
+ """Creates a mock opener, that can be used to fake requests to a list + of servers.""" + opener = urllib.request.OpenerDirector() + handler = DummyServerUrllibHandler() + for server in servers: + handler.add_server(server.domain, server) + opener.add_handler(handler) + opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) + opener.add_handler(urllib.request.HTTPErrorProcessor()) + if credentials_provider is not None: + opener.add_handler(OCIAuthHandler(credentials_provider)) + return opener diff --git a/lib/spack/spack/test/oci/urlopen.py b/lib/spack/spack/test/oci/urlopen.py new file mode 100644 index 00000000000000..16efdfe12d9673 --- /dev/null +++ b/lib/spack/spack/test/oci/urlopen.py @@ -0,0 +1,672 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import hashlib +import json +import urllib.error +import urllib.parse +import urllib.request +from urllib.request import Request + +import pytest + +import spack.mirror +from spack.oci.image import Digest, ImageReference, default_config, default_manifest +from spack.oci.oci import ( + copy_missing_layers, + get_manifest_and_config, + image_from_mirror, + upload_blob, + upload_manifest, +) +from spack.oci.opener import ( + Challenge, + RealmServiceScope, + UsernamePassword, + credentials_from_mirrors, + default_retry, + get_bearer_challenge, + parse_www_authenticate, +) +from spack.test.oci.mock_registry import ( + DummyServer, + DummyServerUrllibHandler, + InMemoryOCIRegistry, + InMemoryOCIRegistryWithAuth, + MiddlewareError, + MockBearerTokenServer, + MockHTTPResponse, + create_opener, +) + + +def test_parse_www_authenticate(): + """Test parsing of valid WWW-Authenticate header, check whether it's + decomposed into a list of challenges with correct scheme and parameters + according to RFC 7235 section 4.1""" + www_authenticate = 'Bearer realm="https://spack.io/authenticate",service="spack-registry",scope="repository:spack-registry:pull,push"' + assert parse_www_authenticate(www_authenticate) == [ + Challenge( + "Bearer", + [ + ("realm", "https://spack.io/authenticate"), + ("service", "spack-registry"), + ("scope", "repository:spack-registry:pull,push"), + ], + ) + ] + + assert parse_www_authenticate("Bearer") == [Challenge("Bearer")] + assert parse_www_authenticate("MethodA, MethodB,MethodC") == [ + Challenge("MethodA"), + Challenge("MethodB"), + Challenge("MethodC"), + ] + + assert parse_www_authenticate( + 'Digest realm="Digest Realm", nonce="1234567890", algorithm=MD5, qop="auth"' + ) == [ + Challenge( + "Digest", + [ + ("realm", "Digest Realm"), + ("nonce", "1234567890"), + ("algorithm", "MD5"), + ("qop", "auth"), + ], + ) + ] + + assert parse_www_authenticate( + r'Newauth realm="apps", type=1, title="Login to \"apps\"", Basic realm="simple"' + ) == [ + Challenge("Newauth", [("realm", "apps"), ("type", "1"), ("title", 'Login to "apps"')]), + Challenge("Basic", [("realm", "simple")]), + ] + + +@pytest.mark.parametrize( + "invalid_str", + [ + # Not comma separated + "SchemeA SchemeB SchemeC", + # Unexpected eof + "SchemeA, SchemeB, SchemeC, ", + # Invalid auth param or scheme + r"Scheme x=y, ", + # Unexpected eof + "Scheme key=", + # Invalid token + r'"Bearer"', + # Invalid token + r'Scheme"xyz"', + # No auth param + r"Scheme ", + ], +) +def test_invalid_www_authenticate(invalid_str): + with pytest.raises(ValueError): + parse_www_authenticate(invalid_str) + + +def 
test_get_bearer_challenge(): + """Test extracting Bearer challenge from a list of challenges""" + + # Only an incomplete bearer challenge, missing service and scope, not usable. + assert ( + get_bearer_challenge( + [ + Challenge("Bearer", [("realm", "https://spack.io/authenticate")]), + Challenge("Basic", [("realm", "simple")]), + Challenge( + "Digest", + [ + ("realm", "Digest Realm"), + ("nonce", "1234567890"), + ("algorithm", "MD5"), + ("qop", "auth"), + ], + ), + ] + ) + is None + ) + + # Multiple challenges, should pick the bearer one. + assert get_bearer_challenge( + [ + Challenge( + "Dummy", + [("realm", "https://example.com/"), ("service", "service"), ("scope", "scope")], + ), + Challenge( + "Bearer", + [ + ("realm", "https://spack.io/authenticate"), + ("service", "spack-registry"), + ("scope", "repository:spack-registry:pull,push"), + ], + ), + ] + ) == RealmServiceScope( + "https://spack.io/authenticate", "spack-registry", "repository:spack-registry:pull,push" + ) + + +@pytest.mark.parametrize( + "image_ref,token", + [ + ("public.example.com/spack-registry:latest", "public_token"), + ("private.example.com/spack-registry:latest", "private_token"), + ], +) +def test_automatic_oci_authentication(image_ref, token): + image = ImageReference.from_string(image_ref) + + def credentials_provider(domain: str): + return UsernamePassword("user", "pass") if domain == "private.example.com" else None + + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, token=token, realm="https://auth.example.com/login" + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=credentials_provider, + ) + + # Run this twice, as it will triggers a code path that caches the bearer token + assert opener.open(image.endpoint()).status == 200 + assert opener.open(image.endpoint()).status == 200 + + +def test_wrong_credentials(): + """Test that when wrong credentials are rejected by the auth server, we + get a 401 error.""" + credentials_provider = lambda domain: UsernamePassword("wrong", "wrong") + image = ImageReference.from_string("private.example.com/image") + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, token="something", realm="https://auth.example.com/login" + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=credentials_provider, + ) + + with pytest.raises(urllib.error.HTTPError) as e: + opener.open(image.endpoint()) + + assert e.value.getcode() == 401 + + +def test_wrong_bearer_token_returned_by_auth_server(): + """When the auth server returns a wrong bearer token, we should get a 401 error + when the request we attempt fails. 
We shouldn't go in circles getting a 401 from + the registry, then a non-working token from the auth server, then a 401 from the + registry, etc.""" + image = ImageReference.from_string("private.example.com/image") + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, + token="other_token_than_token_server_provides", + realm="https://auth.example.com/login", + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=lambda domain: UsernamePassword("user", "pass"), + ) + + with pytest.raises(urllib.error.HTTPError) as e: + opener.open(image.endpoint()) + + assert e.value.getcode() == 401 + + +class TrivialAuthServer(DummyServer): + """A trivial auth server that hands out a bearer token at GET /login.""" + + def __init__(self, domain: str, token: str) -> None: + super().__init__(domain) + self.router.register("GET", "/login", self.login) + self.token = token + + def login(self, req: Request): + return MockHTTPResponse.with_json(200, "OK", body={"token": self.token}) + + +def test_registry_with_short_lived_bearer_tokens(): + """An issued bearer token is mostly opaque to the client, but typically + it embeds a short-lived expiration date. To speed up requests to a registry, + it's good not to authenticate on every request, but to cache the bearer token, + however: we have to deal with the case of an expired bearer token. + + Here we test that when the bearer token expires, we authenticate again, and + when the token is still valid, we don't re-authenticate.""" + + image = ImageReference.from_string("private.example.com/image") + credentials_provider = lambda domain: UsernamePassword("user", "pass") + + auth_server = TrivialAuthServer("auth.example.com", token="token") + registry_server = InMemoryOCIRegistryWithAuth( + image.domain, token="token", realm="https://auth.example.com/login" + ) + urlopen = create_opener( + registry_server, auth_server, credentials_provider=credentials_provider + ).open + + # First request, should work with token "token" + assert urlopen(image.endpoint()).status == 200 + + # Invalidate the token on the registry + registry_server.token = "new_token" + auth_server.token = "new_token" + + # Second request: reusing the cached token should fail + # but in the background we will get a new token from the auth server + assert urlopen(image.endpoint()).status == 200 + + # Subsequent requests should work with the same token, let's do two more + assert urlopen(image.endpoint()).status == 200 + assert urlopen(image.endpoint()).status == 200 + + # And finally, we should see that we've issues exactly two requests to the auth server + assert auth_server.requests == [("GET", "/login"), ("GET", "/login")] + + # Whereas we've done more requests to the registry + assert registry_server.requests == [ + ("GET", "/v2/"), # 1: without bearer token + ("GET", "/v2/"), # 2: retry with bearer token + ("GET", "/v2/"), # 3: with incorrect bearer token + ("GET", "/v2/"), # 4: retry with new bearer token + ("GET", "/v2/"), # 5: with recyled correct bearer token + ("GET", "/v2/"), # 6: with recyled correct bearer token + ] + + +class InMemoryRegistryWithUnsupportedAuth(InMemoryOCIRegistry): + """A registry that does set a WWW-Authenticate header, but + with a challenge we don't support.""" + + def __init__(self, domain: str, allow_single_post: bool = True, www_authenticate=None) -> None: + self.www_authenticate = www_authenticate + super().__init__(domain, allow_single_post) + self.router.add_middleware(self.unsupported_auth_method) + + def 
unsupported_auth_method(self, req: Request): + headers = {} + if self.www_authenticate: + headers["WWW-Authenticate"] = self.www_authenticate + raise MiddlewareError(MockHTTPResponse(401, "Unauthorized", headers=headers)) + + +@pytest.mark.parametrize( + "www_authenticate,error_message", + [ + # missing service and scope + ('Bearer realm="https://auth.example.com/login"', "unsupported authentication scheme"), + # we don't do basic auth + ('Basic realm="https://auth.example.com/login"', "unsupported authentication scheme"), + # multiple unsupported challenges + ( + "CustomChallenge method=unsupported, OtherChallenge method=x,param=y", + "unsupported authentication scheme", + ), + # no challenge + (None, "missing WWW-Authenticate header"), + # malformed challenge, missing quotes + ("Bearer realm=https://auth.example.com", "malformed WWW-Authenticate header"), + # http instead of https + ('Bearer realm="http://auth.example.com",scope=x,service=y', "insecure http connection"), + ], +) +def test_auth_method_we_cannot_handle_is_error(www_authenticate, error_message): + # We can only handle WWW-Authenticate with a Bearer challenge + image = ImageReference.from_string("private.example.com/image") + urlopen = create_opener( + InMemoryRegistryWithUnsupportedAuth(image.domain, www_authenticate=www_authenticate), + TrivialAuthServer("auth.example.com", token="token"), + credentials_provider=lambda domain: UsernamePassword("user", "pass"), + ).open + + with pytest.raises(urllib.error.HTTPError, match=error_message) as e: + urlopen(image.endpoint()) + assert e.value.getcode() == 401 + + +# Parametrize over single POST vs POST + PUT. +@pytest.mark.parametrize("client_single_request", [True, False]) +@pytest.mark.parametrize("server_single_request", [True, False]) +def test_oci_registry_upload(tmpdir, client_single_request, server_single_request): + opener = urllib.request.OpenerDirector() + opener.add_handler( + DummyServerUrllibHandler().add_server( + "example.com", InMemoryOCIRegistry(server_single_request) + ) + ) + opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) + opener.add_handler(urllib.request.HTTPErrorProcessor()) + + # Create a small blob + blob = tmpdir.join("blob") + blob.write("Hello world!") + + image = ImageReference.from_string("example.com/image:latest") + digest = Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest()) + + # Set small file size larger than the blob iff we're doing single request + small_file_size = 1024 if client_single_request else 0 + + # Upload once, should actually upload + assert upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + # Second time should exit as it exists + assert not upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + # Force upload should upload again + assert upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + force=True, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + +def test_copy_missing_layers(tmpdir, config): + """Test copying layers from one registry to another. + Creates 3 blobs, 1 config and 1 manifest in registry A + and copies layers to registry B. Then checks that all + layers are present in registry B. Finally it runs the copy + again and checks that no new layers are uploaded.""" + + # NOTE: config fixture is used to disable default source mirrors + # which are used in Stage(...). 
Otherwise this test doesn't really + # rely on globals. + + src = ImageReference.from_string("a.example.com/image:x") + dst = ImageReference.from_string("b.example.com/image:y") + + src_registry = InMemoryOCIRegistry(src.domain) + dst_registry = InMemoryOCIRegistry(dst.domain) + + urlopen = create_opener(src_registry, dst_registry).open + + # TODO: make it a bit easier to create bunch of blobs + config + manifest? + + # Create a few blobs and a config file + blobs = [tmpdir.join(f"blob{i}") for i in range(3)] + + for i, blob in enumerate(blobs): + blob.write(f"Blob {i}") + + digests = [ + Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest()) for blob in blobs + ] + + config = default_config(architecture="amd64", os="linux") + configfile = tmpdir.join("config.json") + configfile.write(json.dumps(config)) + config_digest = Digest.from_sha256(hashlib.sha256(configfile.read_binary()).hexdigest()) + + for blob, digest in zip(blobs, digests): + upload_blob(src, blob.strpath, digest, _urlopen=urlopen) + upload_blob(src, configfile.strpath, config_digest, _urlopen=urlopen) + + # Then create a manifest referencing them + manifest = default_manifest() + + for blob, digest in zip(blobs, digests): + manifest["layers"].append( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(digest), + "size": blob.size(), + } + ) + + manifest["config"] = { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(config_digest), + "size": configfile.size(), + } + + upload_manifest(src, manifest, _urlopen=urlopen) + + # Finally, copy the image from src to dst + copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen) + + # Check that all layers (not config) were copied and identical + assert len(dst_registry.blobs) == len(blobs) + for blob, digest in zip(blobs, digests): + assert dst_registry.blobs.get(str(digest)) == blob.read_binary() + + is_upload = lambda method, path: method == "POST" and path == "/v2/image/blobs/uploads/" + is_exists = lambda method, path: method == "HEAD" and path.startswith("/v2/image/blobs/") + + # Check that exactly 3 uploads were initiated, and that we don't do + # double existence checks when uploading. + assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 3 + assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3 + + # Check that re-uploading skips existing layers. + dst_registry.clear_log() + copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen) + + # Check that no uploads were initiated, only existence checks were done. 
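+    # (the destination should only receive HEAD existence checks on the second copy,
+    # and no new upload sessions)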
+ assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 0 + assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3 + + +def test_image_from_mirror(): + mirror = spack.mirror.Mirror("oci://example.com/image") + assert image_from_mirror(mirror) == ImageReference.from_string("example.com/image") + + +def test_image_reference_str(): + """Test that with_digest() works with Digest and str.""" + digest_str = f"sha256:{1234:064x}" + digest = Digest.from_string(digest_str) + + img = ImageReference.from_string("example.com/image") + + assert str(img.with_digest(digest)) == f"example.com/image:latest@{digest}" + assert str(img.with_digest(digest_str)) == f"example.com/image:latest@{digest}" + assert str(img.with_tag("hello")) == "example.com/image:hello" + assert str(img.with_tag("hello").with_digest(digest)) == f"example.com/image:hello@{digest}" + + +@pytest.mark.parametrize( + "image", + [ + # white space issue + " example.com/image", + # not alpha-numeric + "hello#world:latest", + ], +) +def test_image_reference_invalid(image): + with pytest.raises(ValueError, match="Invalid image reference"): + ImageReference.from_string(image) + + +def test_default_credentials_provider(): + """The default credentials provider uses a collection of configured + mirrors.""" + + mirrors = [ + # OCI mirror with push credentials + spack.mirror.Mirror( + {"url": "oci://a.example.com/image", "push": {"access_pair": ["user.a", "pass.a"]}} + ), + # Not an OCI mirror + spack.mirror.Mirror( + {"url": "https://b.example.com/image", "access_pair": ["user.b", "pass.b"]} + ), + # No credentials + spack.mirror.Mirror("oci://c.example.com/image"), + # Top-level credentials + spack.mirror.Mirror( + {"url": "oci://d.example.com/image", "access_pair": ["user.d", "pass.d"]} + ), + # Dockerhub short reference + spack.mirror.Mirror( + {"url": "oci://user/image", "access_pair": ["dockerhub_user", "dockerhub_pass"]} + ), + # Localhost (not a dockerhub short reference) + spack.mirror.Mirror( + {"url": "oci://localhost/image", "access_pair": ["user.localhost", "pass.localhost"]} + ), + ] + + assert credentials_from_mirrors("a.example.com", mirrors=mirrors) == UsernamePassword( + "user.a", "pass.a" + ) + assert credentials_from_mirrors("b.example.com", mirrors=mirrors) is None + assert credentials_from_mirrors("c.example.com", mirrors=mirrors) is None + assert credentials_from_mirrors("d.example.com", mirrors=mirrors) == UsernamePassword( + "user.d", "pass.d" + ) + assert credentials_from_mirrors("index.docker.io", mirrors=mirrors) == UsernamePassword( + "dockerhub_user", "dockerhub_pass" + ) + assert credentials_from_mirrors("localhost", mirrors=mirrors) == UsernamePassword( + "user.localhost", "pass.localhost" + ) + + +def test_manifest_index(tmpdir): + """Test obtaining manifest + config from a registry + that has an index""" + urlopen = create_opener(InMemoryOCIRegistry("registry.example.com")).open + + img = ImageReference.from_string("registry.example.com/image") + + # Create two config files and manifests, for different architectures + manifest_descriptors = [] + manifest_and_config = {} + for arch in ("amd64", "arm64"): + file = tmpdir.join(f"config_{arch}.json") + config = default_config(architecture=arch, os="linux") + file.write(json.dumps(config)) + config_digest = Digest.from_sha256(hashlib.sha256(file.read_binary()).hexdigest()) + assert upload_blob(img, file, config_digest, _urlopen=urlopen) + manifest = { + "schemaVersion": 2, + "mediaType": 
"application/vnd.oci.image.manifest.v1+json", + "config": { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(config_digest), + "size": file.size(), + }, + "layers": [], + } + manifest_digest, manifest_size = upload_manifest( + img, manifest, tag=False, _urlopen=urlopen + ) + + manifest_descriptors.append( + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "platform": {"architecture": arch, "os": "linux"}, + "digest": str(manifest_digest), + "size": manifest_size, + } + ) + + manifest_and_config[arch] = (manifest, config) + + # And a single index. + index = { + "schemaVersion": 2, + "mediaType": "application/vnd.oci.image.index.v1+json", + "manifests": manifest_descriptors, + } + + upload_manifest(img, index, tag=True, _urlopen=urlopen) + + # Check that we fetcht the correct manifest and config for each architecture + for arch in ("amd64", "arm64"): + assert ( + get_manifest_and_config(img, architecture=arch, _urlopen=urlopen) + == manifest_and_config[arch] + ) + + # Also test max recursion + with pytest.raises(Exception, match="Maximum recursion depth reached"): + get_manifest_and_config(img, architecture="amd64", recurse=0, _urlopen=urlopen) + + +class BrokenServer(DummyServer): + """Dummy server that returns 500 and 429 errors twice before succeeding""" + + def __init__(self, domain: str) -> None: + super().__init__(domain) + self.router.register("GET", r"/internal-server-error/", self.internal_server_error_twice) + self.router.register("GET", r"/rate-limit/", self.rate_limit_twice) + self.router.register("GET", r"/not-found/", self.not_found) + self.count_500 = 0 + self.count_429 = 0 + + def internal_server_error_twice(self, request: Request): + self.count_500 += 1 + if self.count_500 < 3: + return MockHTTPResponse(500, "Internal Server Error") + else: + return MockHTTPResponse(200, "OK") + + def rate_limit_twice(self, request: Request): + self.count_429 += 1 + if self.count_429 < 3: + return MockHTTPResponse(429, "Rate Limit Exceeded") + else: + return MockHTTPResponse(200, "OK") + + def not_found(self, request: Request): + return MockHTTPResponse(404, "Not Found") + + +@pytest.mark.parametrize( + "url,max_retries,expect_failure,expect_requests", + [ + # 500s should be retried + ("https://example.com/internal-server-error/", 2, True, 2), + ("https://example.com/internal-server-error/", 5, False, 3), + # 429s should be retried + ("https://example.com/rate-limit/", 2, True, 2), + ("https://example.com/rate-limit/", 5, False, 3), + # 404s shouldn't be retried + ("https://example.com/not-found/", 3, True, 1), + ], +) +def test_retry(url, max_retries, expect_failure, expect_requests): + server = BrokenServer("example.com") + urlopen = create_opener(server).open + sleep_time = [] + dont_sleep = lambda t: sleep_time.append(t) # keep track of sleep times + + try: + response = default_retry(urlopen, retries=max_retries, sleep=dont_sleep)(url) + except urllib.error.HTTPError as e: + if not expect_failure: + assert False, f"Unexpected HTTPError: {e}" + else: + if expect_failure: + assert False, "Expected HTTPError, but none was raised" + assert response.status == 200 + + assert len(server.requests) == expect_requests + assert sleep_time == [2**i for i in range(expect_requests - 1)] diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 8eebcc92bc383a..2765a6042e26f4 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -4,10 +4,12 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import hashlib 
-from typing import Any, Callable, Dict # novm +from typing import BinaryIO, Callable, Dict, Optional import llnl.util.tty as tty +HashFactory = Callable[[], "hashlib._Hash"] + #: Set of hash algorithms that Spack can use, mapped to digest size in bytes hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64} # Note: keys are ordered by popularity for earliest return in ``hash_key in version_dict`` checks. @@ -23,7 +25,7 @@ #: cache of hash functions generated -_hash_functions: Dict[str, Callable[[], Any]] = {} +_hash_functions: Dict[str, HashFactory] = {} class DeprecatedHash: @@ -44,55 +46,57 @@ def __call__(self, disable_alert=False): return hashlib.new(self.hash_alg) -def hash_fun_for_algo(algo): +def hash_fun_for_algo(algo: str) -> HashFactory: """Get a function that can perform the specified hash algorithm.""" - hash_gen = _hash_functions.get(algo) - if hash_gen is None: - if algo in _deprecated_hash_algorithms: - try: - hash_gen = DeprecatedHash(algo, tty.debug, disable_security_check=False) - - # call once to get a ValueError if usedforsecurity is needed - hash_gen(disable_alert=True) - except ValueError: - # Some systems may support the 'usedforsecurity' option - # so try with that (but display a warning when it is used) - hash_gen = DeprecatedHash(algo, tty.warn, disable_security_check=True) - else: - hash_gen = getattr(hashlib, algo) - _hash_functions[algo] = hash_gen - - return hash_gen - - -def hash_algo_for_digest(hexdigest): + fun = _hash_functions.get(algo) + if fun: + return fun + elif algo not in _deprecated_hash_algorithms: + _hash_functions[algo] = getattr(hashlib, algo) + else: + try: + deprecated_fun = DeprecatedHash(algo, tty.debug, disable_security_check=False) + + # call once to get a ValueError if usedforsecurity is needed + deprecated_fun(disable_alert=True) + except ValueError: + # Some systems may support the 'usedforsecurity' option + # so try with that (but display a warning when it is used) + deprecated_fun = DeprecatedHash(algo, tty.warn, disable_security_check=True) + _hash_functions[algo] = deprecated_fun + return _hash_functions[algo] + + +def hash_algo_for_digest(hexdigest: str) -> str: """Gets name of the hash algorithm for a hex digest.""" - bytes = len(hexdigest) / 2 - if bytes not in _size_to_hash: - raise ValueError("Spack knows no hash algorithm for this digest: %s" % hexdigest) - return _size_to_hash[bytes] + algo = _size_to_hash.get(len(hexdigest) // 2) + if algo is None: + raise ValueError(f"Spack knows no hash algorithm for this digest: {hexdigest}") + return algo -def hash_fun_for_digest(hexdigest): +def hash_fun_for_digest(hexdigest: str) -> HashFactory: """Gets a hash function corresponding to a hex digest.""" return hash_fun_for_algo(hash_algo_for_digest(hexdigest)) -def checksum(hashlib_algo, filename, **kwargs): - """Returns a hex digest of the filename generated using an - algorithm from hashlib. 
- """ - block_size = kwargs.get("block_size", 2**20) +def checksum_stream(hashlib_algo: HashFactory, fp: BinaryIO, *, block_size: int = 2**20) -> str: + """Returns a hex digest of the stream generated using given algorithm from hashlib.""" hasher = hashlib_algo() - with open(filename, "rb") as file: - while True: - data = file.read(block_size) - if not data: - break - hasher.update(data) + while True: + data = fp.read(block_size) + if not data: + break + hasher.update(data) return hasher.hexdigest() +def checksum(hashlib_algo: HashFactory, filename: str, *, block_size: int = 2**20) -> str: + """Returns a hex digest of the filename generated using an algorithm from hashlib.""" + with open(filename, "rb") as f: + return checksum_stream(hashlib_algo, f, block_size=block_size) + + class Checker: """A checker checks files against one particular hex digest. It will automatically determine what hashing algorithm @@ -115,18 +119,18 @@ class Checker: a 1MB (2**20 bytes) buffer. """ - def __init__(self, hexdigest, **kwargs): + def __init__(self, hexdigest: str, **kwargs) -> None: self.block_size = kwargs.get("block_size", 2**20) self.hexdigest = hexdigest - self.sum = None + self.sum: Optional[str] = None self.hash_fun = hash_fun_for_digest(hexdigest) @property - def hash_name(self): + def hash_name(self) -> str: """Get the name of the hash function this Checker is using.""" return self.hash_fun().name.lower() - def check(self, filename): + def check(self, filename: str) -> bool: """Read the file with the specified name and check its checksum against self.hexdigest. Return True if they match, False otherwise. Actual checksum is stored in self.sum. diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 890948892a1a7d..84b6c3dc1ff3f4 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -571,7 +571,7 @@ _spack_buildcache() { _spack_buildcache_push() { if $list_options then - SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast" + SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs" else _mirrors fi @@ -580,7 +580,7 @@ _spack_buildcache_push() { _spack_buildcache_create() { if $list_options then - SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast" + SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs" else _mirrors fi @@ -1391,7 +1391,7 @@ _spack_mirror_destroy() { _spack_mirror_add() { if $list_options then - SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi @@ -1418,7 +1418,7 @@ _spack_mirror_rm() { _spack_mirror_set_url() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi @@ 
-1427,7 +1427,7 @@ _spack_mirror_set_url() { _spack_mirror_set() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index a09cdfa83716ea..ee9011e11c4857 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -696,7 +696,7 @@ complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit' # spack buildcache push -set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast +set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs= complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache push' -f -k -a '(__fish_spack_specs)' complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -d 'show this help message and exit' @@ -716,9 +716,13 @@ complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -f complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies' complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -f -a fail_fast complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -d 'stop pushing on first failure (default is best effort)' +complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -f -a base_image +complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -d 'specify the base image for the buildcache. ' +complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -f -a jobs +complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -d 'explicitly set number of parallel jobs' # spack buildcache create -set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast +set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs= complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache create' -f -k -a '(__fish_spack_specs)' complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -d 'show this help message and exit' @@ -738,6 +742,10 @@ complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r - complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. 
Alternatively, one can decide to build a cache for only the package or only the dependencies' complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -f -a fail_fast complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -d 'stop pushing on first failure (default is best effort)' +complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -f -a base_image +complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -d 'specify the base image for the buildcache. ' +complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -f -a jobs +complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -d 'explicitly set number of parallel jobs' # spack buildcache install set -g __fish_spack_optspecs_spack_buildcache_install h/help f/force m/multiple u/unsigned o/otherarch @@ -2139,7 +2147,7 @@ complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url - complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url -r -d 'find mirror to destroy by url' # spack mirror add -set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror add' -f complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -d 'show this help message and exit' @@ -2157,6 +2165,10 @@ complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -f complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror remove set -g __fish_spack_optspecs_spack_mirror_remove h/help scope= @@ -2175,7 +2187,7 @@ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -f -a '_ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -d 'configuration scope to modify' # spack mirror set-url -set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set-url' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l 
help -f -a help complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -d 'show this help message and exit' @@ -2195,9 +2207,13 @@ complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile - complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror set -set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -d 'show this help message and exit' @@ -2221,6 +2237,10 @@ complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -f complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror list set -g __fish_spack_optspecs_spack_mirror_list h/help scope= From d215f0da476d1338550bd16635d5595c9f22f978 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 27 Oct 2023 13:40:44 -0400 Subject: [PATCH 348/408] External finding: update default paths; treat .bat as executable on Windows (#39850) .bat or .exe files can be considered executable on Windows. This PR expands the regex for detectable packages to allow for the detection of packages that vendor .bat wrappers (intel mpi for example). 
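
A minimal sketch of the effect: with win_exe_ext() now returning r"(?:\.bat|\.exe)" (the change to lib/spack/spack/util/path.py appears at the end of this patch), a per-package executable pattern matches both forms. The package name and the way the extension is appended are illustrative assumptions:

    import re

    # e.g. an "mpiexec" detection pattern with the new extension regex appended
    pattern = re.compile(r"mpiexec" + r"(?:\.bat|\.exe)")

    assert pattern.match("mpiexec.exe")  # native binaries still match
    assert pattern.match("mpiexec.bat")  # vendored .bat wrappers now match too
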
Additional changes: * Outside of Windows, when searching for executables `path_hints=None` was used to indicate that default path hints should be provided, and `[]` was taken to mean that no defaults should be chosen (in that case, nothing is searched); behavior on Windows has now been updated to match. * Above logic for handling of `path_hints=[]` has also been extended to library search (for both Linux and Windows). * All exceptions for external packages were documented as timeout errors: this commit adds a distinction for other types of errors in warning messages to the user. --- lib/spack/spack/detection/path.py | 96 ++++++++++++++++------------ lib/spack/spack/test/cmd/external.py | 43 +------------ lib/spack/spack/test/detection.py | 30 +++++++++ lib/spack/spack/util/path.py | 2 +- 4 files changed, 89 insertions(+), 82 deletions(-) create mode 100644 lib/spack/spack/test/detection.py diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py index 4de703ac97b0f3..6531ed62da0ef5 100644 --- a/lib/spack/spack/detection/path.py +++ b/lib/spack/spack/detection/path.py @@ -39,12 +39,21 @@ DETECTION_TIMEOUT = 120 -def common_windows_package_paths() -> List[str]: +def common_windows_package_paths(pkg_cls=None) -> List[str]: + """Get the paths for common package installation location on Windows + that are outside the PATH + Returns [] on unix + """ + if sys.platform != "win32": + return [] paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages() paths.extend(find_win32_additional_install_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths()) + if pkg_cls: + paths.extend(compute_windows_user_path_for_package(pkg_cls)) + paths.extend(compute_windows_program_path_for_package(pkg_cls)) return paths @@ -62,8 +71,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]: path_hints: list of paths to be searched. If None the list will be constructed based on the PATH environment variable. """ - if sys.platform == "win32": - path_hints.extend(common_windows_package_paths()) search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints) return path_to_dict(search_paths) @@ -88,30 +95,42 @@ def libraries_in_ld_and_system_library_path( DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment variables as well as the standard system library paths. """ - path_hints = ( - path_hints - or spack.util.environment.get_path("LD_LIBRARY_PATH") + default_lib_search_paths = ( + spack.util.environment.get_path("LD_LIBRARY_PATH") + spack.util.environment.get_path("DYLD_LIBRARY_PATH") + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH") + spack.util.ld_so_conf.host_dynamic_linker_search_paths() ) + path_hints = path_hints if path_hints is not None else default_lib_search_paths + search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) return path_to_dict(search_paths) -def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]: - path_hints.extend(spack.util.environment.get_path("PATH")) - search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) +def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]: + """Get the paths of all libraries available from the system PATH paths. 
+ + For more details, see `libraries_in_ld_and_system_library_path` regarding + return type and contents. + + Args: + path_hints: list of paths to be searched. If None the list will be + constructed based on the set of PATH environment + variables as well as the standard system library paths. + """ + search_hints = ( + path_hints if path_hints is not None else spack.util.environment.get_path("PATH") + ) + search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints) # on Windows, some libraries (.dlls) are found in the bin directory or sometimes # at the search root. Add both of those options to the search scheme - search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints)) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths()) - # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical - # location, so we handle that case specifically. - search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths()) + search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints)) + if path_hints is None: + # if no user provided path was given, add defaults to the search + search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths()) + # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical + # location, so we handle that case specifically. + search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths()) return path_to_dict(search_paths) @@ -125,19 +144,8 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]: class Finder: """Inspects the file-system looking for packages. Guesses places where to look using PATH.""" - def path_hints( - self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None - ) -> List[str]: - """Returns the list of paths to be searched. - - Args: - pkg: package being detected - initial_guess: initial list of paths from caller - """ - result = initial_guess or [] - result.extend(compute_windows_user_path_for_package(pkg)) - result.extend(compute_windows_program_path_for_package(pkg)) - return result + def default_path_hints(self) -> List[str]: + return [] def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]: """Returns the list of patterns used to match candidate files. @@ -245,6 +253,8 @@ def find( Args: pkg_name: package being detected initial_guess: initial list of paths to search from the caller + if None, default paths are searched. If this + is an empty list, nothing will be searched. 
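
The initial_guess contract described above can be summarized with a short usage sketch; the package name and directory are made up, and note that on Windows find() still appends the common package installation roots:

    from spack.detection.path import ExecutablesFinder

    finder = ExecutablesFinder()

    # None: fall back to the finder's defaults (PATH for executables)
    finder.find(pkg_name="cmake", initial_guess=None)

    # a caller-provided list is used as the starting point of the search
    finder.find(pkg_name="cmake", initial_guess=["/opt/tools/bin"])

    # an empty list: nothing is searched
    finder.find(pkg_name="cmake", initial_guess=[])
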
""" import spack.repo @@ -252,13 +262,18 @@ def find( patterns = self.search_patterns(pkg=pkg_cls) if not patterns: return [] - path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess) - candidates = self.candidate_files(patterns=patterns, paths=path_hints) + if initial_guess is None: + initial_guess = self.default_path_hints() + initial_guess.extend(common_windows_package_paths(pkg_cls)) + candidates = self.candidate_files(patterns=patterns, paths=initial_guess) result = self.detect_specs(pkg=pkg_cls, paths=candidates) return result class ExecutablesFinder(Finder): + def default_path_hints(self) -> List[str]: + return spack.util.environment.get_path("PATH") + def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]: result = [] if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"): @@ -298,7 +313,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str] libraries_by_path = ( libraries_in_ld_and_system_library_path(path_hints=paths) if sys.platform != "win32" - else libraries_in_windows_paths(paths) + else libraries_in_windows_paths(path_hints=paths) ) patterns = [re.compile(x) for x in patterns] result = [] @@ -334,21 +349,16 @@ def by_path( # TODO: Packages should be able to define both .libraries and .executables in the future # TODO: determine_spec_details should get all relevant libraries and executables in one call executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder() - - executables_path_guess = ( - spack.util.environment.get_path("PATH") if path_hints is None else path_hints - ) - libraries_path_guess = [] if path_hints is None else path_hints detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {} result = collections.defaultdict(list) with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor: for pkg in packages_to_search: executable_future = executor.submit( - executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess + executables_finder.find, pkg_name=pkg, initial_guess=path_hints ) library_future = executor.submit( - libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess + libraries_finder.find, pkg_name=pkg, initial_guess=path_hints ) detected_specs_by_package[pkg] = executable_future, library_future @@ -359,9 +369,13 @@ def by_path( if detected: _, unqualified_name = spack.repo.partition_package_name(pkg_name) result[unqualified_name].extend(detected) - except Exception: + except concurrent.futures.TimeoutError: llnl.util.tty.debug( f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached" ) + except Exception as e: + llnl.util.tty.debug( + f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}" + ) return result diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py index e94d6efe5c4d4d..e9a387aac03e66 100644 --- a/lib/spack/spack/test/cmd/external.py +++ b/lib/spack/spack/test/cmd/external.py @@ -28,21 +28,12 @@ def _mock_search(path_hints=None): return _factory -@pytest.fixture -def _platform_executables(monkeypatch): - def _win_exe_ext(): - return ".bat" - - monkeypatch.setattr(spack.util.path, "win_exe_ext", _win_exe_ext) - - def define_plat_exe(exe): if sys.platform == "win32": exe += ".bat" return exe -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") def test_find_external_single_package(mock_executable): cmake_path = mock_executable("cmake", output="echo cmake version 1.foo") search_dir 
= cmake_path.parent.parent @@ -54,7 +45,7 @@ def test_find_external_single_package(mock_executable): assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo") -def test_find_external_two_instances_same_package(mock_executable, _platform_executables): +def test_find_external_two_instances_same_package(mock_executable): # Each of these cmake instances is created in a different prefix # In Windows, quoted strings are echo'd with quotes includes # we need to avoid that for proper regex. @@ -236,32 +227,7 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo): assert external.returncode == 0 -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") -def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables): - # Prepare an environment to detect a fake gcc - gcc_exe = mock_executable("gcc", output="echo 4.2.1") - prefix = os.path.dirname(gcc_exe) - monkeypatch.setenv("PATH", prefix) - - # Find the external spec - external("find", "gcc") - - # Check entries in 'packages.yaml' - packages_yaml = spack.config.get("packages") - assert "gcc" in packages_yaml - assert "externals" in packages_yaml["gcc"] - externals = packages_yaml["gcc"]["externals"] - assert len(externals) == 1 - external_gcc = externals[0] - assert external_gcc["spec"] == "gcc@4.2.1 languages=c" - assert external_gcc["prefix"] == os.path.dirname(prefix) - assert "extra_attributes" in external_gcc - extra_attributes = external_gcc["extra_attributes"] - assert "prefix" not in extra_attributes - assert extra_attributes["compilers"]["c"] == str(gcc_exe) - - -def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables): +def test_overriding_prefix(mock_executable, mutable_config, monkeypatch): gcc_exe = mock_executable("gcc", output="echo 4.2.1") search_dir = gcc_exe.parent @@ -282,10 +248,7 @@ def _determine_variants(cls, exes, version_str): assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin") -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") -def test_new_entries_are_reported_correctly( - mock_executable, mutable_config, monkeypatch, _platform_executables -): +def test_new_entries_are_reported_correctly(mock_executable, mutable_config, monkeypatch): # Prepare an environment to detect a fake gcc gcc_exe = mock_executable("gcc", output="echo 4.2.1") prefix = os.path.dirname(gcc_exe) diff --git a/lib/spack/spack/test/detection.py b/lib/spack/spack/test/detection.py new file mode 100644 index 00000000000000..6218bc87578f7c --- /dev/null +++ b/lib/spack/spack/test/detection.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import collections + +import spack.detection +import spack.spec + + +def test_detection_update_config(mutable_config): + # mock detected package + detected_packages = collections.defaultdict(list) + detected_packages["cmake"] = [ + spack.detection.common.DetectedPackage( + spec=spack.spec.Spec("cmake@3.27.5"), prefix="/usr/bin" + ) + ] + + # update config for new package + spack.detection.common.update_configuration(detected_packages) + # Check entries in 'packages.yaml' + packages_yaml = spack.config.get("packages") + assert "cmake" in packages_yaml + assert "externals" in packages_yaml["cmake"] + externals = packages_yaml["cmake"]["externals"] + assert len(externals) == 1 + external_gcc = externals[0] + assert external_gcc["spec"] == "cmake@3.27.5" + assert external_gcc["prefix"] == "/usr/bin" diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py index a46443c0831189..e2aee48df1e2cd 100644 --- a/lib/spack/spack/util/path.py +++ b/lib/spack/spack/util/path.py @@ -98,7 +98,7 @@ def replacements(): def win_exe_ext(): - return ".exe" + return r"(?:\.bat|\.exe)" def sanitize_filename(filename: str) -> str: From c976e8e0181ba9deb3372c5a565245529123992e Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:08:33 -0700 Subject: [PATCH 349/408] mgard@2020-10-01 %oneapi@2023: turn of c++11-narrowing via cxxflags (#40743) --- var/spack/repos/builtin/packages/mgard/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py index b58f4c0bba0aa6..411dd0c9b9c6e2 100644 --- a/var/spack/repos/builtin/packages/mgard/package.py +++ b/var/spack/repos/builtin/packages/mgard/package.py @@ -59,6 +59,12 @@ class Mgard(CMakePackage, CudaPackage): ) conflicts("%gcc@:7", when="@2022-11-18:", msg="requires std::optional and other c++17 things") + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("@2020-10-01 %oneapi@2023:"): + flags.append("-Wno-error=c++11-narrowing") + return (flags, None, None) + def cmake_args(self): spec = self.spec args = ["-DBUILD_TESTING=OFF"] From 75a48821a53e515d7bf07a907e8164a1eea4eb4b Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:15:11 -0700 Subject: [PATCH 350/408] e4s ci stacks: add exago specs (#40712) * e4s ci: add exago +cuda, +rocm builds * exago: rename 5-18-2022-snapshot to snapshot.5-18-2022 * disable exago +rocm for non-external rocm ci install * note that hiop +rocm fails to find hip libraries when they are spack-installed --- share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml | 1 + .../gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml | 2 ++ share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 3 +++ var/spack/repos/builtin/packages/exago/package.py | 2 +- 4 files changed, 7 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 95f8d37e0436bc..718f1d23d336b2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -199,6 +199,7 @@ spack: - caliper +cuda cuda_arch=70 - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp 
~paraview +cuda cuda_arch=70 + - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0 - flecsi +cuda cuda_arch=70 - ginkgo +cuda cuda_arch=70 - heffte +cuda cuda_arch=70 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index c11dcf6ae1a551..b30236a717453f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -250,6 +250,7 @@ spack: - caliper +rocm amdgpu_target=gfx908 - chai ~benchmarks +rocm amdgpu_target=gfx908 - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908 + - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 - gasnet +rocm amdgpu_target=gfx908 - ginkgo +rocm amdgpu_target=gfx908 - heffte +rocm amdgpu_target=gfx908 @@ -290,6 +291,7 @@ spack: - caliper +rocm amdgpu_target=gfx90a - chai ~benchmarks +rocm amdgpu_target=gfx90a - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a + - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a - gasnet +rocm amdgpu_target=gfx90a - ginkgo +rocm amdgpu_target=gfx90a - heffte +rocm amdgpu_target=gfx90a diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index ea9bd5fe70b72f..eb689234552cab 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -205,6 +205,7 @@ spack: - cusz +cuda cuda_arch=80 - dealii +cuda cuda_arch=80 - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error + - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0 - flecsi +cuda cuda_arch=80 - ginkgo +cuda cuda_arch=80 - heffte +cuda cuda_arch=80 @@ -327,6 +328,7 @@ spack: - paraview +rocm amdgpu_target=gfx908 # - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268 # -- + # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package) # - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 @@ -367,6 +369,7 @@ spack: - paraview +rocm amdgpu_target=gfx90a # - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268 # -- + # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package) # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index 
ab48bab3776b86..8db0f7f16fbefe 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -50,7 +50,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): version("main", branch="main", submodules=True) version("develop", branch="develop", submodules=True) version( - "5-18-2022-snapshot", + "snapshot.5-18-2022", tag="5-18-2022-snapshot", commit="3eb58335db71bb72341153a7867eb607402067ca", submodules=True, From 124671ffd1d9f6e8b3058228d3058e92b840fff9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Fri, 27 Oct 2023 20:33:48 +0100 Subject: [PATCH 351/408] hipsycl: restrict compatibility with llvm for v0.8.0 (#40736) --- var/spack/repos/builtin/packages/hipsycl/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/hipsycl/package.py b/var/spack/repos/builtin/packages/hipsycl/package.py index e8a5ba9201dd06..b6b30c2e5cfa7a 100644 --- a/var/spack/repos/builtin/packages/hipsycl/package.py +++ b/var/spack/repos/builtin/packages/hipsycl/package.py @@ -39,6 +39,9 @@ class Hipsycl(CMakePackage): depends_on("python@3:") depends_on("llvm@8: +clang", when="~cuda") depends_on("llvm@9: +clang", when="+cuda") + # hipSYCL 0.8.0 supported only LLVM 8-10: + # (https://github.com/AdaptiveCpp/AdaptiveCpp/blob/v0.8.0/CMakeLists.txt#L29-L37) + depends_on("llvm@8:10", when="@0.8.0") # https://github.com/OpenSYCL/OpenSYCL/pull/918 was introduced after 0.9.4 conflicts("^llvm@16:", when="@:0.9.4") # LLVM PTX backend requires cuda7:10.1 (https://tinyurl.com/v82k5qq) From 6e098bef1778ee2598f553dd6c6b729198393084 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 27 Oct 2023 19:58:50 -0400 Subject: [PATCH 352/408] MSVC: detection from registry (#38500) Typically MSVC is detected via the VSWhere program. However, this may not be available, or may be installed in an unpredictable location. This PR adds an additional approach via Windows Registry queries to determine VS install location root. Additionally: * Construct vs_install_paths after class-definition time (move it to variable-access time). * Skip over keys for which a user does not have read permissions when performing searches (previously the presence of these keys would have caused an error, regardless of whether they were needed). * Extend helper functionality with option for regex matching on registry keys vs. exact string matching. * Some internal refactoring: remove boolean parameters in some cases where the function was always called with the same value (e.g. 
`find_subkey`) --- lib/spack/spack/detection/common.py | 2 +- .../spack/operating_systems/windows_os.py | 107 ++++++++++------ lib/spack/spack/util/windows_registry.py | 114 +++++++++++++++--- 3 files changed, 168 insertions(+), 55 deletions(-) diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py index 0e873c3f555095..6fba021b336b0c 100644 --- a/lib/spack/spack/detection/common.py +++ b/lib/spack/spack/detection/common.py @@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]: At the moment simply returns location of VS install paths from VSWhere But should be extended to include more information as relevant""" - return list(winOs.WindowsOs.vs_install_paths) + return list(winOs.WindowsOs().vs_install_paths) @staticmethod def find_windows_compiler_cmake_paths() -> List[str]: diff --git a/lib/spack/spack/operating_systems/windows_os.py b/lib/spack/spack/operating_systems/windows_os.py index 0c3930e99c48f1..fa767d71fb1c22 100755 --- a/lib/spack/spack/operating_systems/windows_os.py +++ b/lib/spack/spack/operating_systems/windows_os.py @@ -5,10 +5,12 @@ import glob import os +import pathlib import platform import subprocess from spack.error import SpackError +from spack.util import windows_registry as winreg from spack.version import Version from ._operating_system import OperatingSystem @@ -31,43 +33,6 @@ class WindowsOs(OperatingSystem): 10. """ - # Find MSVC directories using vswhere - comp_search_paths = [] - vs_install_paths = [] - root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") - if root: - try: - extra_args = {"encoding": "mbcs", "errors": "strict"} - paths = subprocess.check_output( # type: ignore[call-overload] # novermin - [ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-prerelease", - "-requires", - "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", - "installationPath", - "-products", - "*", - ], - **extra_args, - ).strip() - vs_install_paths = paths.split("\n") - msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths] - for p in msvc_paths: - comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64"))) - if os.getenv("ONEAPI_ROOT"): - comp_search_paths.extend( - glob.glob( - os.path.join( - str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin" - ) - ) - ) - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - pass - if comp_search_paths: - compiler_search_paths = comp_search_paths - def __init__(self): plat_ver = windows_version() if plat_ver < Version("10"): @@ -76,3 +41,71 @@ def __init__(self): def __str__(self): return self.name + + @property + def vs_install_paths(self): + vs_install_paths = [] + root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") + if root: + try: + extra_args = {"encoding": "mbcs", "errors": "strict"} + paths = subprocess.check_output( # type: ignore[call-overload] # novermin + [ + os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-prerelease", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", + "installationPath", + "-products", + "*", + ], + **extra_args, + ).strip() + vs_install_paths = paths.split("\n") + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + pass + return vs_install_paths + + @property + def msvc_paths(self): + return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths] + + @property + def 
compiler_search_paths(self): + # First Strategy: Find MSVC directories using vswhere + _compiler_search_paths = [] + for p in self.msvc_paths: + _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64"))) + if os.getenv("ONEAPI_ROOT"): + _compiler_search_paths.extend( + glob.glob( + os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin") + ) + ) + # Second strategy: Find MSVC via the registry + msft = winreg.WindowsRegistryView( + "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE + ) + vs_entries = msft.find_subkeys(r"VisualStudio_.*") + vs_paths = [] + + def clean_vs_path(path): + path = path.split(",")[0].lstrip("@") + return str((pathlib.Path(path).parent / "..\\..").resolve()) + + for entry in vs_entries: + try: + val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value + vs_paths.append(clean_vs_path(val)) + except FileNotFoundError as e: + if hasattr(e, "winerror"): + if e.winerror == 2: + pass + else: + raise + else: + raise + + _compiler_search_paths.extend(vs_paths) + return _compiler_search_paths diff --git a/lib/spack/spack/util/windows_registry.py b/lib/spack/spack/util/windows_registry.py index 5cc0edd8bf5271..cfc16724563287 100644 --- a/lib/spack/spack/util/windows_registry.py +++ b/lib/spack/spack/util/windows_registry.py @@ -8,6 +8,7 @@ """ import os +import re import sys from contextlib import contextmanager @@ -68,8 +69,19 @@ def _gather_subkey_info(self): sub_keys, _, _ = winreg.QueryInfoKey(self.hkey) for i in range(sub_keys): sub_name = winreg.EnumKey(self.hkey, i) - sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ) - self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle)) + try: + sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ) + self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle)) + except OSError as e: + if hasattr(e, "winerror"): + if e.winerror == 5: + # This is a permission error, we can't read this key + # move on + pass + else: + raise + else: + raise def _gather_value_info(self): """Compose all values for this key into a dict of form value name: RegistryValue Object""" @@ -161,6 +173,15 @@ def __init__(self, key, root_key=HKEY.HKEY_CURRENT_USER): self.root = root_key self._reg = None + class KeyMatchConditions: + @staticmethod + def regex_matcher(subkey_name): + return lambda x: re.match(subkey_name, x.name) + + @staticmethod + def name_matcher(subkey_name): + return lambda x: subkey_name == x.name + @contextmanager def invalid_reg_ref_error_handler(self): try: @@ -193,6 +214,10 @@ def _valid_reg_check(self): return False return True + def _regex_match_subkeys(self, subkey): + r_subkey = re.compile(subkey) + return [key for key in self.get_subkeys() if r_subkey.match(key.name)] + @property def reg(self): if not self._reg: @@ -218,51 +243,106 @@ def get_subkeys(self): with self.invalid_reg_ref_error_handler(): return self.reg.subkeys + def get_matching_subkeys(self, subkey_name): + """Returns all subkeys regex matching subkey name + + Note: this method obtains only direct subkeys of the given key and does not + desced to transtitve subkeys. 
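
To make the registry strategy above concrete: assuming an ApplicationDescription value of the usual resource-reference form (the value below is made up), clean_vs_path strips the resource suffix and resolves upward from Common7\IDE to the Visual Studio installation root:

    # hypothetical registry value
    val = r"@C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\IDE\devenv.exe,-100"

    # val.split(",")[0].lstrip("@")  -> C:\...\Community\Common7\IDE\devenv.exe
    # Path(...).parent / "..\.." (resolved)
    #   -> C:\Program Files\Microsoft Visual Studio\2022\Community
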
For this behavior, see `find_matching_subkeys`""" + return self._regex_match_subkeys(subkey_name) + def get_values(self): if not self._valid_reg_check(): raise RegistryError("Cannot query values from invalid key %s" % self.key) with self.invalid_reg_ref_error_handler(): return self.reg.values - def _traverse_subkeys(self, stop_condition): + def _traverse_subkeys(self, stop_condition, collect_all_matching=False): """Perform simple BFS of subkeys, returning the key that successfully triggers the stop condition. Args: stop_condition: lambda or function pointer that takes a single argument a key and returns a boolean value based on that key + collect_all_matching: boolean value, if True, the traversal collects and returns + all keys meeting stop condition. If false, once stop + condition is met, the key that triggered the condition + is returned. Return: the key if stop_condition is triggered, or None if not """ + collection = [] if not self._valid_reg_check(): raise RegistryError("Cannot query values from invalid key %s" % self.key) with self.invalid_reg_ref_error_handler(): queue = self.reg.subkeys for key in queue: if stop_condition(key): - return key + if collect_all_matching: + collection.append(key) + else: + return key queue.extend(key.subkeys) - return None + return collection if collection else None + + def _find_subkey_s(self, search_key, collect_all_matching=False): + """Retrieve one or more keys regex matching `search_key`. + One key will be returned unless `collect_all_matching` is enabled, + in which case all matches are returned. + + Args: + search_key (str): regex string representing a subkey name structure + to be matched against. + Cannot be provided alongside `direct_subkey` + collect_all_matching (bool): No-op if `direct_subkey` is specified + Return: + the desired subkey as a RegistryKey object, or none + """ + return self._traverse_subkeys(search_key, collect_all_matching=collect_all_matching) - def find_subkey(self, subkey_name, recursive=True): - """If non recursive, this method is the same as get subkey with error handling - Otherwise perform a BFS of subkeys until desired key is found + def find_subkey(self, subkey_name): + """Perform a BFS of subkeys until desired key is found Returns None or RegistryKey object corresponding to requested key name Args: - subkey_name (str): string representing subkey to be searched for - recursive (bool): optional argument, if True, subkey need not be a direct - sub key of this registry entry, and this method will - search all subkeys recursively.
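
Putting the matcher-based traversal together, usage mirrors the compiler detection earlier in this patch; the "Windows Kits" lookup is a made-up example:

    from spack.util.windows_registry import HKEY, WindowsRegistryView

    view = WindowsRegistryView("SOFTWARE\\WOW6432Node\\Microsoft", HKEY.HKEY_LOCAL_MACHINE)

    # exact-name lookup: BFS stops at the first subkey named exactly "Windows Kits"
    kits = view.find_subkey("Windows Kits")

    # regex lookup: collect every transitive subkey whose name matches the pattern
    vs_entries = view.find_subkeys(r"VisualStudio_.*")
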
- Default is True + subkey_name (str) Return: the desired subkey as a RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring """ + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.name_matcher(subkey_name) + ) - if not recursive: - return self.get_subkey(subkey_name) + def find_matching_subkey(self, subkey_name): + """Perform a BFS of subkeys until a key matching subkey name regex is found + Returns None or the first RegistryKey object corresponding to requested key name - else: - return self._traverse_subkeys(lambda x: x.name == subkey_name) + Args: + subkey_name (str) + Return: + the desired subkey as a RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring + """ + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name) + ) + + def find_subkeys(self, subkey_name): + """Exactly the same as find_subkey, except this function tries to match + a regex to multiple keys + + Args: + subkey_name (str) + Return: + the desired subkeys as a list of RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring + """ + kwargs = {"collect_all_matching": True} + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name), **kwargs + ) def find_value(self, val_name, recursive=True): """ From 47c850b4580c09466046d3a92fe71d746e6258a7 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sat, 28 Oct 2023 06:17:32 -0500 Subject: [PATCH 353/408] py-numpy: add v1.26 (#40057) --- .../builtin/packages/dxt-explorer/package.py | 2 +- .../repos/builtin/packages/py-gpaw/package.py | 2 +- .../builtin/packages/py-numpy/package.py | 175 +++++++++++++----- .../repos/builtin/packages/py-pip/package.py | 2 + .../repos/builtin/packages/py-pyfr/package.py | 2 +- .../builtin/packages/py-pyzmq/package.py | 3 + .../builtin/packages/py-scipy/package.py | 170 +++++++---------- .../builtin/packages/py-tomopy/package.py | 2 +- 8 files changed, 202 insertions(+), 156 deletions(-) diff --git a/var/spack/repos/builtin/packages/dxt-explorer/package.py b/var/spack/repos/builtin/packages/dxt-explorer/package.py index 4f7df14c186af9..90ef64818346c4 100644 --- a/var/spack/repos/builtin/packages/dxt-explorer/package.py +++ b/var/spack/repos/builtin/packages/dxt-explorer/package.py @@ -26,5 +26,5 @@ class DxtExplorer(PythonPackage): depends_on("darshan-util", type=("run")) - depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") depends_on("py-pandas", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-gpaw/package.py b/var/spack/repos/builtin/packages/py-gpaw/package.py index 0f5072e927c534..f6759fb279ea53 100644 --- a/var/spack/repos/builtin/packages/py-gpaw/package.py +++ b/var/spack/repos/builtin/packages/py-gpaw/package.py @@ -35,7 +35,7 @@ class PyGpaw(PythonPackage): depends_on("py-ase@3.19.0:", type=("build", "run"), when="@20.1.0") depends_on("py-ase@3.20.1:", type=("build", "run"), when="@20.10.0") depends_on("py-ase@3.21.0:", type=("build", "run"), when="@21.1.0") - depends_on("py-numpy +blas +lapack", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) depends_on("py-scipy", type=("build", "run")) depends_on("libxc@3:4.3.4") depends_on("blas") diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index e5ffea879c4cd7..8ee118d98e917e 
100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -5,16 +5,13 @@ import platform import subprocess +from typing import Tuple from spack.package import * class PyNumpy(PythonPackage): - """NumPy is the fundamental package for scientific computing with Python. - It contains among other things: a powerful N-dimensional array object, - sophisticated (broadcasting) functions, tools for integrating C/C++ and - Fortran code, and useful linear algebra, Fourier transform, and random - number capabilities""" + """Fundamental package for array computing in Python.""" homepage = "https://numpy.org/" pypi = "numpy/numpy-1.23.0.tar.gz" @@ -23,6 +20,8 @@ class PyNumpy(PythonPackage): maintainers("adamjstewart", "rgommers") version("main", branch="main") + version("1.26.1", sha256="c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe") + version("1.26.0", sha256="f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf") version("1.25.2", sha256="fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760") version("1.25.1", sha256="9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf") version("1.25.0", sha256="f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19") @@ -87,11 +86,8 @@ class PyNumpy(PythonPackage): version("1.14.6", sha256="1250edf6f6c43e1d7823f0967416bc18258bb271dc536298eb0ea00a9e45b80a") version("1.14.5", sha256="a4a433b3a264dbc9aa9c7c241e87c0358a503ea6394f8737df1683c7c9a102ac") - variant("blas", default=True, description="Build with BLAS support") - variant("lapack", default=True, description="Build with LAPACK support") - - # Based on wheel availability on PyPI - depends_on("python@3.9:3.11", when="@1.25:", type=("build", "link", "run")) + depends_on("python@3.9:3.12", when="@1.26:", type=("build", "link", "run")) + depends_on("python@3.9:3.11", when="@1.25", type=("build", "link", "run")) depends_on("python@3.8:3.11", when="@1.23.2:1.24", type=("build", "link", "run")) depends_on("python@3.8:3.10", when="@1.22:1.23.1", type=("build", "link", "run")) depends_on("python@:3.10", when="@1.21.2:1.21", type=("build", "link", "run")) @@ -99,19 +95,30 @@ class PyNumpy(PythonPackage): depends_on("python@:3.8", when="@1.17.3:1.19.2", type=("build", "link", "run")) depends_on("python@:3.7", when="@1.14.5:1.17.2", type=("build", "link", "run")) + depends_on("py-cython@0.29.34:3", when="@1.26:", type="build") + depends_on("py-cython@0.29.34:2", when="@1.25", type="build") + depends_on("py-cython@0.29.30:2", when="@1.22.4:1.24", type="build") + depends_on("py-cython@0.29.24:2", when="@1.21.2:1.22.3", type="build") + depends_on("py-cython@0.29.21:2", when="@1.19.1:1.21.1", type="build") + depends_on("py-cython@0.29.14:2", when="@1.18.1:1.19.0", type="build") + depends_on("py-cython@0.29.13:2", when="@1.18.0", type="build") + depends_on("py-pyproject-metadata@0.7.1:", when="@1.26:", type="build") + depends_on("py-tomli@1:", when="@1.26: ^python@:3.10", type="build") + depends_on("py-setuptools@60:", when="@1.26: ^python@3.12:", type="build") # https://github.com/spack/spack/pull/32078 - depends_on("py-setuptools@:63", type=("build", "run")) + depends_on("py-setuptools@:63", when="@:1.25", type=("build", "run")) depends_on("py-setuptools@:59", when="@:1.22.1", type=("build", "run")) - # Check pyproject.toml for updates to the required cython version - depends_on("py-cython@0.29.34:2", when="@1.25:", type="build") - depends_on("py-cython@0.29.13:2", when="@1.18.0:", 
type="build") - depends_on("py-cython@0.29.14:2", when="@1.18.1:", type="build") - depends_on("py-cython@0.29.21:2", when="@1.19.1:", type="build") - depends_on("py-cython@0.29.24:2", when="@1.21.2:", type="build") - depends_on("py-cython@0.29.30:2", when="@1.22.4:", type="build") - depends_on("blas", when="+blas") - depends_on("lapack", when="+lapack") + depends_on("py-colorama", when="@1.26: platform=windows", type="build") + + # Required to use --config-settings + depends_on("py-pip@23.1:", when="@1.26:", type="build") + # meson is vendored, ninja and pkgconfig are not + depends_on("ninja@1.8.2:", when="@1.26:", type="build") + depends_on("pkgconfig", when="@1.26:", type="build") + depends_on("blas") + depends_on("lapack") + # test_requirements.txt depends_on("py-nose@1.0.0:", when="@:1.14", type="test") depends_on("py-pytest", when="@1.15:", type="test") depends_on("py-hypothesis", when="@1.19:", type="test") @@ -145,13 +152,21 @@ class PyNumpy(PythonPackage): when="@1.22.0:1.22.3", ) - # version 1.21.0 runs into an infinit loop during printing + # meson.build + # https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers + conflicts("%gcc@:8.3", when="@1.26:", msg="NumPy requires GCC >= 8.4") + conflicts("%gcc@:4.7", msg="NumPy requires GCC >= 4.8") + conflicts( + "%msvc@:19.19", + when="@1.26:", + msg="NumPy requires at least vc142 (default with Visual Studio 2019) " + "when building with MSVC", + ) + + # version 1.21.0 runs into an infinite loop during printing # (e.g. print(numpy.ones(1000)) when compiled with gcc 11 conflicts("%gcc@11:", when="@1.21.0") - # GCC 4.8 is the minimum version that works - conflicts("%gcc@:4.7", msg="GCC 4.8+ required") - # NVHPC support added in https://github.com/numpy/numpy/pull/17344 conflicts("%nvhpc", when="@:1.19") @@ -159,6 +174,10 @@ class PyNumpy(PythonPackage): conflicts("%intel", when="@1.23.0:1.23.3") conflicts("%oneapi", when="@1.23.0:1.23.3") + @property + def archive_files(self): + return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")] + def url_for_version(self, version): url = "https://files.pythonhosted.org/packages/source/n/numpy/numpy-{}.{}" if version >= Version("1.23"): @@ -193,16 +212,68 @@ def flag_handler(self, name, flags): return (flags, None, None) - @run_before("install") - def set_blas_lapack(self): - # https://numpy.org/devdocs/user/building.html - # https://github.com/numpy/numpy/blob/master/site.cfg.example + def blas_lapack_pkg_config(self) -> Tuple[str, str]: + """Convert library names to pkg-config names. - # Skip if no BLAS/LAPACK requested + Returns: + The names of the blas and lapack libs that pkg-config should search for. 
+ """ spec = self.spec - if "+blas" not in spec and "+lapack" not in spec: - return + blas = spec["blas"].libs.names[0] + lapack = spec["lapack"].libs.names[0] + + if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: + blas = "mkl-dynamic-lp64-seq" + if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: + lapack = "mkl-dynamic-lp64-seq" + + if spec["blas"].name in ["blis", "amdblis"]: + blas = "blis" + + if spec["blas"].name == "cray-libsci": + blas = "libsci" + + if spec["lapack"].name == "cray-libsci": + lapack = "libsci" + + if "armpl" in blas: + if "_mp" in blas: + blas = "armpl-dynamic-lp64-omp" + else: + blas = "armpl-dynamic-lp64-seq" + + if "armpl" in lapack: + if "_mp" in lapack: + lapack = "armpl-dynamic-lp64-omp" + else: + lapack = "armpl-dynamic-lp64-seq" + + return blas, lapack + + @when("@1.26:") + def config_settings(self, spec, prefix): + blas, lapack = self.blas_lapack_pkg_config() + return { + "builddir": "build", + "compile-args": f"-j{make_jobs}", + "setup-args": { + # https://scipy.github.io/devdocs/building/blas_lapack.html + "-Dblas": blas, + "-Dlapack": lapack, + # https://numpy.org/doc/stable/reference/simd/build-options.html + # TODO: get this working in CI + # "-Dcpu-baseline": "native", + # "-Dcpu-dispatch": "none", + }, + } + + def blas_lapack_site_cfg(self) -> None: + """Write a site.cfg file to configure BLAS/LAPACK.""" + spec = self.spec + + # https://numpy.org/doc/1.25/user/building.html + # https://github.com/numpy/numpy/blob/v1.25.2/site.cfg.example def write_library_dirs(f, dirs): f.write("library_dirs = {0}\n".format(dirs)) if not ( @@ -211,17 +282,11 @@ def write_library_dirs(f, dirs): ): f.write("rpath = {0}\n".format(dirs)) - blas_libs = LibraryList([]) - blas_headers = HeaderList([]) - if "+blas" in spec: - blas_libs = spec["blas"].libs - blas_headers = spec["blas"].headers + blas_libs = spec["blas"].libs + blas_headers = spec["blas"].headers - lapack_libs = LibraryList([]) - lapack_headers = HeaderList([]) - if "+lapack" in spec: - lapack_libs = spec["lapack"].libs - lapack_headers = spec["lapack"].headers + lapack_libs = spec["lapack"].libs + lapack_headers = spec["lapack"].headers lapackblas_libs = lapack_libs + blas_libs lapackblas_headers = lapack_headers + blas_headers @@ -334,15 +399,25 @@ def write_library_dirs(f, dirs): write_library_dirs(f, lapack_lib_dirs) f.write("include_dirs = {0}\n".format(lapack_header_dirs)) + @when("@:1.25") + @run_before("install") + def set_blas_lapack(self): + self.blas_lapack_site_cfg() + + @when("@1.26:") + def setup_build_environment(self, env): + # https://github.com/scipy/scipy/issues/19357 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + + @when("@:1.25") def setup_build_environment(self, env): # Tell numpy which BLAS/LAPACK libraries we want to use. 
- # https://github.com/numpy/numpy/pull/13132 - # https://numpy.org/devdocs/user/building.html#accelerated-blas-lapack-libraries spec = self.spec - # https://numpy.org/devdocs/user/building.html#blas - if "blas" not in spec: - blas = "" - elif ( + # https://github.com/numpy/numpy/pull/13132 + # https://numpy.org/doc/1.25/user/building.html#accelerated-blas-lapack-libraries + # https://numpy.org/doc/1.25/user/building.html#blas + if ( spec["blas"].name == "intel-mkl" or spec["blas"].name == "intel-parallel-studio" or spec["blas"].name == "intel-oneapi-mkl" @@ -361,10 +436,8 @@ def setup_build_environment(self, env): env.set("NPY_BLAS_ORDER", blas) - # https://numpy.org/devdocs/user/building.html#lapack - if "lapack" not in spec: - lapack = "" - elif ( + # https://numpy.org/doc/1.25/user/building.html#lapack + if ( spec["lapack"].name == "intel-mkl" or spec["lapack"].name == "intel-parallel-studio" or spec["lapack"].name == "intel-oneapi-mkl" diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index 52d290d0b549c8..dfa85d55bc055e 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -15,6 +15,8 @@ class PyPip(Package, PythonExtension): url = "https://files.pythonhosted.org/packages/py3/p/pip/pip-20.2-py3-none-any.whl" list_url = "https://pypi.org/simple/pip/" + tags = ["build-tools"] + maintainers("adamjstewart", "pradyunsg") version( diff --git a/var/spack/repos/builtin/packages/py-pyfr/package.py b/var/spack/repos/builtin/packages/py-pyfr/package.py index 7cbfe6ab71f67d..9f81ef7597fa51 100644 --- a/var/spack/repos/builtin/packages/py-pyfr/package.py +++ b/var/spack/repos/builtin/packages/py-pyfr/package.py @@ -41,7 +41,7 @@ class PyPyfr(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-h5py@2.10:", type=("build", "run")) depends_on("py-mako@1.0.0:", type=("build", "run")) depends_on("py-mpi4py@3.1.0:", type=("build", "run")) - depends_on("py-numpy@1.20:+blas", type=("build", "run")) + depends_on("py-numpy@1.20:", type=("build", "run")) depends_on("py-platformdirs@2.2.0:", type=("build", "run")) depends_on("py-pytools@2016.2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pyzmq/package.py b/var/spack/repos/builtin/packages/py-pyzmq/package.py index 4850dddc08ec8e..bf60b4be57d91b 100644 --- a/var/spack/repos/builtin/packages/py-pyzmq/package.py +++ b/var/spack/repos/builtin/packages/py-pyzmq/package.py @@ -45,6 +45,9 @@ class PyPyzmq(PythonPackage): # pyproject.toml depends_on("py-setuptools", type="build") + # https://github.com/zeromq/pyzmq/issues/1278 + # https://github.com/zeromq/pyzmq/pull/1317 + depends_on("py-setuptools@:59", when="@17:18.0", type="build") depends_on("py-packaging", type="build") # setup.py diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 4a07657d80d7bb..51f89f7d4cdf92 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -3,16 +3,11 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import glob -import os - from spack.package import * class PyScipy(PythonPackage): - """SciPy (pronounced "Sigh Pie") is a Scientific Library for Python. 
- It provides many user-friendly and efficient numerical routines such - as routines for numerical integration and optimization.""" + """Fundamental algorithms for scientific computing in Python.""" homepage = "https://www.scipy.org/" pypi = "scipy/scipy-1.10.1.tar.gz" @@ -20,7 +15,9 @@ class PyScipy(PythonPackage): maintainers("adamjstewart", "rgommers") - version("master", branch="master") + version("main", branch="main") + version("master", branch="master", deprecated=True) + version("1.11.3", sha256="bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd") version("1.11.2", sha256="b29318a5e39bd200ca4381d80b065cdf3076c7d7281c5e36569e99273867f61d") version("1.11.1", sha256="fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289") version("1.11.0", sha256="f9b0248cb9d08eead44cde47cbf6339f1e9aa0dfde28f5fb27950743e317bd5d") @@ -65,22 +62,18 @@ class PyScipy(PythonPackage): depends_on("python@:3.8", when="@1.3.2:1.5.3", type=("build", "link", "run")) depends_on("python@:3.7", when="@1.1:1.3.1", type=("build", "link", "run")) - # TODO: remove once pip build supports BLAS/LAPACK specification - # https://github.com/mesonbuild/meson-python/pull/167 - depends_on("py-build", when="@1.9:", type="build") - - depends_on("py-meson-python@0.12.1:0.13", when="@1.11:", type="build") - depends_on("py-meson-python@0.11:0.12", when="@1.10.1:1.10", type="build") - depends_on("py-meson-python@0.11", when="@1.10.0", type="build") - depends_on("py-meson-python@0.9:", when="@1.9.2:1.9", type="build") - depends_on("py-meson-python@0.8.1:", when="@1.9.1", type="build") - depends_on("py-meson-python@0.7", when="@1.9.0", type="build") - depends_on("meson@0.62.2", when="@1.9.0:1.9.1", type="build") + depends_on("py-meson-python@0.12.1:", when="@1.11:", type="build") + depends_on("py-meson-python@0.11:", when="@1.10:", type="build") + depends_on("py-meson-python@0.9:", when="@1.9.2:", type="build") + depends_on("py-meson-python@0.8.1:", when="@1.9.1:", type="build") + depends_on("py-meson-python@0.7:", when="@1.9:", type="build") + depends_on("meson", when="@1.9.0:1.9.1", type="build") depends_on("py-cython@0.29.35:2", when="@1.11:", type="build") depends_on("py-cython@0.29.32:2", when="@1.9.2:", type="build") depends_on("py-cython@0.29.21:2", when="@1.9:", type="build") depends_on("py-cython@0.29.18:2", when="@1.7:", type="build") - depends_on("py-pybind11@2.10.4:2.10", when="@1.11:", type=("build", "link")) + depends_on("py-pybind11@2.10.4:2.11.0", when="@1.11.3:", type=("build", "link")) + depends_on("py-pybind11@2.10.4:2.10", when="@1.11.0:1.11.2", type=("build", "link")) depends_on("py-pybind11@2.10.1", when="@1.10", type=("build", "link")) depends_on("py-pybind11@2.4.3:2.10", when="@1.9.1:1.9", type=("build", "link")) depends_on("py-pybind11@2.4.3:2.9", when="@1.9.0", type=("build", "link")) @@ -90,14 +83,11 @@ class PyScipy(PythonPackage): depends_on("py-pybind11@2.4.3:", when="@1.5:1.6.1", type=("build", "link")) depends_on("py-pybind11@2.4.0:", when="@1.4.1:1.4", type=("build", "link")) depends_on("py-pybind11@2.2.4:", when="@1.4.0", type=("build", "link")) - depends_on("py-pythran@0.12:0.13", when="@1.11:", type="build") - depends_on("py-pythran@0.12", when="@1.10", type="build") - depends_on("py-pythran@0.9.12:0.12", when="@1.9.2:1.9", type="build") - depends_on("py-pythran@0.9.12:0.11", when="@1.9.0:1.9.1", type="build") - depends_on("py-pythran@0.10", when="@1.8", type="build") - depends_on("py-pythran@0.9.12:0.9", when="@1.7.2:1.7", type="build") - depends_on("py-pythran@0.9.11", 
when="@1.7.0:1.7.1", type="build") - depends_on("py-wheel@:0.40", when="@1.11:", type="build") + depends_on("py-pythran@0.12:", when="@1.10:", type="build") + depends_on("py-pythran@0.10:", when="@1.8", type="build") + depends_on("py-pythran@0.9.12:", when="@1.7.2:", type="build") + depends_on("py-pythran@0.9.11:", when="@1.7:", type="build") + depends_on("py-wheel@:0.40", when="@1.11.0:1.11.2", type="build") depends_on("py-wheel@:0.38", when="@1.10", type="build") depends_on("py-wheel@:0.37", when="@:1.9", type="build") depends_on("pkgconfig", when="@1.9:", type="build") @@ -105,43 +95,53 @@ class PyScipy(PythonPackage): depends_on("py-setuptools@:59", when="@1.8", type="build") depends_on("py-setuptools@:57", when="@1.7", type="build") depends_on("py-setuptools@:51.0.0", when="@1.6", type="build") - depends_on("py-numpy@1.21.6:1.27+blas+lapack", when="@1.11:", type=("build", "link", "run")) - depends_on("py-numpy@1.19.5:1.26+blas+lapack", when="@1.10", type=("build", "link", "run")) - depends_on("py-numpy@1.18.5:1.25+blas+lapack", when="@1.9", type=("build", "link", "run")) - depends_on("py-numpy@1.17.3:1.24+blas+lapack", when="@1.8", type=("build", "link", "run")) - depends_on( - "py-numpy@1.16.5:1.22+blas+lapack", when="@1.6.2:1.7", type=("build", "link", "run") - ) - depends_on("py-numpy@1.16.5:+blas+lapack", when="@1.6:1.6.1", type=("build", "link", "run")) - depends_on("py-numpy@1.14.5:+blas+lapack", when="@1.5.0:1.5", type=("build", "link", "run")) - depends_on("py-numpy@1.13.3:+blas+lapack", when="@1.3:1.4", type=("build", "link", "run")) - depends_on("py-numpy@1.8.2:+blas+lapack", when="@:1.2", type=("build", "link", "run")) + depends_on("py-numpy@1.21.6:1.27", when="@1.11:", type=("build", "link", "run")) + depends_on("py-numpy@1.19.5:1.26", when="@1.10", type=("build", "link", "run")) + depends_on("py-numpy@1.18.5:1.25", when="@1.9", type=("build", "link", "run")) + depends_on("py-numpy@1.17.3:1.24", when="@1.8", type=("build", "link", "run")) + depends_on("py-numpy@1.16.5:1.22", when="@1.6:1.7", type=("build", "link", "run")) + depends_on("py-numpy@1.14.5:1.21", when="@1.5", type=("build", "link", "run")) + depends_on("py-numpy@1.13.3:1.21", when="@1.3:1.4", type=("build", "link", "run")) + depends_on("py-numpy@1.8.2:1.20", when="@:1.2", type=("build", "link", "run")) depends_on("py-pytest", type="test") - # NOTE: scipy should use the same BLAS/LAPACK as numpy. - # For scipy 1.8 and older, this is achieved by calling the set_blas_lapack() - # and setup_build_environment() from numpy in the scipy spec. 
- depends_on("blas") - depends_on("lapack") + # Required to use --config-settings + depends_on("py-pip@23.1:", when="@1.9:", type="build") # https://docs.scipy.org/doc/scipy/dev/toolchain.html#other-libraries depends_on("lapack@3.7.1:", when="@1.9:") depends_on("lapack@3.4.1:", when="@1.2:") + depends_on("lapack") + depends_on("blas") + # meson.build # https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers - conflicts("%gcc@:7", when="@1.10:") - conflicts("%gcc@:4.7", when="@:1.9") - conflicts("%apple-clang@:9", when="@1.10:") - conflicts("%msvc@:19.19", when="@1.10:") + conflicts("%gcc@:7", when="@1.10:", msg="SciPy requires GCC >= 8.0") + conflicts("%gcc@:4.7", when="@:1.9", msg="SciPy requires GCC >= 4.8") + conflicts( + "%msvc@:19.19", + when="@1.10:", + msg="SciPy requires at least vc142 (default with Visual Studio 2019) " + "when building with MSVC", + ) - # https://github.com/scipy/scipy/pull/11324 - conflicts("@1.4.0:1.4.1", when="target=ppc64le:") + # https://github.com/scipy/scipy/issues/19352 + conflicts("^py-cython@3.0.3") # https://github.com/mesonbuild/meson/pull/10909#issuecomment-1282241479 # Intel OneAPI ifx claims to support -fvisibility, but this does not work. # Meson adds this flag for all Python extensions which include Fortran code. conflicts("%oneapi@:2023.0", when="@1.9:") + # error: expected unqualified-id (exact compiler versions unknown) + conflicts("%apple-clang@15:", when="@:1.9") + + # https://docs.scipy.org/doc//scipy-1.10.1/release.1.7.3.html + conflicts("platform=darwin target=aarch64:", when="@:1.7.2") + + # https://github.com/scipy/scipy/pull/11324 + conflicts("@1.4.0:1.4.1", when="target=ppc64le:") + # https://github.com/scipy/scipy/issues/12860 patch( "https://git.sagemath.org/sage.git/plain/build/pkgs/scipy/patches/extern_decls.patch?id=711fe05025795e44b84233e065d240859ccae5bd", @@ -155,12 +155,6 @@ class PyScipy(PythonPackage): def archive_files(self): return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")] - @run_before("install") - def set_blas_lapack(self): - # Pick up BLAS/LAPACK from numpy - if self.spec.satisfies("@:1.8"): - self.spec["py-numpy"].package.set_blas_lapack() - @run_before("install") def set_fortran_compiler(self): if self.compiler.f77 is None or self.compiler.fc is None: @@ -200,53 +194,27 @@ def setup_build_environment(self, env): if self.spec.satisfies("@:1.8"): self.spec["py-numpy"].package.setup_build_environment(env) - # TODO: remove once pip build supports BLAS/LAPACK specification - # https://github.com/mesonbuild/meson-python/pull/167 + # https://github.com/scipy/scipy/issues/19357 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + @when("@1.9:") - def install(self, spec, prefix): - blas = spec["blas"].libs.names[0] - lapack = spec["lapack"].libs.names[0] - if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: - blas = "mkl-dynamic-lp64-seq" - if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: - lapack = "mkl-dynamic-lp64-seq" - if spec["blas"].name in ["blis", "amdblis"]: - blas = "blis" - if "armpl" in blas: - if "_mp" in blas: - blas = "armpl-dynamic-lp64-omp" - else: - blas = "armpl-dynamic-lp64-seq" - if "armpl" in lapack: - if "_mp" in lapack: - lapack = "armpl-dynamic-lp64-omp" - else: - lapack = "armpl-dynamic-lp64-seq" - - args = [ - "setup", - "build", - "-Dblas=" + blas, - "-Dlapack=" + lapack, - "--prefix=" + join_path(os.getcwd(), "build-install"), - "-Ddebug=false", 
- "-Doptimization=2", - ] - meson = which("meson") - meson(*args) - args = [ - "-m", - "build", - "--wheel", - "-Cbuilddir=build", - "--no-isolation", - "--skip-dependency-check", - "-Ccompile-args=-j%s" % make_jobs, - ".", - ] - python(*args) - args = std_pip_args + ["--prefix=" + prefix, glob.glob(join_path("dist", "scipy*.whl"))[0]] - pip(*args) + def config_settings(self, spec, prefix): + blas, lapack = self.spec["py-numpy"].package.blas_lapack_pkg_config() + return { + "builddir": "build", + "compile-args": f"-j{make_jobs}", + "setup-args": { + # http://scipy.github.io/devdocs/building/blas_lapack.html + "-Dblas": blas, + "-Dlapack": lapack, + }, + } + + @when("@:1.8") + @run_before("install") + def set_blas_lapack(self): + self.spec["py-numpy"].package.blas_lapack_site_cfg() @run_after("install") @on_package_attributes(run_tests=True) diff --git a/var/spack/repos/builtin/packages/py-tomopy/package.py b/var/spack/repos/builtin/packages/py-tomopy/package.py index b99e60ef91cb3e..59a1c0f1b32d49 100644 --- a/var/spack/repos/builtin/packages/py-tomopy/package.py +++ b/var/spack/repos/builtin/packages/py-tomopy/package.py @@ -34,7 +34,7 @@ class PyTomopy(PythonPackage): # Note: The module name of py-scikit-build is skbuild: depends_on("py-scikit-build", type=("build")) depends_on("py-scikit-image@0.17:", type=("build", "run")) - depends_on("py-numpy+blas", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) depends_on("py-pyfftw", type=("build", "run"), when="@1.0:1.9") depends_on("py-scipy", type=("build", "run")) depends_on("py-setuptools", type="build") From fd4650e4d49982371f0e7b34a499f4d519883322 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 28 Oct 2023 14:51:55 +0200 Subject: [PATCH 354/408] py-comm: add 0.1.4 (#40669) --- var/spack/repos/builtin/packages/py-comm/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-comm/package.py b/var/spack/repos/builtin/packages/py-comm/package.py index fd195b1f4f7644..5e82ade2c04fd2 100644 --- a/var/spack/repos/builtin/packages/py-comm/package.py +++ b/var/spack/repos/builtin/packages/py-comm/package.py @@ -12,7 +12,10 @@ class PyComm(PythonPackage): homepage = "https://github.com/ipython/comm" pypi = "comm/comm-0.1.3.tar.gz" + version("0.1.4", sha256="354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15") version("0.1.3", sha256="a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e") depends_on("py-hatchling@1.10:", type="build") - depends_on("py-traitlets@5.3:", type=("build", "run")) + + depends_on("py-traitlets@4:", when="@0.1.4:", type=("build", "run")) + depends_on("py-traitlets@5.3:", when="@0.1.3", type=("build", "run")) From c0b13292cc273fbacdad254d2a2f051476f716aa Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 28 Oct 2023 14:55:49 +0200 Subject: [PATCH 355/408] py-bluepyemodel: opensourcing with dependencies (#40592) * py-bluepyemodel: new package with dependencies * py-morphio: add MPI as dependency to avoid failing builds * Formatting * py-bluepyefe: no need to set NEURON_INIT_MPI * py-morphio: unifurcation branch is ancient history * py-bluepyopt: only set NEURON_INIT_MPI with +neuron * py-efel: get rid of old version * py-morph{-tool,io}: rename develop to master to match branch * py-bluepyefe: unset PMI_RANK is also neuron-related * py-bluepyopt: PMI_RANK is also neuron-related * Implement review remarks * py-morph-tool, py-neurom: small fixes * py-morphio: reword 
dependencies --- .../builtin/packages/py-bluepyefe/package.py | 25 ++++++++++++ .../packages/py-bluepyemodel/package.py | 36 +++++++++++++++++ .../builtin/packages/py-bluepyopt/package.py | 37 ++++++++++++++++++ .../packages/py-bluepyopt/pmi_rank.patch | 17 ++++++++ .../packages/py-currentscape/package.py | 23 +++++++++++ .../repos/builtin/packages/py-efel/package.py | 24 ++++++++++++ .../builtin/packages/py-morph-tool/package.py | 39 +++++++++++++++++++ .../builtin/packages/py-morphio/package.py | 30 ++++++++++++++ .../builtin/packages/py-neurom/package.py | 35 +++++++++++++++++ 9 files changed, 266 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-bluepyefe/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyemodel/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyopt/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch create mode 100644 var/spack/repos/builtin/packages/py-currentscape/package.py create mode 100644 var/spack/repos/builtin/packages/py-efel/package.py create mode 100644 var/spack/repos/builtin/packages/py-morph-tool/package.py create mode 100644 var/spack/repos/builtin/packages/py-morphio/package.py create mode 100644 var/spack/repos/builtin/packages/py-neurom/package.py diff --git a/var/spack/repos/builtin/packages/py-bluepyefe/package.py b/var/spack/repos/builtin/packages/py-bluepyefe/package.py new file mode 100644 index 00000000000000..8a15e4edf9e2c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyefe/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyBluepyefe(PythonPackage): + """Blue Brain Python E-feature extraction""" + + homepage = "https://github.com/BlueBrain/BluePyEfe" + pypi = "bluepyefe/bluepyefe-2.2.18.tar.gz" + git = "https://github.com/BlueBrain/BluePyEfe.git" + + version("2.2.18", sha256="bfb50c6482433ec2ffb4b65b072d2778bd89ae50d92dd6830969222aabb30275") + + depends_on("py-setuptools", type="build") + + depends_on("py-numpy@:1.23", type=("build", "run")) + depends_on("py-neo", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-efel", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-h5py", type=("build", "run")) + depends_on("py-igor", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bluepyemodel/package.py b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py new file mode 100644 index 00000000000000..f865b9791b622b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBluepyemodel(PythonPackage): + """Python library to optimize and evaluate electrical models.""" + + homepage = "https://github.com/BlueBrain/BluePyEModel" + pypi = "bluepyemodel/bluepyemodel-0.0.46.tar.gz" + + version("0.0.46", sha256="ad4c125e491f3337fcc341a4f389b8a616d883ce50fd77d9fb0ea6e13be5da61") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("py-numpy", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-ipyparallel@6.3:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-gitpython", type=("build", "run")) + depends_on("py-bluepyopt@1.12.12:", type=("build", "run")) + depends_on("py-bluepyefe@2.2.0:", type=("build", "run")) + depends_on("py-neurom@3.0:3", type=("build", "run")) + depends_on("py-efel@3.1:", type=("build", "run")) + depends_on("py-configparser", type=("build", "run")) + depends_on("py-morph-tool@2.8:", type=("build", "run")) + depends_on("py-fasteners@0.16:", type=("build", "run")) + depends_on("neuron+python@8.0:", type=("build", "run")) + depends_on("py-jinja2@3.0.3", when="@0.0.11:", type=("build", "run")) + depends_on("py-currentscape@0.0.11:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/package.py b/var/spack/repos/builtin/packages/py-bluepyopt/package.py new file mode 100644 index 00000000000000..ccc39f913558fe --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyopt/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyBluepyopt(PythonPackage): + """Bluebrain Python Optimisation Library""" + + homepage = "https://github.com/BlueBrain/BluePyOpt" + pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz" + + # NOTE : while adding new release check pmi_rank.patch compatibility + version("1.14.4", sha256="7567fd736053250ca06030f67ad93c607b100c2b98df8dc588c26b64cb3e171c") + + # patch required to avoid hpe-mpi linked mechanism library + patch("pmi_rank.patch") + + variant("scoop", default=False, description="Use BluePyOpt together with py-scoop") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.6:", type=("build", "run")) + depends_on("py-pandas@0.18:", type=("build", "run")) + depends_on("py-deap@1.3.3:", type=("build", "run")) + depends_on("py-efel@2.13:", type=("build", "run")) + depends_on("py-ipyparallel", type=("build", "run")) + depends_on("py-pickleshare@0.7.3:", type=("build", "run")) + depends_on("py-jinja2@2.8:", type=("build", "run")) + depends_on("py-future", type=("build", "run")) + depends_on("py-pebble@4.6:", type=("build", "run")) + depends_on("py-scoop@0.7:", type=("build", "run"), when="+scoop") + depends_on("neuron@7.4:", type=("build", "run")) + + def setup_run_environment(self, env): + env.unset("PMI_RANK") + env.set("NEURON_INIT_MPI", "0") diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch new file mode 100644 index 00000000000000..21a73849b28683 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch @@ -0,0 +1,17 @@ +diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py +index e71ad8b..3c93237 100644 +--- a/bluepyopt/ephys/simulators.py ++++ b/bluepyopt/ephys/simulators.py +@@ -89,6 +89,12 @@ class NrnSimulator(object): + NrnSimulator._nrn_disable_banner() + self.banner_disabled = True + ++ # certain mpi libraries (hpe-mpt) use PMI_RANK env variable to initialize ++ # MPI before calling MPI_Init (which is undesirable). Unset this variable ++ # if exist to avoid issue with loading neuron and mechanism library. ++ if 'PMI_RANK' in os.environ: ++ os.environ.pop("PMI_RANK") ++ + import neuron # NOQA + + return neuron diff --git a/var/spack/repos/builtin/packages/py-currentscape/package.py b/var/spack/repos/builtin/packages/py-currentscape/package.py new file mode 100644 index 00000000000000..eb6d75be89c8ab --- /dev/null +++ b/var/spack/repos/builtin/packages/py-currentscape/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCurrentscape(PythonPackage): + """Module to easily plot the currents in electrical neuron models.""" + + homepage = "https://github.com/BlueBrain/Currentscape" + git = "https://github.com/BlueBrain/Currentscape.git" + pypi = "currentscape/currentscape-1.0.12.tar.gz" + + version("1.0.12", sha256="d83c5a58074e4d612553472a487e5d1d2854dc4d5c161817c6bafdf4a5988011") + + depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools-scm", type=("build",)) + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-palettable", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-efel/package.py b/var/spack/repos/builtin/packages/py-efel/package.py new file mode 100644 index 00000000000000..a33749b9af75eb --- /dev/null +++ b/var/spack/repos/builtin/packages/py-efel/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyEfel(PythonPackage): + """The Electrophys Feature Extract Library (eFEL) allows + neuroscientists to automatically extract features from time series data + recorded from neurons (both in vitro and in silico). + Examples are the action potential width and amplitude in + voltage traces recorded during whole-cell patch clamp experiments. + The user of the library provides a set of traces and selects the + features to be calculated. The library will then extract the requested + features and return the values to the user.""" + + homepage = "https://github.com/BlueBrain/eFEL" + pypi = "efel/efel-3.0.80.tar.gz" + + version("5.2.0", sha256="ed2c5efe22a4c703a4d9e47775b939009e1456713ac896898ebabf177c60b1dc") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-morph-tool/package.py b/var/spack/repos/builtin/packages/py-morph-tool/package.py new file mode 100644 index 00000000000000..7927b468c07edf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-morph-tool/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMorphTool(PythonPackage): + """Python morphology manipulation toolkit""" + + homepage = "https://github.com/BlueBrain/morph-tool" + git = "https://github.com/BlueBrain/morph-tool.git" + pypi = "morph-tool/morph-tool-2.9.1.tar.gz" + + version("master", branch="master") + version("2.9.1", sha256="305e9456c8047726588b23dfa070eb95ccbe5573e9fea3e0a83dc93eacdf61dc") + version("2.9.0", sha256="c60d4010e17ddcc3f53c864c374fffee05713c8f8fd2ba4eed7706041ce1fa47") + + variant("nrn", default=False, description="Enable additional neuron support") + variant("plot", default=False, description="Enable additional plotly support") + variant("parallel", default=False, description="Enable additional parallel support") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("py-click@6.7:", type=("build", "run")) + depends_on("py-deprecation@2.1.0:", type=("build", "run")) + depends_on("py-more-itertools@8.6.0:", type=("build", "run")) + depends_on("py-morphio@3", type=("build", "run")) + depends_on("py-neurom@3", type=("build", "run")) + depends_on("py-numpy@1.14:", type=("build", "run")) + depends_on("py-pandas@1.0.3:", type=("build", "run")) + depends_on("py-xmltodict@0.12.0:", type=("build", "run")) + + depends_on("py-plotly@4.1.0:", type=("build", "run"), when="+plot") + depends_on("py-dask+bag@2.19.0:", type=("build", "run"), when="+parallel") + depends_on("neuron+python@7.8:", type=("build", "run"), when="+nrn") + depends_on("py-bluepyopt@1.9.37:", type=("build", "run"), when="+nrn") diff --git a/var/spack/repos/builtin/packages/py-morphio/package.py b/var/spack/repos/builtin/packages/py-morphio/package.py new file mode 100644 index 00000000000000..a5a9fee7deaf3c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-morphio/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import sys + +from spack.package import * + + +class PyMorphio(PythonPackage): + """Python library for reading / writing morphology files""" + + homepage = "https://github.com/BlueBrain/MorphIO" + git = "https://github.com/BlueBrain/MorphIO.git" + pypi = "morphio/MorphIO-3.3.2.tar.gz" + + version("master", branch="master", submodules=True) + + version("3.3.6", sha256="0f2e55470d92a3d89f2141ae905ee104fd16257b93dafb90682d90171de2f4e6") + + depends_on("py-setuptools@24.2:", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("ninja", type="build") + depends_on("cmake@3.2:", type="build") + depends_on("py-numpy@1.14.1:", type=("build", "run")) + depends_on("py-h5py@3", when="platform=windows", type=("build", "run")) + if sys.platform != "win32": + depends_on("hdf5") diff --git a/var/spack/repos/builtin/packages/py-neurom/package.py b/var/spack/repos/builtin/packages/py-neurom/package.py new file mode 100644 index 00000000000000..19bad5fc7b94c7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neurom/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNeurom(PythonPackage): + """Python library neuron morphology analysis""" + + homepage = "https://github.com/BlueBrain/NeuroM" + git = "https://github.com/BlueBrain/NeuroM.git" + pypi = "neurom/neurom-2.2.1.tar.gz" + + version("master", branch="master") + version("3.2.4", sha256="a584e0979b54deee906dd716ea90de20773e20b527d83960d0fe655b0905eb4a") + + variant("plotly", default=False, description="Enable plotly support") + + depends_on("py-setuptools@42:", type=("build", "run")) + depends_on("py-setuptools-scm", type="build") + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-click@7.0:", type=("build", "run")) + depends_on("py-matplotlib@3.2.1:", type=("build", "run")) + depends_on("py-morphio@3.3.6:", type=("build", "run")) + depends_on("py-numpy@1.8.0:", type=("build", "run")) + depends_on("py-pandas@1.0.5:", type=("build", "run")) + depends_on("py-pyyaml@3.10:", type=("build", "run")) + depends_on("py-scipy@1.2.0:", type=("build", "run")) + depends_on("py-tqdm@4.8.4:", type=("build", "run")) + + depends_on("py-plotly@3.6.0:", type=("build", "run"), when="+plotly") + depends_on("py-psutil@5.5.1:", type=("build", "run"), when="+plotly") From bb6611552626aa49a058db26f5d2fc47a833ef71 Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sat, 28 Oct 2023 09:02:19 -0400 Subject: [PATCH 356/408] New version of py-langsmith (#40674) Co-authored-by: Benjamin Meyers --- var/spack/repos/builtin/packages/py-langsmith/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-langsmith/package.py b/var/spack/repos/builtin/packages/py-langsmith/package.py index a152c9903d01ce..e5c8363487b6e3 100644 --- a/var/spack/repos/builtin/packages/py-langsmith/package.py +++ b/var/spack/repos/builtin/packages/py-langsmith/package.py @@ -11,6 +11,7 @@ class PyLangsmith(PythonPackage): pypi = "langsmith/langsmith-0.0.10.tar.gz" + version("0.0.11", sha256="7c1be28257d6c7279c85f81e6d8359d1006af3b1238fc198d13ca75c8fe421c8") version("0.0.10", sha256="11e5db0d8e29ee5583cabd872eeece8ce50738737b1f52f316ac984f4a1a58c5") version("0.0.7", sha256="2f18e51cfd4e42f2b3cf00fa87e9d03012eb7269cdafd8e7c0cf7aa828dcc03e") From 5e446e637949a595847d8386bf5514e50652ccbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Lacroix?= Date: Sat, 28 Oct 2023 15:03:02 +0200 Subject: [PATCH 357/408] NCCL: Add version 2.19.3-1 (#40704) --- var/spack/repos/builtin/packages/nccl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nccl/package.py b/var/spack/repos/builtin/packages/nccl/package.py index 21db9dad2cb739..51f10ca7eec3a7 100644 --- a/var/spack/repos/builtin/packages/nccl/package.py +++ b/var/spack/repos/builtin/packages/nccl/package.py @@ -17,6 +17,7 @@ class Nccl(MakefilePackage, CudaPackage): maintainers("adamjstewart") libraries = ["libnccl.so"] + version("2.19.3-1", sha256="1c5474553afedb88e878c772f13d6f90b9226b3f2971dfa6f873adb9443100c2") version("2.18.5-1", sha256="16ac98f3e926c024ce48e10ab220e19ce734adc48c423cfd55ad6f509bd1179f") version("2.18.3-1", sha256="6477d83c9edbb34a0ebce6d751a1b32962bc6415d75d04972b676c6894ceaef9") version("2.18.1-1", sha256="0e4ede5cf8df009bff5aeb3a9f194852c03299ae5664b5a425b43358e7a9eef2") From 755de5538c7016ba1226646badffece76faabbd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 28 Oct 2023 15:05:37 +0200 Subject: [PATCH 358/408] py-generateds: new package (#40555) * [add] 
py-generateds: new package * py-generateds: Update from review Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-generateds: add versions 2.41.5, 2.42.1, 2.42.2, 2.43.1 and 2.43.2 --------- Co-authored-by: LydDeb Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --- .../builtin/packages/py-generateds/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-generateds/package.py diff --git a/var/spack/repos/builtin/packages/py-generateds/package.py b/var/spack/repos/builtin/packages/py-generateds/package.py new file mode 100644 index 00000000000000..bcf82787cf4dd3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-generateds/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGenerateds(PythonPackage): + """Generate Python data structures and XML parser from Xschema.""" + + homepage = "http://www.davekuhlman.org/generateDS.html" + pypi = "generateDS/generateDS-2.41.4.tar.gz" + + maintainers("LydDeb") + + version("2.43.2", sha256="e86f033f4d93414dd5b04cab9544a68b8f46d559073d85cd0990266b7b9ec09e") + version("2.43.1", sha256="2d3d71b42a09ba153bc51d2204324d04e384d0f15e41bdba881ee2daff9bbd68") + version("2.42.2", sha256="1d322aa7e074c262062b068660dd0c53bbdb0bb2b30152bb9e0074bd29fd365a") + version("2.42.1", sha256="87e4654449d34150802ca0cfb2330761382510d1385880f4d607cd34466abc2d") + version("2.41.5", sha256="8800c09454bb22f8f80f2ee138072d4e58bd5b6c14dbdf0a2a7ca13f06ba72e4") + version("2.41.4", sha256="804592eef573fa514741528a0bf9998f0c57ee29960c87f54608011f1fc722ea") + + depends_on("py-setuptools", type="build") + depends_on("py-six", type=("build", "run")) + depends_on("py-lxml", type=("build", "run")) + depends_on("py-requests@2.21:", type=("build", "run")) From f3671f20210f917faa4f8fe37deddd93dcde4a95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 28 Oct 2023 15:06:48 +0200 Subject: [PATCH 359/408] py-moarchiving: new package (#40558) * [add] py-moarchiving: new package * py-moarchiving: update from review: description, variant default value is False, switch when and type --------- Co-authored-by: LydDeb --- .../packages/py-moarchiving/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-moarchiving/package.py diff --git a/var/spack/repos/builtin/packages/py-moarchiving/package.py b/var/spack/repos/builtin/packages/py-moarchiving/package.py new file mode 100644 index 00000000000000..a43a9c9efad260 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-moarchiving/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMoarchiving(PythonPackage): + """ + Biobjective Archive class with hypervolume indicator and uncrowded + hypervolume improvement computation. 
+ """ + + homepage = "https://github.com/CMA-ES/moarchiving" + pypi = "moarchiving/moarchiving-0.6.0.tar.gz" + + maintainers("LydDeb") + + version("0.6.0", sha256="705ded992d399bc1ac703e68391bded6f64e1bde81b2bb25061eaa6208b5b29a") + + variant("arbitrary_precision", default=False, description="Build with Fraction support") + + depends_on("py-setuptools", type="build") + depends_on("py-fraction", when="+arbitrary_precision", type=("build", "run")) From 4381484751d7e74fb79da0af2eb02b05e26cecaf Mon Sep 17 00:00:00 2001 From: Jerome Soumagne Date: Sat, 28 Oct 2023 12:05:50 -0500 Subject: [PATCH 360/408] mercury: add v2.3.1 (#40749) --- var/spack/repos/builtin/packages/mercury/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mercury/package.py b/var/spack/repos/builtin/packages/mercury/package.py index 1c531277d79ce9..a2cdad6f1b6874 100644 --- a/var/spack/repos/builtin/packages/mercury/package.py +++ b/var/spack/repos/builtin/packages/mercury/package.py @@ -17,6 +17,7 @@ class Mercury(CMakePackage): tags = ["e4s"] version("master", branch="master", submodules=True) + version("2.3.1", sha256="36182d49f2db7e2b075240cab4aaa1d4ec87a7756450c87643ededd1e6f16104") version("2.3.0", sha256="e9e62ce1bb2fd482f0e85ad75fa255d9750c6fed50ba441a03de93b3b8eae742") version("2.2.0", sha256="e66490cf63907c3959bbb2932b5aaf51d96a481b17f0935f409f3a862eff97f6") version("2.1.0", sha256="9a58437161e9273b1b1c484d2f1a477a89eea9afe84575415025d47656f3761b") From e6c7225c06761260e393e38346d2bc3c468af465 Mon Sep 17 00:00:00 2001 From: Aoba Date: Mon, 30 Oct 2023 00:56:27 +0800 Subject: [PATCH 361/408] Add liggght patched for newer compiler (#38685) * Add liggght patched for newer compiler Add C++ 17 support Add Clang and Oneapi support * Add maintainers * Fix format in liggghts * Fix maintainers before versions Co-authored-by: Alec Scott * Fix style and user to usr * Update package.py --------- Co-authored-by: Alec Scott --- .../builtin/packages/liggghts/cpp-17.patch | 75 ++++++ .../makefile-llvm-based-compiler.patch | 21 ++ .../builtin/packages/liggghts/makefile.patch | 240 ++++++++++++++++++ .../builtin/packages/liggghts/package.py | 20 +- 4 files changed, 350 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin/packages/liggghts/cpp-17.patch create mode 100644 var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch create mode 100644 var/spack/repos/builtin/packages/liggghts/makefile.patch diff --git a/var/spack/repos/builtin/packages/liggghts/cpp-17.patch b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch new file mode 100644 index 00000000000000..73c4bffdd0f0de --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch @@ -0,0 +1,75 @@ +diff --git a/src/math_vector.h b/src/math_vector.h +index 2b8704af..79c0cedd 100644 +--- a/src/math_vector.h ++++ b/src/math_vector.h +@@ -94,7 +94,7 @@ inline void vec_neg(vector &dest) { // -a + dest[2] = -dest[2]; } + + inline void vec_norm(vector &dest) { // a/|a| +- register double f = sqrt(vec_dot(dest, dest)); ++ double f = sqrt(vec_dot(dest, dest)); + dest[0] /= f; + dest[1] /= f; + dest[2] /= f; } +@@ -222,7 +222,7 @@ inline void form_subtr(shape &dest, form &src) { // m_a-m_b + dest[3] -= src[3]; dest[4] -= src[4]; dest[5] -= src[5]; } + + inline int form_inv(form &m_inv, form &m) { // m^-1 +- register double det = form_det(m); ++ double det = form_det(m); + if (fzero(det)) return 0; + m_inv[0] = (m[1]*m[2]-m[3]*m[3])/det; + m_inv[1] = (m[0]*m[2]-m[4]*m[4])/det; +@@ -377,7 
+377,7 @@ inline void form4_unit(form4 &dest) { + dest[0] = dest[1] = dest[2] = dest[3] = 1.0; } + + inline double form4_det(form4 &m) { +- register double f = m[6]*m[7]-m[5]*m[8]; ++ double f = m[6]*m[7]-m[5]*m[8]; + return m[0]*( + m[1]*(m[2]*m[3]-m[4]*m[4])+ + m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])+f*f+ +@@ -387,7 +387,7 @@ inline double form4_det(form4 &m) { + m[9]*(m[4]*m[4]-m[2]*m[3])); } + + inline int form4_inv(form4 &m_inv, form4 &m) { +- register double det = form4_det(m); ++ double det = form4_det(m); + if (fzero(det)) return 0; + m_inv[0] = (m[1]*(m[2]*m[3]-m[4]*m[4])+ + m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])/det; +diff --git a/src/pair.cpp b/src/pair.cpp +index c0889f72..8c212715 100644 +--- a/src/pair.cpp ++++ b/src/pair.cpp +@@ -566,7 +566,7 @@ void Pair::init_tables_disp(double cut_lj_global) + } + + rsq = rsq_lookup.f; +- register double x2 = g2*rsq, a2 = 1.0/x2; ++ double x2 = g2*rsq, a2 = 1.0/x2; + x2 = a2*exp(-x2); + + rdisptable[i] = rsq_lookup.f; +@@ -612,7 +612,7 @@ void Pair::init_tables_disp(double cut_lj_global) + if (rsq_lookup.f < (cut_lj_globalsq = cut_lj_global * cut_lj_global)) { + rsq_lookup.f = cut_lj_globalsq; + +- register double x2 = g2*rsq, a2 = 1.0/x2; ++ double x2 = g2*rsq, a2 = 1.0/x2; + x2 = a2*exp(-x2); + f_tmp = g8*(((6.0*a2+6.0)*a2+3.0)*a2+1.0)*x2*rsq; + e_tmp = g6*((a2+1.0)*a2+0.5)*x2; +diff --git a/src/utils.h b/src/utils.h +index fab00e9b..5a122627 100644 +--- a/src/utils.h ++++ b/src/utils.h +@@ -67,7 +67,7 @@ namespace Utils { + + inline std::string int_to_string(int a) + { +- return static_cast< std::ostringstream & >(( std::ostringstream() << std::dec << a ) ).str(); ++ return static_cast< std::ostringstream & >(( std::ostringstream().flush() << std::dec << a ) ).str(); + } + + inline std::string double_to_string(double dbl) diff --git a/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch new file mode 100644 index 00000000000000..a5c26300a34392 --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch @@ -0,0 +1,21 @@ +diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto +index 239f886..8f42e73 100644 +--- a/src/MAKE/Makefile.auto ++++ b/src/MAKE/Makefile.auto +@@ -816,12 +816,14 @@ ifeq ($(USE_VTK), "ON") + endif + endif + endif +- open_bracket := ( +- close_bracket := ) ++ open_bracket := (" ++ close_bracket := ") ++ message := message + space := + space += + VTK_TMP := $(subst $(open_bracket),$(space),$(VTK_TMP)) + VTK_TMP := $(subst $(close_bracket),$(space),$(VTK_TMP)) ++ VTK_TMP := $(subst $(message),$(space),$(VTK_TMP)) + VTK_MAJOR_VERSION := $(patsubst "%",%,$(word $(words $(VTK_TMP)),$(VTK_TMP))) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE)) diff --git a/var/spack/repos/builtin/packages/liggghts/makefile.patch b/var/spack/repos/builtin/packages/liggghts/makefile.patch new file mode 100644 index 00000000000000..370e4b8dad21f0 --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/makefile.patch @@ -0,0 +1,240 @@ +diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto +index dde9e72..239f886 100644 +--- a/src/MAKE/Makefile.auto ++++ b/src/MAKE/Makefile.auto +@@ -440,12 +440,12 @@ ifeq ($(USE_MPI), "ON") + TMP_INC = -I$(MPI_INC) + endif + # We assume that the compiler supports #pragma message +- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif 
defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) + # See if compilation has worked out + ifeq ($(TMP), -1) + # Maybe it failed because of the optimization as -Og is not known + ifeq ($(USE_DEBUG), "ON") +- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile a simple MPI example (testing with -Og and -O0). Test was done with MPI_INC="$(TMP_INC)" and MPICXX="$(MPICXX)"') + else +@@ -566,7 +566,7 @@ else + $(shell $(ECHO) "#Compiling with mpi stubs" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#Command: $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE)") + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile a simple c++ example. Please make sure that you have run "make stubs" before compiling LIGGGHTS itself. 
Test was done with CXX=$(CXX), EXTRA_INC=$(EXTRA_INC), EXTRA_LIB=$(EXTRA_LIB) and EXTRA_ADDLIBS=$(EXTRA_ADDLIBS).') + endif +@@ -595,7 +595,7 @@ endif + HAVE_MATH_SPECIAL_FUNCS = 0 + # For c++17 this is included without any further defines + ifeq ($(CXXVERSION),17) +- TMP := $(shell $(ECHO) '\#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP),0) + HAVE_MATH_SPECIAL_FUNCS = 1 + endif +@@ -604,14 +604,14 @@ ifeq ($(CXXVERSION),17) + else + # For c++11 we need to check if ISO 29124:2010 is supported + ifeq ($(CXXVERSION),11) +- TMP := $(shell $(ECHO) '\#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n \#include \n \#if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n \#error "STOP" \n \#endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n #include \n #if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n #error "STOP" \n #endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP),0) + HAVE_MATH_SPECIAL_FUNCS = 1 + endif + endif + endif + ifeq ($(HAVE_MATH_SPECIAL_FUNCS),0) +- TMP := $(shell $(ECHO) '\#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + HAVE_TR1_CMATH = 0 + ifeq ($(TMP),0) + HAVE_TR1_CMATH = 1 +@@ -729,7 +729,7 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk major version detection" >> $(AUTO_LOG_FILE)) + endif + # note we assume here that our compiler supports #pragma message +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result: $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -744,7 +744,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(VTK_INC),-I) + VTK_INC = + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep 
"pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -797,7 +797,7 @@ ifeq ($(USE_VTK), "ON") + # At this stage we now have VTK downloaded. Next we need to compile it + $(info VTK has been downloaded and will be compiled now. This can take several minutes.) + OBJDIR := $(PWD) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of vtk failed. 
Please install it manually') +@@ -807,7 +807,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(VTK_INC),-I) + VTK_INC = + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -826,7 +826,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE)) + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(VTK_TMP), -1) + $(error Could not obtain VTK_MINOR_VERSION) + endif +@@ -885,7 +885,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(VTK_LIB_SET), 0) + VTK_LIB := -L$(dir $(shell find $(VTK_BASE_PATH)/lib* -name 'libvtkCommon.so' | tail -n 1)) +@@ -893,7 +893,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"') + else +@@ -924,7 +924,7 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk_lib: $(VTK_LIB)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#appendix command: $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE)" >> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || 
echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#attempting without appendix" >> $(AUTO_LOG_FILE)) +@@ -935,7 +935,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"') + else +@@ -1025,9 +1025,9 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk_addlibs: $(VTK_ADDLIBS)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#vtk_rpath: $(VTK_RPATH)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#vtk compile test:" >> $(AUTO_LOG_FILE)) +- TMP := $(shell $(ECHO) "\#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE)) ++ TMP := $(shell $(ECHO) "#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile VTK example with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_ADDLIBS="$(VTK_ADDLIBS)"') + endif +@@ -1057,7 +1057,7 @@ ifeq ($(USE_SUPERQUADRICS), "ON") + ifeq ($(REQUIRE_BOOST),1) + BOOST_INC ?= $(BOOST_INC_USR) + # Include test +- TMP := $(shell $(ECHO) '\#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile boost example with BOOST_INC="$(BOOST_INC)" as boost/math/special_functions/beta.hpp could not be found') + endif +@@ -1082,7 +1082,7 @@ ifeq ($(USE_JPG), "ON") + $(shell $(ECHO) "#JPG_ADDLIBS: $(JPG_ADDLIBS)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "jpg compile test:" >> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := 
$(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile jpg example with JPG_INC="$(JPG_INC)"') + endif +@@ -1090,7 +1090,7 @@ ifeq ($(USE_JPG), "ON") + $(shell $(ECHO) "jpg link test:" >> $(AUTO_LOG_FILE)) + endif + # Linking test +- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link jpg example with JPG_INC="$(JPG_INC)", JPG_LIB="$(JPG_LIB)" and JPG_ADDLIBS="$(JPG_ADDLIBS)"') + endif +@@ -1119,7 +1119,7 @@ ifeq ($(USE_CONVEX), "ON") + CONVEX_ADDLIBS += -lccd + # Test settings + # Link test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + # Automatic download and compilation if AUTODOWNLOAD_CONVEX is set + ifeq ($(TMP), -1) + ifeq ($(AUTOINSTALL_CONVEX), "ON") +@@ -1168,7 +1168,7 @@ ifeq ($(USE_CONVEX), "ON") + endif + # At this stage we now have libccd downloaded. Next we need to compile it + OBJDIR := $(PWD) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of libccd failed. 
Please install it manually') +@@ -1178,12 +1178,12 @@ ifeq ($(USE_CONVEX), "ON") + endif + endif + # Include test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)"') + endif + # Link test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)", CONVEX_LIB="$(CONVEX_LIB)" and CONVEX_ADDLIBS="$(CONVEX_ADDLIBS)"') + endif +@@ -1210,7 +1210,7 @@ ifeq ($(USE_MFEM), "ON") + MFEM_LIB ?= -L$(LIB_PATH)/mfem + MFEM_ADDLIBS += -lmfem + # Link test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(AUTOINSTALL_MFEM), "ON") + $(info 'Could not compile MFEM example. As AUTOINSTALL_MFEM is set to "ON". MFEM will now be automatically downloaded to ../lib/mfem') +@@ -1257,7 +1257,7 @@ ifeq ($(USE_MFEM), "ON") + # At this stage we now have MFEM downloaded. Next we need to compile it + TMP := $(shell ls $(LIB_PATH)/mfem/libmfem.a && echo 0 || echo -1) + ifeq ($(TMP), -1) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of MFEM failed. 
Please install it manually') +@@ -1270,12 +1270,12 @@ ifeq ($(USE_MFEM), "ON") + + + # Include test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile MFEM example with MFEM_INC="$(MFEM_INC)"') + endif + # Link test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link MFEM example. Test was done with MFEM_INC="$(MFEM_INC)", MFEM_LIB="$(MFEM_LIB)" and MFEM_ADDLIBS="$(MFEM_ADDLIBS)"') + endif diff --git a/var/spack/repos/builtin/packages/liggghts/package.py b/var/spack/repos/builtin/packages/liggghts/package.py index d9487bea4553ff..dac43ff3655115 100644 --- a/var/spack/repos/builtin/packages/liggghts/package.py +++ b/var/spack/repos/builtin/packages/liggghts/package.py @@ -16,6 +16,8 @@ class Liggghts(MakefilePackage): url = "https://github.com/CFDEMproject/LIGGGHTS-PUBLIC/archive/3.8.0.tar.gz" git = "ssh://git@github.com/CFDEMproject/LIGGGHTS-PUBLIC.git" + maintainers("SofiaXu") + version("3.8.0", sha256="9cb2e6596f584463ac2f80e3ff7b9588b7e3638c44324635b6329df87b90ab03") variant("mpi", default=True, description="Enable MPI support") @@ -28,7 +30,13 @@ class Liggghts(MakefilePackage): depends_on("mpi", when="+mpi") depends_on("jpeg", when="+jpeg") depends_on("zlib-api", when="+gzip") - + # patch for makefile test code + patch("makefile.patch") + # patch for clang and oneapi + patch("makefile-llvm-based-compiler.patch", when="%clang") + patch("makefile-llvm-based-compiler.patch", when="%oneapi") + # C++17 support + patch("cpp-17.patch") build_directory = "src" build_targets = ["auto"] @@ -55,9 +63,9 @@ def edit(self, spec, prefix): if "+mpi" in spec: mpi = spec["mpi"] - makefile.filter(r"^#(MPICXX_USER=).*", r"\1{0}".format(mpi.mpicxx)) - makefile.filter(r"^#(MPI_INC_USER=).*", r"\1{0}".format(mpi.prefix.include)) - makefile.filter(r"^#(MPI_LIB_USER=).*", r"\1{0}".format(mpi.prefix.lib)) + makefile.filter(r"^#(MPICXX_USR=).*", r"\1{0}".format(mpi.mpicxx)) + makefile.filter(r"^#(MPI_INC_USR=).*", r"\1{0}".format(mpi.prefix.include)) + makefile.filter(r"^#(MPI_LIB_USR=).*", r"\1{0}".format(mpi.prefix.lib)) else: makefile.filter(r"^(USE_MPI = ).*", r'\1"OFF"') # Set path to C++ compiler. 
@@ -70,8 +78,8 @@ def edit(self, spec, prefix): if "+jpeg" in spec: jpeg = spec["jpeg"] makefile.filter(r"^(USE_JPG = ).*", r'\1"ON"') - makefile.filter(r"^#(JPG_INC_USER=-I).*", r"\1{0}".format(jpeg.prefix.include)) - makefile.filter(r"^#(JPG_LIB_USER=-L).*", r"\1{0}".format(jpeg.prefix.lib)) + makefile.filter(r"^#(JPG_INC_USR=-I).*", r"\1{0}".format(jpeg.prefix.include)) + makefile.filter(r"^#(JPG_LIB_USR=-L).*", r"\1{0}".format(jpeg.prefix.lib)) if "+gzip" in spec: makefile.filter(r"^(USE_GZIP = ).*", r'\1"ON"') From 2fe68d185a3f7abc5635b6ff694edf5d874b5365 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 29 Oct 2023 19:45:23 +0100 Subject: [PATCH 362/408] py-numcodecs: fix broken sse / avx2 variables (#40754) --- var/spack/repos/builtin/packages/py-numcodecs/package.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-numcodecs/package.py b/var/spack/repos/builtin/packages/py-numcodecs/package.py index badf48b465bf8f..6d466c19175c48 100644 --- a/var/spack/repos/builtin/packages/py-numcodecs/package.py +++ b/var/spack/repos/builtin/packages/py-numcodecs/package.py @@ -49,10 +49,11 @@ def setup_build_environment(self, env): # This package likes to compile natively by checking cpu features and then setting flags # -msse2 and -mavx2, which we want to avoid in Spack. This could go away if the package # supports external libraries. - if "avx2" not in self.spec.target.features: - env.set("DISABLE_NUMCODECS_AVX2", "1") - if "sse2" not in self.spec.target.features: - env.set("DISABLE_NUMCODECS_SSE2", "1") + if self.spec.satisfies("target=x86_64:"): + if "avx2" not in self.spec.target.features: + env.set("DISABLE_NUMCODECS_AVX2", "1") + if "sse2" not in self.spec.target.features: + env.set("DISABLE_NUMCODECS_SSE2", "1") def flag_handler(self, name, flags): if name == "cflags": From 9fea0e1c5a9d837bece48f93cf05f806b45316e3 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sun, 29 Oct 2023 20:01:27 -0500 Subject: [PATCH 363/408] acts: add v28.1.0:30.3.2 (#40723) * acts: new version from 28.1.0 to 30.3.1 * acts: new version 30.3.2 * acts: new variant +podio --- var/spack/repos/builtin/packages/acts/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index 9b06fd3d444360..0acd01140221d8 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -40,6 +40,18 @@ class Acts(CMakePackage, CudaPackage): # Supported Acts versions version("main", branch="main") version("master", branch="main", deprecated=True) # For compatibility + version("30.3.2", commit="76826f208f5929d8326798c87263f2563d0ae7e9", submodules=True) + version("30.3.1", commit="bbee459dd93855417d5717d53cbbb2bace7de2bb", submodules=True) + version("30.3.0", commit="311acb9ab41c2d79a4b90b193e5b25297182d670", submodules=True) + version("30.2.0", commit="264b0a3214cbf8ca013623fc196e2d90d647c58f", submodules=True) + version("30.1.1", commit="3d43492b2775e62051e9ad31f06b91d6e2357ab9", submodules=True) + version("30.1.0", commit="60d9eec916f6c81373858c8d99d821861d7efeb8", submodules=True) + version("30.0.0", commit="00fa3fabac86a1e65198d4b94dd263b1c731a84c", submodules=True) + version("29.2.0", commit="b2d65308399d8f653fa8bdd73a2a203c58608358", submodules=True) + version("29.1.0", commit="4681c3b142db469b00ca03e92e6b237f7c89d141", submodules=True) + version("29.0.0", 
commit="9c6e4597af39f826e17d46850fdb407a48817ba6", submodules=True) + version("28.2.0", commit="c612e7c625f961330e383fb7856cc7398dd82881", submodules=True) + version("28.1.0", commit="08e51b5f93c0d09f2d1e7e4f062e715072ec3e9b", submodules=True) version("28.0.0", commit="0d8aa418c00e8f79bab2cf88234f3433670b447c", submodules=True) version("27.1.0", commit="219480220738318fbedb943cac85415687d75b66", submodules=True) version("27.0.0", commit="4d7029bd4e9285fcda2770aef6d78a7f833cb14f", submodules=True) @@ -214,6 +226,7 @@ class Acts(CMakePackage, CudaPackage): variant("mlpack", default=False, description="Build MLpack plugin", when="@25:") variant("onnx", default=False, description="Build ONNX plugin") variant("odd", default=False, description="Build the Open Data Detector", when="@19.1:") + variant("podio", default=False, description="Build Podio plugin", when="@30.3:") variant( "profilecpu", default=False, @@ -300,6 +313,8 @@ class Acts(CMakePackage, CudaPackage): depends_on("mlpack@3.1.1:", when="+mlpack") depends_on("nlohmann-json @3.9.1:", when="@0.14: +json") depends_on("podio @0.6:", when="@25: +edm4hep") + depends_on("podio @0.16:", when="@30.3: +edm4hep") + depends_on("podio @0.16:", when="+podio") depends_on("pythia8", when="+pythia8") depends_on("python", when="+python") depends_on("python@3.8:", when="+python @19.11:19") @@ -390,6 +405,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): plugin_cmake_variant("ONNX", "onnx"), enable_cmake_variant("CPU_PROFILING", "profilecpu"), enable_cmake_variant("MEMORY_PROFILING", "profilemem"), + plugin_cmake_variant("PODIO", "podio"), example_cmake_variant("PYTHIA8", "pythia8"), example_cmake_variant("PYTHON_BINDINGS", "python"), plugin_cmake_variant("ACTSVG", "svg"), From 98ab78101bc24202db39e6d436c465f97aa60e4e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 30 Oct 2023 07:38:53 +0100 Subject: [PATCH 364/408] ASP-based solver: avoid cycles in clingo using hidden directive (#40720) The code should be functonally equivalent to what it was before, but now to avoid cycles by design we are using a "hidden" feature of clingo --- lib/spack/spack/solver/asp.py | 29 ----------------------- lib/spack/spack/solver/concretize.lp | 4 ++++ lib/spack/spack/solver/cycle_detection.lp | 21 ---------------- 3 files changed, 4 insertions(+), 50 deletions(-) delete mode 100644 lib/spack/spack/solver/cycle_detection.lp diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 115de02096ce36..4acc54954960ae 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -8,7 +8,6 @@ import enum import itertools import os -import pathlib import pprint import re import types @@ -889,14 +888,6 @@ def on_model(model): timer.start("solve") solve_result = self.control.solve(**solve_kwargs) - - if solve_result.satisfiable and self._model_has_cycles(models): - tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]") - self.control.load(os.path.join(parent_dir, "cycle_detection.lp")) - self.control.ground([("no_cycle", [])]) - models.clear() - solve_result = self.control.solve(**solve_kwargs) - timer.stop("solve") # once done, construct the solve result @@ -950,26 +941,6 @@ def on_model(model): return result, timer, self.control.statistics - def _model_has_cycles(self, models): - """Returns true if the best model has cycles in it""" - cycle_detection = clingo.Control() - parent_dir = pathlib.Path(__file__).parent - lp_file = parent_dir / "cycle_detection.lp" - - min_cost, best_model = 
min(models) - with cycle_detection.backend() as backend: - for atom in best_model: - if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"': - symbol = fn.depends_on(atom.arguments[1], atom.arguments[2]) - atom_id = backend.add_atom(symbol.symbol()) - backend.add_rule([atom_id], [], choice=False) - - cycle_detection.load(str(lp_file)) - cycle_detection.ground([("base", []), ("no_cycle", [])]) - cycle_result = cycle_detection.solve() - - return cycle_result.unsatisfiable - class ConcreteSpecsByHash(collections.abc.Mapping): """Mapping containing concrete specs keyed by DAG hash. diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 7630ec1c78abe7..bec90212907f06 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1329,6 +1329,10 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package #defined installed_hash/2. +% This statement, which is a hidden feature of clingo, let us avoid cycles in the DAG +#edge (A, B) : depends_on(A, B). + + %----------------------------------------------------------------- % Optimization to avoid errors %----------------------------------------------------------------- diff --git a/lib/spack/spack/solver/cycle_detection.lp b/lib/spack/spack/solver/cycle_detection.lp deleted file mode 100644 index 310c543623d153..00000000000000 --- a/lib/spack/spack/solver/cycle_detection.lp +++ /dev/null @@ -1,21 +0,0 @@ -% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other -% Spack Project Developers. See the top-level COPYRIGHT file for details. -% -% SPDX-License-Identifier: (Apache-2.0 OR MIT) - -%============================================================================= -% Avoid cycles in the DAG -% -% Some combinations of conditional dependencies can result in cycles; -% this ensures that we solve around them. Note that these rules are quite -% demanding on both grounding and solving, since they need to compute and -% consider all possible paths between pair of nodes. -%============================================================================= - - -#program no_cycle. -path(Parent, Child) :- depends_on(Parent, Child). -path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). -:- path(A, A). - -#defined depends_on/2. 
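
A minimal sketch of how clingo's `#edge` directive enforces acyclicity, which is why the separate cycle-detection program above could be dropped. This is an illustration only, not Spack code: the two-node program and its `depends_on` facts are invented, and it assumes the `clingo` Python module (already used by the solver) is importable.

import clingo

# Two mutually dependent nodes. The #edge directive declares one graph edge
# per depends_on/2 fact, and clingo rejects any answer set whose declared
# edges form a cycle, so this tiny program has no acceptable model.
PROGRAM = """
depends_on(a, b).
depends_on(b, a).
#edge (A, B) : depends_on(A, B).
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
print(ctl.solve().satisfiable)  # expected: False -- the cycle rules out every model
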
From fce8927fb484127319277b52678e4a4b21f3ca8a Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Mon, 30 Oct 2023 00:17:51 -0700 Subject: [PATCH 365/408] mfem: support petsc+rocm with spack-installed rocm (#40768) --- var/spack/repos/builtin/packages/mfem/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 5fac0860ea1040..baab5cb80890c1 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -923,6 +923,7 @@ def find_optional_library(name, prefix): if "+rocm" in spec: amdgpu_target = ",".join(spec.variants["amdgpu_target"].value) options += ["HIP_CXX=%s" % spec["hip"].hipcc, "HIP_ARCH=%s" % amdgpu_target] + hip_headers = HeaderList([]) hip_libs = LibraryList([]) # To use a C++ compiler that supports -xhip flag one can use # something like this: @@ -933,7 +934,7 @@ def find_optional_library(name, prefix): # hip_libs += find_libraries("libamdhip64", spec["hip"].prefix.lib) if "^hipsparse" in spec: # hipsparse is needed @4.4.0:+rocm hipsparse = spec["hipsparse"] - options += ["HIP_OPT=%s" % hipsparse.headers.cpp_flags] + hip_headers += hipsparse.headers hip_libs += hipsparse.libs # Note: MFEM's defaults.mk wants to find librocsparse.* in # $(HIP_DIR)/lib, so we set HIP_DIR to be $ROCM_PATH when using @@ -943,11 +944,16 @@ def find_optional_library(name, prefix): options += ["HIP_DIR=%s" % env["ROCM_PATH"]] else: options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix] + if "^rocthrust" in spec and not spec["hip"].external: + # petsc+rocm needs the rocthrust header path + hip_headers += spec["rocthrust"].headers if "%cce" in spec: # We assume the proper Cray CCE module (cce) is loaded: craylibs_path = env["CRAYLIBS_" + machine().upper()] craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"] hip_libs += find_libraries(craylibs, craylibs_path) + if hip_headers: + options += ["HIP_OPT=%s" % hip_headers.cpp_flags] if hip_libs: options += ["HIP_LIB=%s" % ld_flags_from_library_list(hip_libs)] From 4aa4342d00464152d7868db6634a3fd05d97a0c2 Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Mon, 30 Oct 2023 03:35:36 -0400 Subject: [PATCH 366/408] exago: fix exago missing on PYTHONPATH when `+python` (#40748) --- var/spack/repos/builtin/packages/exago/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index 8db0f7f16fbefe..06a9c9f3931e59 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -62,10 +62,14 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): variant("raja", default=False, description="Enable/Disable RAJA") variant("python", default=True, when="@1.4:", description="Enable/Disable Python bindings") variant("logging", default=True, description="Enable/Disable spdlog based logging") + conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." 
) + # Adds ExaGO's python wrapper to PYTHONPATH + extends("python", when="+python") + # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") variant("ipopt", default=False, description="Enable/Disable IPOPT") From bf93ed4852f7dc1142adf1413b9af5d15ac2630c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Mon, 30 Oct 2023 08:54:36 +0100 Subject: [PATCH 367/408] pcl: checksum new versions (#39039) --- .../repos/builtin/packages/pcl/package.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pcl/package.py b/var/spack/repos/builtin/packages/pcl/package.py index f57dfed3871772..e0dd4967aa44cc 100644 --- a/var/spack/repos/builtin/packages/pcl/package.py +++ b/var/spack/repos/builtin/packages/pcl/package.py @@ -14,12 +14,29 @@ class Pcl(CMakePackage): homepage = "https://pointclouds.org/" url = "https://github.com/PointCloudLibrary/pcl/releases/download/pcl-1.11.1/source.tar.gz" + version("1.13.1", sha256="be4d499c066203a3c296e2f7e823d6209be5983415f2279310ed1c9abb361d30") + version("1.13.0", sha256="bd110789f6a7416ed1c58da302afbdb80f8d297a9e23cc02fd78ab78b4762698") + version("1.12.1", sha256="a9573efad5e024c02f2cc9180bb8f82605c3772c62463efbe25c5d6e634b91dc") + version("1.12.0", sha256="606a2d5c7af304791731d6b8ea79365bc8f2cd75908006484d71ecee01d9b51c") version("1.11.1", sha256="19d1a0bee2bc153de47c05da54fc6feb23393f306ab2dea2e25419654000336e") depends_on("cmake@3.5:", type="build") + depends_on("cmake@3.10:", when="@1.12.1:", type="build") depends_on("eigen@3.1:") + depends_on("eigen@3.3:", when="@1.13:") depends_on("flann@1.7:") - depends_on("boost@1.55:+filesystem+date_time+iostreams+system") + depends_on("flann@1.9.1:", when="@1.12:") + depends_on("boost@1.55:") + depends_on("boost@1.65:", when="@1.12:") + depends_on("boost+filesystem+iostreams+system") + depends_on("boost+date_time", when="@:1.13.0") + + # fix build with clang: #30653 + with when("@:1.12"): + patch( + "https://github.com/PointCloudLibrary/pcl/commit/dff16af269fbd2c15772d53064882b2bf8c2ffe9.patch?full_index=1", + sha256="17a7a7aec8e63701294612cbb25d46ac1ce58f643dbc68e1517329ae0b68956d", + ) # TODO: replace this with an explicit list of components of Boost, # for instance depends_on('boost +filesystem') From 9c8c416df1552db216d411cc9a5371068ffd8fbe Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 30 Oct 2023 03:28:52 -0500 Subject: [PATCH 368/408] PyTorch: build with external fp16 (#40760) --- .../repos/builtin/packages/fp16/package.py | 28 +++---------------- .../builtin/packages/py-torch/package.py | 6 ++-- 2 files changed, 6 insertions(+), 28 deletions(-) diff --git a/var/spack/repos/builtin/packages/fp16/package.py b/var/spack/repos/builtin/packages/fp16/package.py index f3d535efbdccb1..5e56aec0113a74 100644 --- a/var/spack/repos/builtin/packages/fp16/package.py +++ b/var/spack/repos/builtin/packages/fp16/package.py @@ -14,7 +14,7 @@ class Fp16(CMakePackage): git = "https://github.com/Maratyszcza/FP16.git" version("master", branch="master") - version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5:1.9 + version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5: version("2018-11-28", commit="febbb1c163726b5db24bed55cc9dc42529068997") # py-torch@1.1:1.4 version("2018-10-10", commit="34d4bf01bbf7376f2baa71b8fa148b18524d45cf") # py-torch@1.0 version("2018-02-25", commit="43d6d17df48ebf622587e7ed9472ea76573799b9") # py-torch@:0.4 @@ -29,31 +29,11 @@ class Fp16(CMakePackage): destination="deps", placement="psimd", ) - resource( - name="googletest", - url="https://github.com/google/googletest/archive/release-1.8.0.zip", - sha256="f3ed3b58511efd272eb074a3a6d6fb79d7c2e6a0e374323d1e6bcbcc1ef141bf", - destination="deps", - placement="googletest", - ) - resource( - name="googlebenchmark", - url="https://github.com/google/benchmark/archive/v1.2.0.zip", - sha256="cc463b28cb3701a35c0855fbcefb75b29068443f1952b64dd5f4f669272e95ea", - destination="deps", - placement="googlebenchmark", - ) def cmake_args(self): return [ self.define("PSIMD_SOURCE_DIR", join_path(self.stage.source_path, "deps", "psimd")), - self.define( - "GOOGLETEST_SOURCE_DIR", join_path(self.stage.source_path, "deps", "googletest") - ), - self.define( - "GOOGLEBENCHMARK_SOURCE_DIR", - join_path(self.stage.source_path, "deps", "googlebenchmark"), - ), - self.define("FP16_BUILD_TESTS", self.run_tests), - self.define("FP16_BUILD_BENCHMARKS", self.run_tests), + self.define("FP16_BUILD_TESTS", False), + # https://github.com/Maratyszcza/FP16/issues/21 + self.define("FP16_BUILD_BENCHMARKS", False), ] diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 8b641c4e702159..96cae5404be448 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -198,8 +198,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("sleef@3.5.1_2020-12-22", when="@1.8:") # https://github.com/pytorch/pytorch/issues/60334 # depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") - # https://github.com/Maratyszcza/FP16/issues/18 - # depends_on("fp16@2020-05-14", when="@1.6:") + depends_on("fp16@2020-05-14", when="@1.6:") depends_on("pthreadpool@2021-04-13", when="@1.9:") depends_on("pthreadpool@2020-10-05", when="@1.8") depends_on("pthreadpool@2020-06-15", when="@1.6:1.7") @@ -631,8 +630,7 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # env.set("USE_SYSTEM_CPUINFO", "ON") # https://github.com/pytorch/pytorch/issues/60270 # env.set("USE_SYSTEM_GLOO", "ON") - # https://github.com/Maratyszcza/FP16/issues/18 - # env.set("USE_SYSTEM_FP16", "ON") + env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") env.set("USE_SYSTEM_PSIMD", "ON") env.set("USE_SYSTEM_FXDIV", "ON") From 
7bb91d57ef3de2f474646754aa23155a044235ec Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 30 Oct 2023 03:32:48 -0500 Subject: [PATCH 369/408] py-pandas: add v2.1.2 (#40734) --- var/spack/repos/builtin/packages/py-pandas/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index f3d531f3bc382f..3dea26ff2c4111 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,8 +17,7 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") - variant("excel", when="@1.4:", default=False, description="Build with support for Excel") - + version("2.1.2", sha256="52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3") version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c") @@ -66,6 +65,8 @@ class PyPandas(PythonPackage): version("0.24.1", sha256="435821cb2501eabbcee7e83614bd710940dc0cf28b5afbc4bdb816c31cec71af") version("0.23.4", sha256="5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4") + variant("excel", when="@1.4:", default=False, description="Build with support for Excel") + # Required dependencies # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#python-version-support depends_on("python@3.9:3.12", when="@2.1.1:", type=("build", "run")) @@ -91,6 +92,7 @@ class PyPandas(PythonPackage): depends_on("py-versioneer+toml", when="@2:", type="build") # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#dependencies + depends_on("py-numpy@1.22.4:2", when="@2.1.2:", type=("build", "run")) depends_on("py-numpy@1.22.4:", when="@2.1:", type=("build", "run")) depends_on("py-numpy@1.20.3:", when="@1.5:", type=("build", "run")) depends_on("py-numpy@1.18.5:", when="@1.4:", type=("build", "run")) From d5824a943b51a954c224b3e489eade1d5c0a48a9 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Mon, 30 Oct 2023 01:36:02 -0700 Subject: [PATCH 370/408] Fix an issue with using the environment variable `MACHTYPE` which is not always defined (#40733) * Fix an issue reported here: https://github.com/spack/spack/pull/36154#issuecomment-1781854894 * [@spackbot] updating style on behalf of v-dobrev --- var/spack/repos/builtin/packages/butterflypack/package.py | 4 +++- var/spack/repos/builtin/packages/strumpack/package.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py index 848dbcdfebabef..c9726a52aaa942 100644 --- a/var/spack/repos/builtin/packages/butterflypack/package.py +++ b/var/spack/repos/builtin/packages/butterflypack/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from platform import machine + from spack.package import * @@ -74,7 +76,7 @@ def cmake_args(self): args.append("-Denable_openmp=%s" % ("ON" if "+openmp" in spec else "OFF")) if "%cce" in spec: # Assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] env.setdefault("LDFLAGS", "") env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path diff --git 
a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index fce0c4cd175f29..a82b3784b49a32 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from platform import machine + from spack.package import * from spack.util.environment import set_env @@ -173,7 +175,7 @@ def cmake_args(self): if "%cce" in spec: # Assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] env.setdefault("LDFLAGS", "") env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path From 2dc7500e64e4984bd4ebf44adc28241863101d30 Mon Sep 17 00:00:00 2001 From: wspear Date: Mon, 30 Oct 2023 01:40:08 -0700 Subject: [PATCH 371/408] RAJA: add "plugins" variant (#40750) --- var/spack/repos/builtin/packages/raja/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index cc1ede76be35f4..99221b9b08c7d3 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -114,6 +114,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): variant("openmp", default=True, description="Build OpenMP backend") variant("shared", default=True, description="Build Shared Libs") + variant("plugins", default=False, description="Enable runtime plugins") variant("examples", default=True, description="Build examples.") variant("exercises", default=True, description="Build exercises.") # TODO: figure out gtest dependency and then set this default True @@ -225,6 +226,7 @@ def initconfig_package_entries(self): if "camp" in self.spec: entries.append(cmake_cache_path("camp_DIR", spec["camp"].prefix)) entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("RAJA_ENABLE_RUNTIME_PLUGINS", "+plugins" in spec)) entries.append( cmake_cache_option("{}ENABLE_EXAMPLES".format(option_prefix), "+examples" in spec) ) From fc812fb85953b6f1d85405d5d73a33e338150426 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 30 Oct 2023 04:40:31 -0500 Subject: [PATCH 372/408] acts: new variant +binaries when +examples (#40738) Co-authored-by: wdconinc --- var/spack/repos/builtin/packages/acts/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index 0acd01140221d8..f474b92cc98c99 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -243,6 +243,9 @@ class Acts(CMakePackage, CudaPackage): variant("tgeo", default=False, description="Build the TGeo plugin", when="+identification") # Variants that only affect Acts examples for now + variant( + "binaries", default=False, description="Build the examples binaries", when="@23: +examples" + ) variant( "edm4hep", default=False, @@ -384,6 +387,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): cmake_variant("ANALYSIS_APPS", "analysis"), plugin_cmake_variant("AUTODIFF", "autodiff"), cmake_variant("BENCHMARKS", "benchmarks"), + example_cmake_variant("BINARIES", "binaries"), plugin_cmake_variant("CUDA", "cuda"), plugin_cmake_variant("DD4HEP", "dd4hep"), example_cmake_variant("DD4HEP", "dd4hep"), From 
6020c237d244c18c0b94debaba39729c2d716673 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Mon, 30 Oct 2023 03:26:24 -0700 Subject: [PATCH 373/408] aluminum: make network variants "sticky" (#40715) --- var/spack/repos/builtin/packages/aluminum/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py index 7c48339f69413a..bb085f8681044b 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -119,12 +119,14 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): "ofi_libfabric_plugin", default=spack.platforms.cray.slingshot_network(), when="+rccl", + sticky=True, description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", ) variant( "ofi_libfabric_plugin", default=spack.platforms.cray.slingshot_network(), when="+nccl", + sticky=True, description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", ) From 88cae4cc2ed510e5e9a3cb9a837217d1dcf46311 Mon Sep 17 00:00:00 2001 From: Alberto Sartori Date: Mon, 30 Oct 2023 12:09:42 +0100 Subject: [PATCH 374/408] justbuild: add version 1.2.2 (#40701) --- var/spack/repos/builtin/packages/justbuild/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 2251535dc8bf2e..06a350821fbcba 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -22,6 +22,7 @@ class Justbuild(Package): maintainers("asartori86") version("master", branch="master") + version("1.2.2", tag="v1.2.2", commit="e1ee04684c34ae30ac3c91b6753e99a81a9dc51c") version("1.2.1", tag="v1.2.1", commit="959cd90083d0c783389cd09e187c98322c16469f") version("1.1.4", tag="v1.1.4", commit="32e96afd159f2158ca129fd00bf02c273d8e1e48") version("1.1.3", tag="v1.1.3", commit="3aed5d450aec38be18edec822ac2efac6d49a938") From 93bd0c55612c5d38e8028e3994303e69afc41582 Mon Sep 17 00:00:00 2001 From: Federico Ficarelli <1379149+nazavode@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:12:20 +0100 Subject: [PATCH 375/408] pegtl: add v3.2.7 (#35687) --- var/spack/repos/builtin/packages/pegtl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pegtl/package.py b/var/spack/repos/builtin/packages/pegtl/package.py index 8384ed91281e66..72a4bd67ada63f 100644 --- a/var/spack/repos/builtin/packages/pegtl/package.py +++ b/var/spack/repos/builtin/packages/pegtl/package.py @@ -19,6 +19,7 @@ class Pegtl(CMakePackage): git = "https://github.com/taocpp/PEGTL.git" version("master", branch="master") + version("3.2.7", sha256="444c3c33686c6b2d8d45ad03af5041b7bc910ef44ac10216237d8e3e8d6e7025") version("3.2.0", sha256="91aa6529ef9e6b57368e7b5b1f04a3bd26a39419d30e35a3c5c66ef073926b56") version("2.8.3", sha256="370afd0fbe6d73c448a33c10fbe4a7254f92077f5a217317d0a32a9231293015") version("2.1.4", sha256="d990dccc07b4d9ba548326d11c5c5e34fa88b34fe113cb5377da03dda29f23f2") From 776d2024bbdbc381efcd110dfc8362a97c5ed9c2 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Mon, 30 Oct 2023 06:16:25 -0500 Subject: [PATCH 376/408] ISPC: Drop ncurses workaround in favor of patch (#39662) ISPC had a bug in their lookup for NCurses, this was fixed upstream and backported here. 
--- var/spack/repos/builtin/packages/ispc/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/ispc/package.py b/var/spack/repos/builtin/packages/ispc/package.py index 1da8faa0fd6956..8bef2ce2f584c1 100644 --- a/var/spack/repos/builtin/packages/ispc/package.py +++ b/var/spack/repos/builtin/packages/ispc/package.py @@ -67,10 +67,15 @@ class Ispc(CMakePackage): sha256="d3ccf547d3ba59779fd375e10417a436318f2200d160febb9f830a26f0daefdc", ) + # Fix library lookup for NCurses in CMake + patch( + "https://patch-diff.githubusercontent.com/raw/ispc/ispc/pull/2638.patch?full_index=1", + when="@1.18:1.20", + sha256="3f7dae8d4a683fca2a6157bbcb7cbe9692ff2094b0f4afaf29be121c02b0b3ad", + ) + def setup_build_environment(self, env): if self.spec.satisfies("@1.18.0:"): - env.append_flags("LDFLAGS", "-lcurses") - env.append_flags("LDFLAGS", "-ltinfo") env.append_flags("LDFLAGS", "-lz") def patch(self): From 30b23d8a6f18d261dd70dcf66fb2a07b69e604f1 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:22:31 +0100 Subject: [PATCH 377/408] Fetch recola from gitlab and add a new version of collier (#40651) Co-authored-by: jmcarcell --- .../repos/builtin/packages/collier/package.py | 1 + .../repos/builtin/packages/recola/package.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/collier/package.py b/var/spack/repos/builtin/packages/collier/package.py index c29704e354b0d2..98407d7c22076a 100644 --- a/var/spack/repos/builtin/packages/collier/package.py +++ b/var/spack/repos/builtin/packages/collier/package.py @@ -18,6 +18,7 @@ class Collier(CMakePackage): maintainers("vvolkl") + version("1.2.8", sha256="5cb24ce24ba1f62b7a96c655b31e9fddccc603eff31e60f9033b16354a6afd89") version("1.2.7", sha256="fde4b144a17c1bf5aa2ceaa86c71c79da10c9de8fec7bd33c8bffb4198acd5ca") version("1.2.6", sha256="b0d517868c71d2d1b8b6d3e0c370a43c9eb18ea8393a6e80070a5a2206f7de36") version("1.2.5", sha256="3ec58a975ff0c3b1ca870bc38973476c923ff78fd3dd5850e296037852b94a8b") diff --git a/var/spack/repos/builtin/packages/recola/package.py b/var/spack/repos/builtin/packages/recola/package.py index 80d11f2433444d..c4cb8d3c5f48b1 100644 --- a/var/spack/repos/builtin/packages/recola/package.py +++ b/var/spack/repos/builtin/packages/recola/package.py @@ -15,20 +15,27 @@ class Recola(CMakePackage): tags = ["hep"] - homepage = "https://recola.hepforge.org" - url = "https://recola.hepforge.org/downloads/?f=recola2-2.2.3.tar.gz" + homepage = "https://recola.gitlab.io/recola2/" + url = "https://gitlab.com/recola/recola2/-/archive/2.2.4/recola2-2.2.4.tar.gz" maintainers("vvolkl") variant("python", default=True, description="Build py-recola python bindings.") - version("2.2.4", sha256="16bdefb633d51842b4d32c39a43118d7052302cd63be456a473557e9b7e0316e") - version("2.2.3", sha256="db0f5e448ed603ac4073d4bbf36fd74f401a22876ad390c0d02c815a78106c5f") + version("2.2.4", sha256="212ae6141bc5de38c50be3e0c6947a3b0752aeb463cf850c22cfed5e61b1a64b") + version("2.2.3", sha256="8dc25798960c272434fcde93817ed92aad82b2a7cf07438bb4deb5688d301086") + version("2.2.2", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9") + version("2.2.0", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9") version( "1.4.3", url="https://recola.hepforge.org/downloads/?f=recola-1.4.3.tar.gz", 
sha256="f6a7dce6e1f09821ba919524f786557984f216c001ab63e7793e8aa9a8560ceb", ) + version( + "1.4.0", + url="https://recola.hepforge.org/downloads/?f=recola-1.4.0.tar.gz", + sha256="dc7db5ac9456dda2e6c03a63ad642066b0b5e4ceb8cae1f2a13ab33b35caaba8", + ) depends_on("collier") depends_on("recola-sm") From d3aa42147b729426791f26d805ac9f820bae1478 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 12:52:47 +0100 Subject: [PATCH 378/408] binary_distribution.py: fix type annotation singleton (#40572) Convince the language server it's really just a BinaryCacheIndex, otherwise it defaults to thinking it's Singleton, and can't autocomplete etc. --- lib/spack/spack/binary_distribution.py | 38 +++++++++++--------------- lib/spack/spack/bootstrap/core.py | 2 +- lib/spack/spack/ci.py | 2 +- lib/spack/spack/test/conftest.py | 6 ++-- 4 files changed, 21 insertions(+), 27 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index af04dfefb07113..6a49ab445e71d1 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -25,7 +25,7 @@ import warnings from contextlib import closing, contextmanager from gzip import GzipFile -from typing import Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Dict, List, NamedTuple, Optional, Set, Tuple from urllib.error import HTTPError, URLError import llnl.util.filesystem as fsys @@ -53,6 +53,7 @@ import spack.util.crypto import spack.util.file_cache as file_cache import spack.util.gpg +import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml import spack.util.timer as timer @@ -130,25 +131,25 @@ class BinaryCacheIndex: mean we should have paid the price to update the cache earlier? """ - def __init__(self, cache_root): - self._index_cache_root = cache_root + def __init__(self, cache_root: Optional[str] = None): + self._index_cache_root: str = cache_root or binary_index_location() # the key associated with the serialized _local_index_cache self._index_contents_key = "contents.json" # a FileCache instance storing copies of remote binary cache indices - self._index_file_cache = None + self._index_file_cache: Optional[file_cache.FileCache] = None # stores a map of mirror URL to index hash and cache key (index path) - self._local_index_cache = None + self._local_index_cache: Optional[dict] = None # hashes of remote indices already ingested into the concrete spec # cache (_mirrors_for_spec) - self._specs_already_associated = set() + self._specs_already_associated: Set[str] = set() # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating # whether the fetch succeeded or not. - self._last_fetch_times = {} + self._last_fetch_times: Dict[str, float] = {} # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # entries indicating mirrors where that concrete spec can be found. 
@@ -158,7 +159,7 @@ def __init__(self, cache_root): # - the concrete spec itself, keyed by ``spec`` (including the # full hash, since the dag hash may match but we want to # use the updated source if available) - self._mirrors_for_spec = {} + self._mirrors_for_spec: Dict[str, dict] = {} def _init_local_index_cache(self): if not self._index_file_cache: @@ -529,15 +530,8 @@ def binary_index_location(): return spack.util.path.canonicalize_path(cache_root) -def _binary_index(): - """Get the singleton store instance.""" - return BinaryCacheIndex(binary_index_location()) - - -#: Singleton binary_index instance -binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton( - _binary_index -) +#: Default binary cache index instance +BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore class NoOverwriteException(spack.error.SpackError): @@ -2255,7 +2249,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): tty.debug("No Spack mirrors are currently configured") return {} - results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check) + results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check) # The index may be out-of-date. If we aren't only considering indices, try # to fetch directly since we know where the file should be. @@ -2264,7 +2258,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): # We found a spec by the direct fetch approach, we might as well # add it to our mapping. if results: - binary_index.update_spec(spec, results) + BINARY_INDEX.update_spec(spec, results) return results @@ -2280,12 +2274,12 @@ def update_cache_and_get_specs(): Throws: FetchCacheError """ - binary_index.update() - return binary_index.get_all_built_specs() + BINARY_INDEX.update() + return BINARY_INDEX.get_all_built_specs() def clear_spec_cache(): - binary_index.clear() + BINARY_INDEX.clear() def get_keys(install=False, trust=False, force=False, mirrors=None): diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index d7b39b02e0cc38..9fb04453c42590 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -214,7 +214,7 @@ def _install_and_test( with spack.config.override(self.mirror_scope): # This index is currently needed to get the compiler used to build some # specs that we know by dag hash. 
- spack.binary_distribution.binary_index.regenerate_spec_cache() + spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache() index = spack.binary_distribution.update_cache_and_get_specs() if not index: diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index fca28362540623..cda7a622d69187 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -932,7 +932,7 @@ def generate_gitlab_ci_yaml( # Speed up staging by first fetching binary indices from all mirrors try: - bindist.binary_index.update() + bindist.BINARY_INDEX.update() except bindist.FetchCacheError as e: tty.warn(e) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 3505d7213046f2..514b1e91542403 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -495,7 +495,7 @@ def mock_binary_index(monkeypatch, tmpdir_factory): tmpdir = tmpdir_factory.mktemp("mock_binary_index") index_path = tmpdir.join("binary_index").strpath mock_index = spack.binary_distribution.BinaryCacheIndex(index_path) - monkeypatch.setattr(spack.binary_distribution, "binary_index", mock_index) + monkeypatch.setattr(spack.binary_distribution, "BINARY_INDEX", mock_index) yield @@ -1710,8 +1710,8 @@ def inode_cache(): @pytest.fixture(autouse=True) def brand_new_binary_cache(): yield - spack.binary_distribution.binary_index = llnl.util.lang.Singleton( - spack.binary_distribution._binary_index + spack.binary_distribution.BINARY_INDEX = llnl.util.lang.Singleton( + spack.binary_distribution.BinaryCacheIndex ) From f1182e62f717392cb4f4420e8975e0f4ee796696 Mon Sep 17 00:00:00 2001 From: RichardBuntLinaro <133871029+RichardBuntLinaro@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:43:07 +0000 Subject: [PATCH 379/408] linaro-forge: add v23.0.4 (#40772) --- var/spack/repos/builtin/packages/linaro-forge/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py index 7724f87a29ce6d..e67613fef1f621 100644 --- a/var/spack/repos/builtin/packages/linaro-forge/package.py +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -23,6 +23,9 @@ class LinaroForge(Package): maintainers("kenche-linaro") if platform.machine() in ["aarch64", "arm64"]: + version( + "23.0.4", sha256="a19e6b247badaa52f78815761f71fb95a565024b7f79bdfb2f602f18b47a881c" + ) version( "23.0.3", sha256="a7e23ef2a187f8e2d6a6692cafb931c9bb614abf58e45ea9c2287191c4c44f02" ) @@ -40,6 +43,9 @@ class LinaroForge(Package): "21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb" ) elif platform.machine() == "ppc64le": + version( + "23.0.4", sha256="927c1ba733cf63027243060586b196f8262e545d898712044c359a6af6fc5795" + ) version( "23.0.3", sha256="5ff9770f4bc4a2df4bac8a2544a9d6bad9fba2556420fa2e659e5c21e741caf7" ) @@ -60,6 +66,9 @@ class LinaroForge(Package): "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" ) elif platform.machine() == "x86_64": + version( + "23.0.4", sha256="41a81840a273ea9a232efb4f031149867c5eff7a6381d787e18195f1171caac4" + ) version( "23.0.3", sha256="f2a010b94838f174f057cd89d12d03a89ca946163536eab178dd1ec877cdc27f" ) From bc20c6194484c3071b6f1a8ff0e25c9eed3b937f Mon Sep 17 00:00:00 2001 From: Brian Vanderwende Date: Mon, 30 Oct 2023 06:53:57 -0600 Subject: [PATCH 380/408] Get utilities necessary for successful PIO build (#40502) --- var/spack/repos/builtin/packages/parallelio/package.py | 5 +++++ 1 file changed, 5 
insertions(+) diff --git a/var/spack/repos/builtin/packages/parallelio/package.py b/var/spack/repos/builtin/packages/parallelio/package.py index f3bcbaa99ad1a6..1841fe6bf6ccc8 100644 --- a/var/spack/repos/builtin/packages/parallelio/package.py +++ b/var/spack/repos/builtin/packages/parallelio/package.py @@ -55,6 +55,11 @@ class Parallelio(CMakePackage): depends_on("parallel-netcdf", type="link", when="+pnetcdf") resource(name="genf90", git="https://github.com/PARALLELIO/genf90.git", tag="genf90_200608") + resource( + name="CMake_Fortran_utils", + git="https://github.com/CESM-Development/CMake_Fortran_utils.git", + tag="CMake_Fortran_utils_150308", + ) # Allow argument mismatch in gfortran versions > 10 for mpi library compatibility patch("gfortran.patch", when="@:2.5.8 +fortran %gcc@10:") From 342b56905d28a1bf7ca479284492bc49e0b8fd4a Mon Sep 17 00:00:00 2001 From: SXS Bot <31972027+sxs-bot@users.noreply.github.com> Date: Mon, 30 Oct 2023 05:56:05 -0700 Subject: [PATCH 381/408] spectre: add v2023.10.11 (#40463) Co-authored-by: nilsvu --- var/spack/repos/builtin/packages/spectre/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/spectre/package.py b/var/spack/repos/builtin/packages/spectre/package.py index 7b8bc9dfebb1be..df0ff30acd88db 100644 --- a/var/spack/repos/builtin/packages/spectre/package.py +++ b/var/spack/repos/builtin/packages/spectre/package.py @@ -29,6 +29,9 @@ class Spectre(CMakePackage): generator("ninja") version("develop", branch="develop") + version( + "2023.10.11", sha256="f25d17bc80cc49ebdd81726326701fe9ecd2b6705d86e6e3d48d9e4a458c8aff" + ) version( "2023.09.07", sha256="2375117df09d99a2716d445ff51d151422467bd42cd38b5f1177d2d40cb90916" ) From 3ee4f97e0278c62db3374610cb6bf88699130503 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 30 Oct 2023 06:08:23 -0700 Subject: [PATCH 382/408] must: remove release candidates (#40476) --- var/spack/repos/builtin/packages/must/package.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/must/package.py b/var/spack/repos/builtin/packages/must/package.py index 1b3b0b152dc52b..5d6c36bb044e84 100644 --- a/var/spack/repos/builtin/packages/must/package.py +++ b/var/spack/repos/builtin/packages/must/package.py @@ -21,11 +21,8 @@ class Must(CMakePackage): version("1.9.0", sha256="24998f4ca6bce718d69347de90798600f2385c21266c2d1dd39a87dd8bd1fba4") version("1.8.0", sha256="9754fefd2e4c8cba812f8b56a5dd929bc84aa599b2509305e1eb8518be0a8a39") - version("1.8.0-rc1", sha256="49fd2487fbd1aa41f4252c7e37efebd3f6ff48218c88e82f34b88d59348fe406") - version( - "1.8-preview", sha256="67b4b061db7a893e22a6610e2085072716d11738bc6cc3cb3ffd60d6833e8bad" - ) version("1.7.2", sha256="616c54b7487923959df126ac4b47ae8c611717d679fe7ec29f57a89bf0e2e0d0") + variant("test", default=False, description="Enable must internal tests") variant("tsan", default=True, description="Enable thread sanitizer") variant("graphviz", default=False, description="Use to generate graphs") From 44b79ae8126346a5927d3dfc7e7c5f303d824980 Mon Sep 17 00:00:00 2001 From: marcost2 <52476474+marcost2@users.noreply.github.com> Date: Mon, 30 Oct 2023 10:19:42 -0300 Subject: [PATCH 383/408] freesurfer: fix support for linux (#39864) * Load the script file during enviroment setup so that all the enviroment variables are set properly * Patch csh/tcsh so that it uses spacks via env * Update SHA for latest version * Extend shebang to perl and fix up the regex --- .../builtin/packages/freesurfer/package.py | 
25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/freesurfer/package.py b/var/spack/repos/builtin/packages/freesurfer/package.py index 4bf4a4a2f5ef71..0e7188db06c51a 100644 --- a/var/spack/repos/builtin/packages/freesurfer/package.py +++ b/var/spack/repos/builtin/packages/freesurfer/package.py @@ -3,7 +3,11 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import glob +import os + from spack.package import * +from spack.util.environment import EnvironmentModifications class Freesurfer(Package): @@ -14,11 +18,11 @@ class Freesurfer(Package): # A license is required, but is free to obtain. license_required = True - license_files = ["./license.txt"] + license_files = [".license"] maintainers("robgics") - version("7.4.1", sha256="eb6545d1ffdee17a90abd2e7dc444aa1091a6138e257f6f956a7ff214635b092") + version("7.4.1", sha256="313a96caeb246c5985f483633b5cf43f86ed8f7ccc6d6acfac8eedb638443010") version("7.4.0", sha256="6b65c2edf3b88973ced0324269a88966c541f221b799337c6570c38c2f884431") version("7.3.2", sha256="58518d3ee5abd2e05109208aed2eef145c4e3b994164df8c4e0033c1343b9e56") version("7.2.0", sha256="4cca78602f898bf633428b9d82cbb9b07e3ab97a86c620122050803779c86d62") @@ -27,6 +31,9 @@ class Freesurfer(Package): depends_on("mesa-glu") depends_on("qt") + depends_on("tcsh") + depends_on("bc") + depends_on("perl") def url_for_version(self, version): return "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/{0}/freesurfer-linux-centos7_x86_64-{1}.tar.gz".format( @@ -34,10 +41,24 @@ def url_for_version(self, version): ) def setup_run_environment(self, env): + source_file = join_path(self.prefix, "SetUpFreeSurfer.sh") env.prepend_path("PATH", self.prefix.bin) env.set("FREESURFER_HOME", self.prefix) env.set("SUBJECTS_DIR", join_path(self.prefix, "subjects")) env.set("FUNCTIONALS_DIR", join_path(self.prefix, "sessions")) + env.append_path("PERL5LIB", join_path(self.prefix, "mni/share/perl5")) + env.append_path("PATH", join_path(self.prefix, "mni/bin")) + env.extend(EnvironmentModifications.from_sourcing_file(source_file)) def install(self, spec, prefix): + scripts = ["sources.csh", "SetUpFreeSurfer.csh"] + scripts.extend(glob.glob("bin/*")) + scripts.extend(glob.glob("subjects/**/*", recursive=True)) + scripts.extend(glob.glob("fsfast/bin/*", recursive=True)) + scripts.extend(glob.glob("mni/bin/*", recursive=True)) + for s in scripts: + if os.path.isfile(s): + filter_file(r"(\/usr)?(\/local?)\/bin\/tcsh", "/usr/bin/env -S tcsh", s) + filter_file(r"(\/usr)?(\/local?)\/bin\/csh", "/usr/bin/env -S csh", s) + filter_file(r"(\/usr)?(\/local)?\/bin\/perl", "/usr/bin/env -S perl", s) install_tree(".", prefix) From a2f2ea3a9aefff4f10cb6bb4e94c8df77ecb6e1b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 15:07:30 +0100 Subject: [PATCH 384/408] tty: flush immediately (#40774) --- lib/spack/llnl/util/tty/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index b3975cc08d7d88..ec7bd665374c08 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -211,6 +211,7 @@ def info(message, *args, **kwargs): stream.write(line + "\n") else: stream.write(indent + _output_filter(str(arg)) + "\n") + stream.flush() def verbose(message, *args, **kwargs): From eb12c3c42626ef505a11dfd763e7509228aca4c9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 15:29:27 +0100 Subject: [PATCH 385/408] ci: print 
colored specs in concretization progress (#40711) --- lib/spack/spack/ci.py | 44 ++++++++----------- lib/spack/spack/environment/environment.py | 5 ++- lib/spack/spack/spec.py | 12 +++-- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 4 +- 4 files changed, 33 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index cda7a622d69187..afad3b7a45197e 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -25,6 +25,7 @@ import llnl.util.filesystem as fs import llnl.util.tty as tty from llnl.util.lang import memoized +from llnl.util.tty.color import cescape, colorize import spack import spack.binary_distribution as bindist @@ -97,15 +98,6 @@ def _remove_reserved_tags(tags): return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS] -def _get_spec_string(spec): - format_elements = ["{name}{@version}", "{%compiler}"] - - if spec.architecture: - format_elements.append(" {arch=architecture}") - - return spec.format("".join(format_elements)) - - def _spec_deps_key(s): return "{0}/{1}".format(s.name, s.dag_hash(7)) @@ -210,22 +202,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi tty.msg("Staging summary ([x] means a job needs rebuilding):") for stage_index, stage in enumerate(stages): - tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage))) + tty.msg(f" stage {stage_index} ({len(stage)} jobs):") - for job in sorted(stage): + for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)): s = spec_labels[job] - rebuild = rebuild_decisions[job].rebuild reason = rebuild_decisions[job].reason - reason_msg = " ({0})".format(reason) if reason else "" - tty.msg( - " [{1}] {0} -> {2}{3}".format( - job, "x" if rebuild else " ", _get_spec_string(s), reason_msg - ) - ) - if rebuild_decisions[job].mirrors: - tty.msg(" found on the following mirrors:") - for murl in rebuild_decisions[job].mirrors: - tty.msg(" {0}".format(murl)) + reason_msg = f" ({reason})" if reason else "" + spec_fmt = "{name}{@version}{%compiler}{/hash:7}" + if rebuild_decisions[job].rebuild: + status = colorize("@*g{[x]} ") + msg = f" {status}{s.cformat(spec_fmt)}{reason_msg}" + else: + msg = f"{s.format(spec_fmt)}{reason_msg}" + if rebuild_decisions[job].mirrors: + msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]" + msg = colorize(f" @K - {cescape(msg)}@.") + tty.msg(msg) def _compute_spec_deps(spec_list): @@ -2258,13 +2250,13 @@ def build_name(self): spec.architecture, self.build_group, ) - tty.verbose( + tty.debug( "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name) ) return build_name build_name = os.environ.get("SPACK_CDASH_BUILD_NAME") - tty.verbose("Using CDash build name ({0}) from the environment".format(build_name)) + tty.debug("Using CDash build name ({0}) from the environment".format(build_name)) return build_name @property # type: ignore @@ -2278,11 +2270,11 @@ def build_stamp(self): Returns: (str) current CDash build stamp""" build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP") if build_stamp: - tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp)) + tty.debug("Using build stamp ({0}) from the environment".format(build_stamp)) return build_stamp build_stamp = cdash_build_stamp(self.build_group, time.time()) - tty.verbose("Generated new build stamp ({0})".format(build_stamp)) + tty.debug("Generated new build stamp ({0})".format(build_stamp)) return build_stamp @property # type: ignore diff --git a/lib/spack/spack/environment/environment.py 
b/lib/spack/spack/environment/environment.py index 0b36351d4e853c..9998161df2b45d 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1525,7 +1525,10 @@ def _concretize_separately(self, tests=False): ): batch.append((i, concrete)) percentage = (j + 1) / len(args) * 100 - tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}") + tty.verbose( + f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} " + f"{root_specs[i].colored_str}" + ) sys.stdout.flush() # Add specs in original order diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index bfe453fa36e950..f1eee321f08f91 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4498,10 +4498,16 @@ def format_path( def __str__(self): sorted_nodes = [self] + sorted( - self.traverse(root=False), key=lambda x: x.name or x.abstract_hash + self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) ) - spec_str = " ^".join(d.format() for d in sorted_nodes) - return spec_str.strip() + return " ^".join(d.format() for d in sorted_nodes).strip() + + @property + def colored_str(self): + sorted_nodes = [self] + sorted( + self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) + ) + return " ^".join(d.cformat() for d in sorted_nodes).strip() def install_status(self): """Helper for tree to print DB install status.""" diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index e5475a7bdc6ed5..196037585fcdce 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -150,7 +150,7 @@ default: - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - - spack -v + - spack -v --color=always --config-scope "${SPACK_CI_CONFIG_ROOT}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" @@ -203,7 +203,7 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . 
- - spack -v + - spack -v --color=always ci generate --check-index-only --buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" From 97a76605590be8f37737d89fa95ff18fc7593335 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 30 Oct 2023 13:33:22 -0400 Subject: [PATCH 386/408] squashfuse: add version 0.5.0 (#40775) --- var/spack/repos/builtin/packages/squashfuse/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/squashfuse/package.py b/var/spack/repos/builtin/packages/squashfuse/package.py index 939b738c7d99a4..40aec33134b405 100644 --- a/var/spack/repos/builtin/packages/squashfuse/package.py +++ b/var/spack/repos/builtin/packages/squashfuse/package.py @@ -10,12 +10,12 @@ class Squashfuse(AutotoolsPackage): """squashfuse - Mount SquashFS archives using FUSE""" homepage = "https://github.com/vasi/squashfuse" - url = "https://github.com/vasi/squashfuse/releases/download/0.1.104/squashfuse-0.1.104.tar.gz" git = "https://github.com/vasi/squashfuse.git" maintainers("haampie") version("master", branch="master") + version("0.5.0", sha256="d7602c7a3b1d0512764547d27cb8cc99d1b21181e1c9819e76461ee96c2ab4d9") version("0.1.104", sha256="aa52460559e0d0b1753f6b1af5c68cfb777ca5a13913285e93f4f9b7aa894b3a") version("0.1.103", sha256="42d4dfd17ed186745117cfd427023eb81effff3832bab09067823492b6b982e7") @@ -51,6 +51,14 @@ class Squashfuse(AutotoolsPackage): depends_on("automake", type="build", when="@master") depends_on("libtool", type="build", when="@master") + def url_for_version(self, version): + url = "https://github.com/vasi/squashfuse/releases/download/" + if version == Version("0.5.0"): + url += "v{}/squashfuse-{}.tar.gz" + else: + url += "{}/squashfuse-{}.tar.gz" + return url.format(version, version) + def flag_handler(self, name, flags): if name == "cflags" and "+min_size" in self.spec: if "-Os" in self.compiler.opt_flags: From d92b260e3a48389c7d572f2c91f42a2193e7b803 Mon Sep 17 00:00:00 2001 From: MatthewLieber <77356607+MatthewLieber@users.noreply.github.com> Date: Mon, 30 Oct 2023 19:01:48 -0400 Subject: [PATCH 387/408] adding sha for OMB 7.3 release (#40784) Co-authored-by: Matt Lieber --- var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py index 8c9f50a3d1aa2d..6a755fcbf11f89 100644 --- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -20,6 +20,7 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage): maintainers("natshineman", "harisubramoni", "MatthewLieber") + version("7.3", sha256="8fa25b8aaa34e4b07ab3a4f30b7690ab46b038b08d204a853a9b6aa7bdb02f2f") version("7.2", sha256="1a4e1f2aab0e65404b3414e23bd46616184b69b6231ce9313d9c630bd6e633c1") version("7.1-1", sha256="85f4dd8be1df31255e232852769ae5b82e87a5fb14be2f8eba1ae9de8ffe391a") version("7.1", sha256="2c4c931ecaf19e8ab72a393ee732e25743208c9a58fa50023e3fac47064292cc") From f465bc75f987c42a676bc00f5bf7b3dafb8fe912 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 00:11:53 +0100 Subject: [PATCH 388/408] build(deps): bump black from 23.9.1 to 23.10.1 in /lib/spack/docs (#40680) Bumps [black](https://github.com/psf/black) from 23.9.1 to 23.10.1. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.9.1...23.10.1) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 31403710385657..10e19f093e5eec 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -8,6 +8,6 @@ pygments==2.16.1 urllib3==2.0.7 pytest==7.4.3 isort==5.12.0 -black==23.9.1 +black==23.10.1 flake8==6.1.0 mypy==1.6.1 From 4779faaaf835c828a24fd53b04067fb7848b8163 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Mon, 30 Oct 2023 18:22:55 -0500 Subject: [PATCH 389/408] Add hdf5 version 1.14.3. (#40786) Add hdf5 version 1.10.11. Update version condition for adding h5pfc->h5fc symlink. File h5pfc exists in versions 1.10.10 and 1.10.22. --- var/spack/repos/builtin/packages/hdf5/package.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index dbd9acf7d6e4e9..0a3903e8015478 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -20,7 +20,7 @@ class Hdf5(CMakePackage): """ homepage = "https://portal.hdfgroup.org" - url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.8/src/hdf5-1.10.8.tar.gz" + url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.3/src/hdf5-1.14.3.tar.gz" list_url = "https://support.hdfgroup.org/ftp/HDF5/releases" list_depth = 3 git = "https://github.com/HDFGroup/hdf5.git" @@ -41,6 +41,11 @@ class Hdf5(CMakePackage): # Odd versions are considered experimental releases # Even versions are maintenance versions + version( + "1.14.3", + sha256="09cdb287aa7a89148c1638dd20891fdbae08102cf433ef128fd345338aa237c7", + preferred=True, + ) version( "1.14.2", sha256="1c342e634008284a8c2794c8e7608e2eaf26d01d445fb3dfd7f33cb2fb51ac53", @@ -71,6 +76,11 @@ class Hdf5(CMakePackage): sha256="a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a", preferred=True, ) + version( + "1.10.11", + sha256="341684c5c0976b8c7e6951735a400275a90693604464cac73e9f323c696fc79c", + preferred=True, + ) version( "1.10.10", sha256="a6877ab7bd5d769d2d68618fdb54beb50263dcc2a8c157fe7e2186925cdb02db", @@ -657,7 +667,7 @@ def ensure_parallel_compiler_wrappers(self): # 1.10.6 and 1.12.0. The current develop versions do not produce 'h5pfc' # at all. Here, we make sure that 'h5pfc' is available when Fortran and # MPI support are enabled (only for versions that generate 'h5fc'). 
- if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10," "1.12.0:1.12" "+fortran+mpi"): + if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10.9," "1.12.0:1.12" "+fortran+mpi"): with working_dir(self.prefix.bin): # No try/except here, fix the condition above instead: symlink("h5fc", "h5pfc") From ccf497aa0e71b32d2e492d4b9026f35b99b7f46d Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Tue, 31 Oct 2023 01:55:33 +0100 Subject: [PATCH 390/408] dd4hep: Add tag for version 1.27 (#40776) --- var/spack/repos/builtin/packages/dd4hep/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 459582022bfe1d..77c3934bdff7fd 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -24,6 +24,7 @@ class Dd4hep(CMakePackage): tags = ["hep"] version("master", branch="master") + version("1.27", sha256="51fbd0f91f2511261d9b01e4b3528c658bea1ea1b5d67b25b6812615e782a902") version("1.26", sha256="de2cc8d8e99217e23fdf0a55b879d3fd3a864690d6660e7808f1ff99eb47f384") version("1.25.1", sha256="6267e76c74fbb346aa881bc44de84434ebe788573f2997a189996252fc5b271b") version("1.25", sha256="102a049166a95c2f24fc1c03395a819fc4501c175bf7915d69ccc660468d094d") From 9fcb093689461b276b19c74a95abd0b31488bdd6 Mon Sep 17 00:00:00 2001 From: Freifrau von Bleifrei Date: Tue, 31 Oct 2023 02:28:52 +0100 Subject: [PATCH 391/408] selalib: add (sca)lapack dependency (#40667) * selalib: add (sca)lapack dependency * selalib: change when "-mpi" to "~mpi" --- var/spack/repos/builtin/packages/selalib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/selalib/package.py b/var/spack/repos/builtin/packages/selalib/package.py index 23b56afc217f35..d36a4d20538ca3 100644 --- a/var/spack/repos/builtin/packages/selalib/package.py +++ b/var/spack/repos/builtin/packages/selalib/package.py @@ -39,10 +39,12 @@ class Selalib(CMakePackage): depends_on("fgsl") depends_on("git", type=("build", "run", "test")) depends_on("hdf5+fortran+cxx") + depends_on("lapack", when="~mpi") with when("+mpi"): depends_on("mpi") depends_on("fftw+mpi") depends_on("hdf5+mpi") + depends_on("scalapack") depends_on("python@3.0.0:", type=("build")) # beware: compiling w/ zfp may throw type mismatch errors depends_on("zfp+fortran", when="+compression") From 4055fb572ff08c9e177abb5b638fd240e008e5ae Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 30 Oct 2023 18:54:31 -0700 Subject: [PATCH 392/408] docs: update `license()` docs with examples and links (#40598) - [x] Add links to information people are going to want to know when adding license information to their packages (namely OSI licenses and SPDX identifiers). - [x] Update the packaging docs for `license()` with Spack as an example for `when=`. After all, it's a dual-licensed package that changed once in the past. - [x] Add link to https://spdx.org/licenses/ in the `spack create` boilerplate as well. 
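For reference, a minimal sketch of a recipe using the directive documented here; the `Example` package, its homepage/url, and the omitted version directives are hypothetical, while `license()`, its `when=` clause, and the SPDX identifiers mirror the examples added to the docs in this change:

```python
from spack.package import *


class Example(Package):
    """Hypothetical recipe illustrating license() with when= clauses."""

    homepage = "https://www.example.com"
    url = "https://www.example.com/example-0.12.0.tar.gz"

    # version(...) directives omitted from this sketch.

    # Releases up to 0.11 were LGPL-2.1; 0.12 and later are dual-licensed,
    # written as an SPDX expression as described in the updated docs.
    license("LGPL-2.1", when="@:0.11")
    license("Apache-2.0 OR MIT", when="@0.12:")
```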
--- lib/spack/docs/packaging_guide.rst | 55 ++++++++++++++++++++++++------ lib/spack/spack/cmd/create.py | 1 + 2 files changed, 45 insertions(+), 11 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index d488ae0c7f1825..fad913cb0f7c70 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -3765,7 +3765,7 @@ Similarly, ``spack install example +feature build_system=autotools`` will pick the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``. Dependencies are always specified in the package class. When some dependencies -depend on the choice of the build system, it is possible to use when conditions as +depend on the choice of the build system, it is possible to use when conditions as usual: .. code-block:: python @@ -3783,7 +3783,7 @@ usual: depends_on("cmake@3.18:", when="@2.0:", type="build") depends_on("cmake@3:", type="build") - # Specify extra build dependencies used only in the configure script + # Specify extra build dependencies used only in the configure script with when("build_system=autotools"): depends_on("perl", type="build") depends_on("pkgconfig", type="build") @@ -6831,25 +6831,58 @@ the adapter role is to "emulate" a method resolution order like the one represen Specifying License Information ------------------------------ -A significant portion of software that Spack packages is open source. Most open -source software is released under one or more common open source licenses. -Specifying the specific license that a package is released under in a project's -`package.py` is good practice. To specify a license, find the SPDX identifier for -a project and then add it using the license directive: +Most of the software in Spack is open source, and most open source software is released +under one or more `common open source licenses `_. +Specifying the license that a package is released under in a project's +`package.py` is good practice. To specify a license, find the `SPDX identifier +`_ for a project and then add it using the license +directive: .. code-block:: python license("") +For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``, +so you'd write: + +.. code-block:: python + + license("Apache-2.0") + +Or, for a dual-licensed package like Spack, you would use an `SPDX Expression +`_ with both of its +licenses: + +.. code-block:: python + + license("Apache-2.0 OR MIT") + Note that specifying a license without a when clause makes it apply to all versions and variants of the package, which might not actually be the case. For example, a project might have switched licenses at some point or have certain build configurations that include files that are licensed differently. -To account for this, you can specify when licenses should be applied. For -example, to specify that a specific license identifier should only apply -to versionup to and including 1.5, you could write the following directive: +Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed +in version ``0.12`` in 2018. + +You can specify when a ``license()`` directive applies using with a ``when=`` +clause, just like other directives. For example, to specify that a specific +license identifier should only apply to versions up to ``0.11``, but another +license should apply for later versions, you could write: .. 
code-block:: python - license("...", when="@:1.5") + license("LGPL-2.1", when="@:0.11") + license("Apache-2.0 OR MIT", when="@0.12:") + +Note that unlike for most other directives, the ``when=`` constraints in the +``license()`` directive can't intersect. Spack needs to be able to resolve +exactly one license identifier expression for any given version. To specify +*multiple* licenses, use SPDX expressions and operators as above. The operators +you probably care most about are: + +* ``OR``: user chooses one license to adhere to; and +* ``AND``: user has to adhere to all the licenses. +You may also care about `license exceptions +`_ that use the ``WITH`` operator, +e.g. ``Apache-2.0 WITH LLVM-exception``. diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 32c6ed13e174b7..946e9bc8b960d3 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -64,6 +64,7 @@ class {class_name}({base_class_name}): # maintainers("github_user1", "github_user2") # FIXME: Add the SPDX identifier of the project's license below. + # See https://spdx.org/licenses/ for a list. license("UNKNOWN") {versions} From 4ea353d668955ade943bfa0d4a4677cdb7f6c0dd Mon Sep 17 00:00:00 2001 From: G-Ragghianti <33492707+G-Ragghianti@users.noreply.github.com> Date: Mon, 30 Oct 2023 22:12:09 -0400 Subject: [PATCH 393/408] Added NVML and cgroup support to the slurm package (#40638) * Added NVML support to the slurm package * dbus package is required for cgroup support * Fixing formatting * Style fix * Added PAM support * Added ROCm SMI support --- .../repos/builtin/packages/dbus/package.py | 4 ++++ .../repos/builtin/packages/slurm/package.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py index 69cda7b4771ddd..37a1b8a694b907 100644 --- a/var/spack/repos/builtin/packages/dbus/package.py +++ b/var/spack/repos/builtin/packages/dbus/package.py @@ -29,6 +29,7 @@ class Dbus(AutotoolsPackage): version("1.8.2", sha256="5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08") variant("xml_docs", default=False, description="Build XML documentation") + variant("system-socket", default="default", description="Location for the DBus system socket") depends_on("pkgconfig", type="build") depends_on("docbook-xml", type="build") @@ -41,6 +42,9 @@ class Dbus(AutotoolsPackage): def configure_args(self): args = ["--disable-systemd", "--disable-launchd"] args += self.enable_or_disable("xml-docs", variant="xml_docs") + socket = self.spec.variants["system-socket"].value + if socket != "default": + args += ["--with-system-socket={0}".format(socket)] return args @run_after("install") diff --git a/var/spack/repos/builtin/packages/slurm/package.py b/var/spack/repos/builtin/packages/slurm/package.py index 61214702b08710..aa4f126018bf39 100644 --- a/var/spack/repos/builtin/packages/slurm/package.py +++ b/var/spack/repos/builtin/packages/slurm/package.py @@ -129,6 +129,10 @@ class Slurm(AutotoolsPackage): description="Set system configuration path (possibly /etc/slurm)", ) variant("restd", default=False, description="Enable the slurmrestd server") + variant("nvml", default=False, description="Enable NVML autodetection") + variant("cgroup", default=False, description="Enable cgroup plugin") + variant("pam", default=False, description="Enable PAM support") + variant("rsmi", default=False, description="Enable ROCm SMI support") # TODO: add variant for BG/Q and Cray support @@ -156,6 
+160,11 @@ class Slurm(AutotoolsPackage): depends_on("libyaml", when="+restd") depends_on("libjwt", when="+restd") + depends_on("cuda", when="+nvml") + depends_on("dbus", when="+cgroup") + depends_on("linux-pam", when="+pam") + depends_on("rocm-smi-lib", when="+rsmi") + executables = ["^srun$", "^salloc$"] @classmethod @@ -213,6 +222,15 @@ def configure_args(self): else: args.append("--without-pmix") + if spec.satisfies("+nvml"): + args.append(f"--with-nvml={spec['cuda'].prefix}") + + if spec.satisfies("+pam"): + args.append(f"--with-pam_dir={spec['linux-pam'].prefix}") + + if spec.satisfies("+rsmi"): + args.append(f"--with-rsmi={spec['rocm-smi-lib'].prefix}") + sysconfdir = spec.variants["sysconfdir"].value if sysconfdir != "PREFIX/etc": args.append("--sysconfdir={0}".format(sysconfdir)) From d24baf242c72270bfa5168d083856d108cd1d9e7 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 10:04:53 +0100 Subject: [PATCH 394/408] tutorial: replace zlib -> gmake to avoid deprecated versions (#40769) --- .../stacks/tutorial/spack.yaml | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 0bc36ce8e44447..9e43de3cf0b366 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -1,9 +1,4 @@ spack: - config: - # allow deprecated versions in concretizations - # required for zlib - deprecated: true - view: false packages: all: @@ -13,11 +8,11 @@ spack: definitions: - gcc_system_packages: - matrix: - - - zlib - - zlib@1.2.8 - - zlib@1.2.8 cflags=-O3 + - - gmake + - gmake@4.3 + - gmake@4.3 cflags=-O3 - tcl - - tcl ^zlib@1.2.8 cflags=-O3 + - tcl ^gmake@4.3 cflags=-O3 - hdf5 - hdf5~mpi - hdf5+hl+mpi ^mpich @@ -26,13 +21,13 @@ spack: - gcc@12.1.0 - mpileaks - lmod - - macsio@1.1+scr^scr@2.0.0~fortran^silo~fortran^hdf5~fortran + - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11.3.0'] - gcc_old_packages: - - zlib%gcc@10.4.0 + - gmake%gcc@10.4.0 - clang_packages: - matrix: - - [zlib, tcl ^zlib@1.2.8] + - [gmake, tcl ^gmake@4.3] - ['%clang@14.0.0'] - gcc_spack_built_packages: - matrix: @@ -41,7 +36,7 @@ spack: - [^openblas, ^netlib-lapack] - ['%gcc@12.1.0'] - matrix: - - [py-scipy^openblas, armadillo^openblas, netlib-lapack, openmpi, mpich, elpa^mpich] + - [py-scipy ^openblas, armadillo ^openblas, netlib-lapack, openmpi, mpich, elpa ^mpich] - ['%gcc@12.1.0'] specs: - $gcc_system_packages From a087db70f0657aa25fea41d1ba10ed4c6d038dac Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 12:58:33 +0100 Subject: [PATCH 395/408] ci: bump tutorial image and toolchain (#40795) --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 2 +- .../cloud_pipelines/stacks/tutorial/spack.yaml | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 196037585fcdce..880aeb6811a1d0 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -621,7 +621,7 @@ aws-isc-aarch64-build: tutorial-generate: extends: [ ".tutorial", ".generate-x86_64"] - image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07 + image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30 tutorial-build: extends: [ ".tutorial", ".build" ] diff --git 
a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 9e43de3cf0b366..1ff435bc9bfb7c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -18,26 +18,26 @@ spack: - hdf5+hl+mpi ^mpich - trilinos - trilinos +hdf5 ^hdf5+hl+mpi ^mpich - - gcc@12.1.0 + - gcc@12 - mpileaks - lmod - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - - ['%gcc@11.3.0'] + - ['%gcc@11'] - gcc_old_packages: - - gmake%gcc@10.4.0 + - gmake%gcc@10 - clang_packages: - matrix: - [gmake, tcl ^gmake@4.3] - - ['%clang@14.0.0'] + - ['%clang@14'] - gcc_spack_built_packages: - matrix: - [netlib-scalapack] - [^mpich, ^openmpi] - [^openblas, ^netlib-lapack] - - ['%gcc@12.1.0'] + - ['%gcc@12'] - matrix: - [py-scipy ^openblas, armadillo ^openblas, netlib-lapack, openmpi, mpich, elpa ^mpich] - - ['%gcc@12.1.0'] + - ['%gcc@12'] specs: - $gcc_system_packages - $gcc_old_packages @@ -48,7 +48,7 @@ spack: pipeline-gen: - build-job: image: - name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07 + name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30 entrypoint: [''] cdash: build-group: Spack Tutorial From 053615a58881908551541f7dc1abdd123f1b171d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 15:08:41 +0100 Subject: [PATCH 396/408] spack checksum: fix error when initial filter yields empty list (#40799) --- lib/spack/spack/stage.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 03689c39bacfd6..7418b5a44ee694 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -893,9 +893,9 @@ def interactive_version_filter( """ # Find length of longest string in the list for padding version_filter = initial_verion_filter or VersionList([":"]) + max_len = max(len(str(v)) for v in url_dict) if url_dict else 0 sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)] sorted_and_filtered.sort(reverse=True) - max_len = max(len(str(v)) for v in sorted_and_filtered) orig_url_dict = url_dict # only copy when using editor to modify print_header = True VERSION_COLOR = spack.spec.VERSION_COLOR @@ -903,21 +903,20 @@ def interactive_version_filter( if print_header: has_filter = version_filter != VersionList([":"]) header = [] - if not sorted_and_filtered: - header.append("No versions selected") - elif len(sorted_and_filtered) == len(orig_url_dict): + if len(orig_url_dict) > 0 and len(sorted_and_filtered) == len(orig_url_dict): header.append( f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}" ) else: header.append( - f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions" + f"Selected {len(sorted_and_filtered)} of " + f"{llnl.string.plural(len(orig_url_dict), 'version')}" ) if sorted_and_filtered and known_versions: num_new = sum(1 for v in sorted_and_filtered if v not in known_versions) header.append(f"{llnl.string.plural(num_new, 'new version')}") if has_filter: - header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@.")) + header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@.")) version_with_url = [ colorize( From cb7329d054e7cd3289331f8d257972270f33cb2b Mon Sep 17 00:00:00 2001 From: Greg Sjaardema Date: Tue, 31 Oct 2023 09:38:20 -0600 Subject: [PATCH 397/408] Seacas: Update for latest seacas releaes version (#40698) --- var/spack/repos/builtin/packages/seacas/package.py 
| 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py index 7a7d48c4087cef..44b4b6a6034cce 100644 --- a/var/spack/repos/builtin/packages/seacas/package.py +++ b/var/spack/repos/builtin/packages/seacas/package.py @@ -31,6 +31,9 @@ class Seacas(CMakePackage): # ###################### Versions ########################## version("master", branch="master") + version( + "2023-10-24", sha256="f93bf0327329c302ed3feb6adf2e3968f01ec325084a457b2c2dbbf6c4f751a2" + ) version( "2023-05-30", sha256="3dd982841854466820a3902163ad1cf1b3fbab65ed7542456d328f2d1a5373c1" ) @@ -132,7 +135,8 @@ class Seacas(CMakePackage): variant("x11", default=True, description="Compile with X11") # ###################### Dependencies ########################## - depends_on("cmake@3.17:", type="build") + depends_on("cmake@3.22:", when="@2023-10-24:", type="build") + depends_on("cmake@3.17:", when="@:2023-05-30", type="build") depends_on("mpi", when="+mpi") # Always depends on netcdf-c @@ -140,9 +144,10 @@ class Seacas(CMakePackage): depends_on("netcdf-c@4.8.0:~mpi", when="~mpi") depends_on("hdf5+hl~mpi", when="~mpi") + depends_on("fmt@10.1.0", when="@2023-10-24:") + depends_on("fmt@9.1.0", when="@2022-10-14:2023-05-30") depends_on("fmt@8.1.0:9", when="@2022-03-04:2022-05-16") - depends_on("fmt@9.1.0", when="@2022-10-14") - depends_on("fmt@9.1.0:", when="@2023-05-30") + depends_on("matio", when="+matio") depends_on("libx11", when="+x11") From ffcdd6adffd51b3e93a737cec13ec1de0e33e197 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 16:52:53 +0100 Subject: [PATCH 398/408] spack checksum: improve signature (#40800) --- lib/spack/spack/cmd/checksum.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 9e5e32b3b76c7a..f927d2d922a26d 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import argparse import re import sys @@ -67,11 +66,18 @@ def setup_parser(subparser): modes_parser.add_argument( "--verify", action="store_true", default=False, help="verify known package checksums" ) - subparser.add_argument("package", help="package or spec. for example cmake or cmake@3.18") + subparser.add_argument("package", help="name or spec (e.g. 
`cmake` or `cmake@3.18`)") subparser.add_argument( - "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for" + "versions", + nargs="*", + help="checksum these specific versions (if omitted, Spack searches for remote versions)", ) arguments.add_common_arguments(subparser, ["jobs"]) + subparser.epilog = ( + "examples:\n" + " `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n" + " `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n" + ) def checksum(parser, args): From 4fe5fa25efb79d9b066cb60dcac5080a2dfa0063 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 31 Oct 2023 17:50:13 +0100 Subject: [PATCH 399/408] Fix interaction of spec literals that propagate variants with unify:false (#40789) * Add tests to ensure variant propagation syntax can round-trip to/from string * Add a regression test for the bug in 35298 * Reconstruct the spec constraints in the worker process Specs do not preserve any information on propagation of variants when round-tripping to/from JSON (which we use to pickle), but preserve it when round-tripping to/from strings. Therefore, we pass a spec literal to the worker and reconstruct the Spec objects there. --- lib/spack/spack/environment/environment.py | 3 ++- lib/spack/spack/test/env.py | 26 +++++++++++++++++++ lib/spack/spack/test/spec_syntax.py | 25 ++++++++++++++++++ .../packages/client-not-foo/package.py | 17 ++++++++++++ .../packages/parent-foo/package.py | 21 +++++++++++++++ 5 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/client-not-foo/package.py create mode 100644 var/spack/repos/builtin.mock/packages/parent-foo/package.py diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 9998161df2b45d..cd2a5a7533b0b9 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1484,7 +1484,7 @@ def _concretize_separately(self, tests=False): for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints): if uspec not in old_concretized_user_specs: root_specs.append(uspec) - args.append((i, uspec_constraints, tests)) + args.append((i, [str(x) for x in uspec_constraints], tests)) i += 1 # Ensure we don't try to bootstrap clingo in parallel @@ -2403,6 +2403,7 @@ def _concretize_from_constraints(spec_constraints, tests=False): def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: index, spec_constraints, tests = packed_arguments + spec_constraints = [Spec(x) for x in spec_constraints] with tty.SuppressOutput(msg_enabled=False): start = time.time() spec = _concretize_from_constraints(spec_constraints, tests) diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py index e88af08761979f..f6b89e2108e866 100644 --- a/lib/spack/spack/test/env.py +++ b/lib/spack/spack/test/env.py @@ -690,3 +690,29 @@ def test_removing_spec_from_manifest_with_exact_duplicates( assert "zlib" in manifest.read_text() with ev.Environment(tmp_path) as env: assert len(env.user_specs) == 1 + + +@pytest.mark.regression("35298") +@pytest.mark.only_clingo("Propagation not supported in the original concretizer") +def test_variant_propagation_with_unify_false(tmp_path, mock_packages): + """Spack distributes concretizations to different processes, when unify:false is selected and + the number of roots is 2 or more. 
When that happens, the specs to be concretized need to be + properly reconstructed on the worker process, if variant propagation was requested. + """ + manifest = tmp_path / "spack.yaml" + manifest.write_text( + """ + spack: + specs: + - parent-foo ++foo + - c + concretizer: + unify: false + """ + ) + with ev.Environment(tmp_path) as env: + env.concretize() + + root = env.matching_spec("parent-foo") + for node in root.traverse(): + assert node.satisfies("+foo") diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index d731fcd31c1ac5..e7a760dc93037d 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -525,6 +525,31 @@ def _specfile_for(spec_str, filename): ], "zlib@git.foo/bar", ), + # Variant propagation + ( + "zlib ++foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_BOOL_VARIANT, "++foo"), + ], + "zlib++foo", + ), + ( + "zlib ~~foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_BOOL_VARIANT, "~~foo"), + ], + "zlib~~foo", + ), + ( + "zlib foo==bar", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, "foo==bar"), + ], + "zlib foo==bar", + ), ], ) def test_parse_single_spec(spec_str, tokens, expected_roundtrip): diff --git a/var/spack/repos/builtin.mock/packages/client-not-foo/package.py b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py new file mode 100644 index 00000000000000..03c9374b3acce1 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ClientNotFoo(Package): + """This package has a variant "foo", which is False by default.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + variant("foo", default=False, description="") diff --git a/var/spack/repos/builtin.mock/packages/parent-foo/package.py b/var/spack/repos/builtin.mock/packages/parent-foo/package.py new file mode 100644 index 00000000000000..61d15231f70822 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/parent-foo/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ParentFoo(Package): + """This package has a variant "foo", which is True by default, and depends on another + package which has the same variant defaulting to False. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + variant("foo", default=True, description="") + + depends_on("client-not-foo") From 0cc2989ce19e8e0cc8232ae5c356b7aabb26e378 Mon Sep 17 00:00:00 2001 From: Sreenivasa Murthy Kolam Date: Tue, 31 Oct 2023 22:48:32 +0530 Subject: [PATCH 400/408] add new recipe for rocm packages- amdsmi (#39270) * add new recipe for rocm packages- amdsmilib * update tags,maintainers list --- .../repos/builtin/packages/amdsmi/package.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 var/spack/repos/builtin/packages/amdsmi/package.py diff --git a/var/spack/repos/builtin/packages/amdsmi/package.py b/var/spack/repos/builtin/packages/amdsmi/package.py new file mode 100644 index 00000000000000..5c293799b80a2e --- /dev/null +++ b/var/spack/repos/builtin/packages/amdsmi/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Amdsmi(CMakePackage): + """The AMD System Management Interface Library, or AMD SMI library, + is a C library for Linux that provides a user space interface for + applications to monitor and control AMD device.""" + + homepage = "https://github.com/RadeonOpenCompute/amdsmi" + url = "https://github.com/RadeonOpenCompute/amdsmi/archive/refs/tags/rocm-5.6.0.tar.gz" + + tags = ["rocm"] + maintainers("srekolam", "renjithravindrankannath") + libraries = ["libamd_smi"] + + version("5.6.0", sha256="595c9d6d79d9071290b2f19ab4ef9222c8d2983b4322b3143fcd9d0b1ce0f6d8") + version("5.5.1", sha256="b794c7fd562fd92f2c9f2bbdc2d5dded7486101fcd4598f2e8c3484c9a939281") + version("5.5.0", sha256="dcfbd96e93afcf86b1261464e008e9ef7e521670871a1885e6eaffc7cdc8f555") + + depends_on("cmake@3.11:", type="build") + depends_on("python@3.6:", type="run") + depends_on("py-virtualenv", type="build") + depends_on("llvm@14:", type="build") + depends_on("pkgconfig", type="build") + depends_on("libdrm", type="build") + depends_on("py-pyyaml", type="build") + + @classmethod + def determine_version(cls, lib): + match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib) + if match: + ver = "{0}.{1}.{2}".format( + int(match.group(1)), int(match.group(2)), int(match.group(3)) + ) + else: + ver = None + return ver + + def cmake_args(self): + args = [] + args.append(self.define("BUILD_TESTS", "ON")) + args.append("-DCMAKE_INSTALL_LIBDIR=lib") + return args From 1adc5f4a73ee3d4f69a4bcb24c3b014861fa8dfe Mon Sep 17 00:00:00 2001 From: jalcaraz Date: Tue, 31 Oct 2023 12:28:16 -0700 Subject: [PATCH 401/408] TAU: Added dyninst variant (#40790) * Added dyninst variant * Added dyninst variant and fixed some issues * Update package.py * Removed whitespace * Update package.py * Update package.py * Fixed conflicting version --------- Co-authored-by: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> --- var/spack/repos/builtin/packages/tau/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index b61ab5753ca64c..9d5252cad2ef46 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -99,6 +99,7 @@ class Tau(Package): variant( "x86_64", default=False, 
description="Force build for x86 Linux instead of auto-detect" ) + variant("dyninst", default=False, description="Activates dyninst support") depends_on("cmake@3.14:", type="build", when="%clang") depends_on("zlib-api", type="link") @@ -128,6 +129,7 @@ class Tau(Package): depends_on("rocm-smi-lib", when="@2.32.1: +rocm") depends_on("java", type="run") # for paraprof depends_on("oneapi-level-zero", when="+level_zero") + depends_on("dyninst@12.3.0:", when="+dyninst") # Elf only required from 2.28.1 on conflicts("+elf", when="@:2.28.0") @@ -136,6 +138,7 @@ class Tau(Package): # ADIOS2, SQLite only available from 2.29.1 on conflicts("+adios2", when="@:2.29.1") conflicts("+sqlite", when="@:2.29.1") + conflicts("+dyninst", when="@:2.32.1") patch("unwind.patch", when="@2.29.0") @@ -337,6 +340,15 @@ def install(self, spec, prefix): break options.append("-pythonlib=%s" % lib_path) + if "+dyninst" in spec: + options.append("-dyninst=%s" % spec["dyninst"].prefix) + if "+tbb" not in spec: + options.append("-tbb=%s" % spec["intel-tbb"].prefix) + if "+boost" not in spec: + options.append("-boost=%s" % spec["boost"].prefix) + if "+elf" not in spec: + options.append("-elf=%s" % spec["elfutils"].prefix) + compiler_specific_options = self.set_compiler_options(spec) options.extend(compiler_specific_options) configure(*options) From a0bb9bc30a8f9af7f7673c017d69584070e1685b Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 31 Oct 2023 13:19:12 -0700 Subject: [PATCH 402/408] Fix cflags requirements (#40639) --- lib/spack/spack/solver/concretize.lp | 21 +++++++++++-------- .../spack/test/concretize_requirements.py | 14 +++++++++---- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index bec90212907f06..2dc899d0b77ba3 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -696,15 +696,18 @@ requirement_group_satisfied(node(ID, Package), X) :- % flags if their only source is from a requirement. This is overly-specific % and should use a more-generic approach like in https://github.com/spack/spack/pull/37180 -{ attr("node_flag", node(ID, A1), A2, A3) } :- - requirement_group_member(Y, Package, X), - activate_requirement(node(ID, Package), X), - imposed_constraint(Y,"node_flag_set", A1, A2, A3). - -{ attr("node_flag_source", node(ID, A1), A2, node(ID, A3)) } :- - requirement_group_member(Y, Package, X), - activate_requirement(node(ID, Package), X), - imposed_constraint(Y,"node_flag_source", A1, A2, A3). +{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :- + requirement_group_member(ConditionID, Package, RequirementID), + activate_requirement(node(ID, Package), RequirementID), + pkg_fact(Package, condition_effect(ConditionID, EffectID)), + imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue). + +{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :- + requirement_group_member(ConditionID, Package1, RequirementID), + activate_requirement(node(NodeID1, Package1), RequirementID), + pkg_fact(Package1, condition_effect(ConditionID, EffectID)), + imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2), + imposed_packages(NodeID2, Package2). 
requirement_weight(node(ID, Package), Group, W) :- W = #min { diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py index cd51006088403b..d5295691ce0a8b 100644 --- a/lib/spack/spack/test/concretize_requirements.py +++ b/lib/spack/spack/test/concretize_requirements.py @@ -469,16 +469,22 @@ def test_one_package_multiple_oneof_groups(concretize_scope, test_repo): @pytest.mark.regression("34241") -def test_require_cflags(concretize_scope, test_repo): +def test_require_cflags(concretize_scope, mock_packages): """Ensures that flags can be required from configuration.""" conf_str = """\ packages: - y: + mpich2: require: cflags="-g" + mpi: + require: mpich cflags="-O1" """ update_packages_config(conf_str) - spec = Spec("y").concretized() - assert spec.satisfies("cflags=-g") + + spec_mpich2 = Spec("mpich2").concretized() + assert spec_mpich2.satisfies("cflags=-g") + + spec_mpi = Spec("mpi").concretized() + assert spec_mpi.satisfies("mpich cflags=-O1") def test_requirements_for_package_that_is_not_needed(concretize_scope, test_repo): From da34c1bf4db04b557f3f35de45327438c9293b80 Mon Sep 17 00:00:00 2001 From: Samuel Li Date: Tue, 31 Oct 2023 14:53:09 -0600 Subject: [PATCH 403/408] Update sperr (#40626) * update SPERR package * remove blank line * update SPERR to be version 0.7.1 * a little clean up * bound versions that require zstd * add USE_ZSTD * add libpressio-sperr version upbound * update libpressio-sperr * address review comments * improve format --------- Co-authored-by: Samuel Li Co-authored-by: Samuel Li --- .../packages/libpressio-sperr/package.py | 14 +++++----- .../repos/builtin/packages/sperr/package.py | 27 ++++++++++++------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/libpressio-sperr/package.py b/var/spack/repos/builtin/packages/libpressio-sperr/package.py index cd636f2fc9c070..35576e33a7a08a 100644 --- a/var/spack/repos/builtin/packages/libpressio-sperr/package.py +++ b/var/spack/repos/builtin/packages/libpressio-sperr/package.py @@ -10,17 +10,19 @@ class LibpressioSperr(CMakePackage): """A LibPressio plugin for Sperr""" homepage = "https://github.com/robertu94/libpressio-sperr" - url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.1.tar.gz" + url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.4.tar.gz" git = homepage maintainers("robertu94") - depends_on("libpressio@0.88.0:", when="@0.0.3:") - depends_on("libpressio@:0.88.0", when="@:0.0.2") - depends_on("sperr") - depends_on("pkgconfig", type="build") - version("master", branch="master") + version("0.0.4", sha256="97f2879460b1a28ed8ebf0c300c1cf7ceeb2c7aa7b8a1307ed19bf8cce0b7941") version("0.0.3", sha256="e0d1fd083419aaaa243cbf780b7de17aeb96533000071088aa21ec238d358ecc") version("0.0.2", sha256="61995d687f9e7e798e17ec7238d19d917890dc0ff5dec18293b840c4d6f8c115") version("0.0.1", sha256="e2c164822708624b97654046b42abff704594cba6537d6d0646d485bdf2d03ca") + + depends_on("libpressio@0.88.0:", when="@0.0.3:") + depends_on("libpressio@:0.88.0", when="@:0.0.2") + depends_on("sperr@:0.6.2", when="@:0.0.3") + depends_on("sperr@0.7.1:", when="@0.0.4:") + depends_on("pkgconfig", type="build") diff --git a/var/spack/repos/builtin/packages/sperr/package.py b/var/spack/repos/builtin/packages/sperr/package.py index 131a6a7fdadc77..5def42991f7d5b 100644 --- a/var/spack/repos/builtin/packages/sperr/package.py +++ b/var/spack/repos/builtin/packages/sperr/package.py @@ -12,23 +12,32 @@ class 
Sperr(CMakePackage): # Package info homepage = "https://github.com/NCAR/SPERR" - url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.6.2.tar.gz" + url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.7.1.tar.gz" git = "https://github.com/NCAR/SPERR.git" maintainers("shaomeng", "robertu94") # Versions version("main", branch="main") + version("0.7.1", sha256="1c3f46200be365427d1f57f5873f1b0b6dbcd297de4603a47a7fa3f41b273d79") version("0.6.2", sha256="d986997e2d79a1f27146ad02c623359976a1e72a1ab0d957e128d430cda3782d") version("0.5", sha256="20ad48c0e7599d3e5866e024d0c49648eb817f72ad5459f5468122cf14a97171") - depends_on("git", type="build") - depends_on("zstd", type=("build", "link"), when="+zstd") - depends_on("pkgconfig", type=("build"), when="+zstd") - + # Variants variant("shared", description="build shared libaries", default=True) - variant("zstd", description="use zstd for more compression", default=True) - variant("openmp", description="use openmp in 3D inputs", default=True) + variant("openmp", description="use OpenMP in 3D inputs", default=True) variant("utilities", description="build SPERR CLI utilities", default=True) + variant("zstd", description="use ZSTD for more compression", default=True, when="@:0.6.2") + variant( + "bundle_zstd", + description="Use SPERR bundled ZSTD. Keep it off in SPACK builds.", + default=False, + when="@:0.6.2", + ) + + # Depend ons + depends_on("git", type="build") + depends_on("pkgconfig", type=("build"), when="+zstd") + depends_on("zstd", type=("build", "link"), when="@:0.6.2+zstd") def cmake_args(self): # ensure the compiler supports OpenMP if it is used @@ -37,11 +46,11 @@ def cmake_args(self): args = [ self.define_from_variant("BUILD_SHARED_LIBS", "shared"), - self.define_from_variant("USE_ZSTD", "zstd"), self.define_from_variant("USE_OMP", "openmp"), self.define_from_variant("BUILD_CLI_UTILITIES", "utilities"), + self.define_from_variant("USE_ZSTD", "zstd"), + self.define_from_variant("USE_BUNDLED_ZSTD", "bundle_zstd"), "-DSPERR_PREFER_RPATH=OFF", - "-DUSE_BUNDLED_ZSTD=OFF", "-DBUILD_UNIT_TESTS=OFF", ] return args From eb9323404aec3323089ca55e8c95fcb22310797d Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 31 Oct 2023 16:25:24 -0500 Subject: [PATCH 404/408] PyTorch: build with external gloo (#40759) * PyTorch: build with external gloo * Fix gloo compilation with GCC 11 * undeprecate * py-torch+cuda+gloo requires gloo+cuda --- .../repos/builtin/packages/gloo/package.py | 5 +++- .../builtin/packages/py-horovod/package.py | 2 +- .../builtin/packages/py-torch/package.py | 26 ++++++++++++------- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/var/spack/repos/builtin/packages/gloo/package.py b/var/spack/repos/builtin/packages/gloo/package.py index 4ca7d55f43a168..ec4503900e9b4c 100644 --- a/var/spack/repos/builtin/packages/gloo/package.py +++ b/var/spack/repos/builtin/packages/gloo/package.py @@ -13,7 +13,10 @@ class Gloo(CMakePackage, CudaPackage): git = "https://github.com/facebookincubator/gloo.git" version("master", branch="master") - version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10: + version("2023-05-19", commit="597accfd79f5b0f9d57b228dec088ca996686475") # py-torch@2.1: + version("2023-01-17", commit="10909297fedab0a680799211a299203e53515032") # py-torch@2.0 + version("2022-05-18", commit="5b143513263133af2b95547e97c07cebeb72bf72") # py-torch@1.13 + version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10:1.12 version("2021-05-04", commit="6f7095f6e9860ce4fd682a7894042e6eba0996f1") # py-torch@1.9 version("2020-09-18", commit="3dc0328fe6a9d47bd47c0c6ca145a0d8a21845c6") # py-torch@1.7:1.8 version("2020-03-17", commit="113bde13035594cafdca247be953610b53026553") # py-torch@1.5:1.6 diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 0e0bc5fd7f6068..5e221c0296824f 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -225,7 +225,7 @@ class PyHorovod(PythonPackage, CudaPackage): conflicts( "controllers=gloo", when="@:0.20.0 platform=darwin", msg="Gloo cannot be compiled on MacOS" ) - # FIXME + # https://github.com/horovod/horovod/issues/3996 conflicts("^py-torch@2.1:") # https://github.com/horovod/horovod/pull/1835 diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 96cae5404be448..21a68b069f322f 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -246,14 +246,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack") # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") depends_on("mpi", when="+mpi") - # https://github.com/pytorch/pytorch/issues/60270 - # depends_on("gloo@2023-05-19", when="@2.1:+gloo") - # depends_on("gloo@2023-01-17", when="@2.0+gloo") - # depends_on("gloo@2022-05-18", when="@1.13:1+gloo") - # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") - # depends_on("gloo@2021-05-04", when="@1.9+gloo") - # depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") - # depends_on("gloo@2020-03-17", when="@1.6+gloo") + depends_on("gloo@2023-05-19", when="@2.1:+gloo") + depends_on("gloo@2023-01-17", when="@2.0+gloo") + depends_on("gloo@2022-05-18", when="@1.13:1+gloo") + depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") + depends_on("gloo@2021-05-04", when="@1.9+gloo") + depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") + depends_on("gloo@2020-03-17", when="@1.6+gloo") + depends_on("gloo+cuda", 
when="@1.6:+gloo+cuda") # https://github.com/pytorch/pytorch/issues/60331 # depends_on("onnx@1.14.1", when="@2.1:+onnx_ml") # depends_on("onnx@1.13.1", when="@2.0+onnx_ml") @@ -270,6 +270,13 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-six", type="test") depends_on("py-psutil", type="test") + # https://github.com/pytorch/pytorch/issues/90448 + patch( + "https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1", + sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7", + when="@1.10:2.0", + ) + # Fix BLAS being overridden by MKL # https://github.com/pytorch/pytorch/issues/60328 patch( @@ -628,8 +635,7 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # env.set("USE_SYSTEM_LIBS", "ON") # https://github.com/pytorch/pytorch/issues/60329 # env.set("USE_SYSTEM_CPUINFO", "ON") - # https://github.com/pytorch/pytorch/issues/60270 - # env.set("USE_SYSTEM_GLOO", "ON") + env.set("USE_SYSTEM_GLOO", "ON") env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") env.set("USE_SYSTEM_PSIMD", "ON") From 30b76645666d12db222b54b98f4a946e4f99c68c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 22:27:00 +0100 Subject: [PATCH 405/408] force color in subshell if not SPACK_COLOR (#40782) --- share/spack/setup-env.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index b5f434863595f4..91a601e652c386 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -98,7 +98,7 @@ _spack_shell_wrapper() { if [ "$_sp_arg" = "-h" ] || [ "$_sp_arg" = "--help" ]; then command spack cd -h else - LOC="$(spack location $_sp_arg "$@")" + LOC="$(SPACK_COLOR="${SPACK_COLOR:-always}" spack location $_sp_arg "$@")" if [ -d "$LOC" ] ; then cd "$LOC" else @@ -136,7 +136,7 @@ _spack_shell_wrapper() { command spack env activate "$@" else # Actual call to activate: source the output. - stdout="$(command spack $_sp_flags env activate --sh "$@")" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env activate --sh "$@")" || return eval "$stdout" fi ;; @@ -158,7 +158,7 @@ _spack_shell_wrapper() { command spack env deactivate -h else # No args: source the output of the command. - stdout="$(command spack $_sp_flags env deactivate --sh)" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env deactivate --sh)" || return eval "$stdout" fi ;; @@ -186,7 +186,7 @@ _spack_shell_wrapper() { # Args contain --sh, --csh, or -h/--help: just execute. 
command spack $_sp_flags $_sp_subcommand "$@" else - stdout="$(command spack $_sp_flags $_sp_subcommand --sh "$@")" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags $_sp_subcommand --sh "$@")" || return eval "$stdout" fi ;; From 0ebf978641e7535ee5c06e039f2496107bc62a54 Mon Sep 17 00:00:00 2001 From: Patrick Bridges Date: Tue, 31 Oct 2023 15:28:48 -0600 Subject: [PATCH 406/408] beatnik: mall changes for v1.0 (#40726) Co-authored-by: Massimiliano Culpo --- var/spack/repos/builtin/packages/beatnik/package.py | 4 ++-- var/spack/repos/builtin/packages/silo/package.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py index aa39194494bbee..9afa9afa3825ce 100644 --- a/var/spack/repos/builtin/packages/beatnik/package.py +++ b/var/spack/repos/builtin/packages/beatnik/package.py @@ -14,8 +14,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): maintainers("patrickb314", "JStewart28") - # Add proper versions and checksums here. Will add 1.0 when a proper SHA is available - # version("1.0", sha256="XXX") + version("1.0", commit="ae31ef9cb44678d5ace77994b45b0778defa3d2f") version("develop", branch="develop") version("main", branch="main") @@ -55,6 +54,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): conflicts("mpich ~rocm", when="+rocm") conflicts("openmpi ~cuda", when="+cuda") conflicts("^intel-mpi") # Heffte won't build with intel MPI because of needed C++ MPI support + conflicts("^spectrum-mpi", when="^cuda@11.3:") # cuda-aware spectrum is broken with cuda 11.3: # Propagate CUDA and AMD GPU targets to cabana for cuda_arch in CudaPackage.cuda_arch_values: diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 4b4a0194e867ea..2678b0d7c56021 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -111,7 +111,6 @@ def flag_handler(self, name, flags): if "+hdf5" in spec: if spec["hdf5"].satisfies("~shared"): flags.append("-ldl") - flags.append(spec["readline"].libs.search_flags) if "+pic" in spec: if name == "cflags": From edf7aeabfd4de142888f91d638bec26bd2c4cf59 Mon Sep 17 00:00:00 2001 From: Kayla Butler Date: Mon, 20 Nov 2023 17:08:50 -0800 Subject: [PATCH 407/408] Merge upstream --- .github/workflows/build-containers.yml | 46 +- .github/workflows/style/requirements.txt | 2 +- etc/spack/defaults/modules.yaml | 2 - lib/spack/docs/build_settings.rst | 554 +---------------- lib/spack/docs/build_systems/intelpackage.rst | 2 +- lib/spack/docs/configuration.rst | 10 +- lib/spack/docs/frequently_asked_questions.rst | 77 +++ lib/spack/docs/index.rst | 4 +- lib/spack/docs/packages_yaml.rst | 559 ++++++++++++++++++ lib/spack/docs/packaging_guide.rst | 2 +- lib/spack/docs/requirements.txt | 6 +- lib/spack/external/__init__.py | 2 +- lib/spack/external/archspec/__init__.py | 2 +- .../archspec/json/cpu/microarchitectures.json | 202 ++++++- lib/spack/spack/__init__.py | 2 +- lib/spack/spack/audit.py | 85 ++- lib/spack/spack/binary_distribution.py | 118 +++- lib/spack/spack/bootstrap/_common.py | 3 +- lib/spack/spack/bootstrap/config.py | 4 +- lib/spack/spack/build_systems/cached_cmake.py | 16 +- lib/spack/spack/build_systems/oneapi.py | 36 +- lib/spack/spack/ci.py | 17 +- lib/spack/spack/cmd/audit.py | 8 +- lib/spack/spack/cmd/common/confirmation.py | 30 + lib/spack/spack/cmd/compiler.py | 17 +- 
lib/spack/spack/cmd/config.py | 8 +- lib/spack/spack/cmd/deconcretize.py | 103 ++++ lib/spack/spack/cmd/diff.py | 2 + lib/spack/spack/cmd/gc.py | 3 +- lib/spack/spack/cmd/info.py | 234 ++++++-- lib/spack/spack/cmd/tutorial.py | 2 +- lib/spack/spack/cmd/uninstall.py | 20 +- lib/spack/spack/compilers/__init__.py | 140 +++-- lib/spack/spack/compilers/aocc.py | 13 - lib/spack/spack/compilers/clang.py | 19 +- lib/spack/spack/database.py | 43 +- lib/spack/spack/environment/environment.py | 40 +- lib/spack/spack/main.py | 2 +- lib/spack/spack/modules/common.py | 64 +- lib/spack/spack/package.py | 3 + lib/spack/spack/schema/modules.py | 67 +-- lib/spack/spack/schema/packages.py | 179 +++--- lib/spack/spack/solver/asp.py | 83 ++- lib/spack/spack/solver/concretize.lp | 69 ++- lib/spack/spack/solver/heuristic.lp | 4 +- lib/spack/spack/solver/heuristic_separate.lp | 4 +- lib/spack/spack/test/bindist.py | 76 +++ lib/spack/spack/test/cmd/compiler.py | 20 +- lib/spack/spack/test/cmd/config.py | 23 +- lib/spack/spack/test/cmd/deconcretize.py | 78 +++ lib/spack/spack/test/cmd/env.py | 12 +- lib/spack/spack/test/cmd/info.py | 14 +- lib/spack/spack/test/compilers/basics.py | 2 - lib/spack/spack/test/concretize.py | 71 +-- .../spack/test/concretize_preferences.py | 16 +- lib/spack/spack/test/config.py | 8 +- lib/spack/spack/test/conftest.py | 12 + lib/spack/spack/test/data/config/modules.yaml | 7 +- .../test/data/modules/lmod/blacklist.yaml | 14 - .../modules/lmod/blacklist_environment.yaml | 30 - .../test/data/modules/tcl/blacklist.yaml | 12 - .../modules/tcl/blacklist_environment.yaml | 25 - .../data/modules/tcl/blacklist_implicits.yaml | 8 - .../tcl/invalid_token_in_env_var_name.yaml | 2 +- lib/spack/spack/test/database.py | 8 + lib/spack/spack/test/llnl/util/lock.py | 2 +- lib/spack/spack/test/modules/common.py | 27 +- lib/spack/spack/test/modules/conftest.py | 43 +- lib/spack/spack/test/modules/lmod.py | 5 +- lib/spack/spack/test/modules/tcl.py | 27 +- lib/spack/spack/user_environment.py | 1 + lib/spack/spack/util/executable.py | 7 +- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 24 +- .../gitlab/cloud_pipelines/configs/ci.yaml | 1 + .../stacks/e4s-oneapi/spack.yaml | 6 +- .../stacks/tutorial/spack.yaml | 17 +- share/spack/spack-completion.bash | 17 +- share/spack/spack-completion.fish | 29 +- .../builtin.mock/packages/adios2/package.py | 8 +- .../builtin.mock/packages/ascent/package.py | 5 +- .../builtin.mock/packages/bzip2/package.py | 7 +- .../packages/dependency-foo-bar/package.py | 20 + .../packages/parent-foo-bar/package.py | 22 + .../repos/builtin/packages/abinit/package.py | 33 +- .../repos/builtin/packages/adiak/package.py | 3 +- .../repos/builtin/packages/alpgen/package.py | 1 - .../builtin/packages/alquimia/package.py | 2 +- .../builtin/packages/aluminum/package.py | 353 ++++++----- .../repos/builtin/packages/amrex/package.py | 2 +- .../repos/builtin/packages/ams/package.py | 6 + .../builtin/packages/arrayfire/package.py | 2 +- .../repos/builtin/packages/asio/package.py | 2 + .../repos/builtin/packages/bart/package.py | 2 +- .../builtin/packages/batchedblas/package.py | 2 +- .../repos/builtin/packages/bfs/package.py | 1 + .../repos/builtin/packages/bison/package.py | 7 + .../repos/builtin/packages/brahma/package.py | 7 +- .../repos/builtin/packages/bzip2/package.py | 4 + .../repos/builtin/packages/catch2/package.py | 6 +- .../repos/builtin/packages/clhep/package.py | 2 + .../repos/builtin/packages/cmake/package.py | 21 +- .../builtin/packages/conquest/package.py | 34 +- 
.../repos/builtin/packages/cool/package.py | 1 - .../repos/builtin/packages/cp2k/package.py | 4 + .../builtin/packages/cpp-logger/package.py | 1 + .../repos/builtin/packages/cpr/package.py | 2 +- .../repos/builtin/packages/ctffind/package.py | 2 +- .../repos/builtin/packages/cube/package.py | 3 + .../repos/builtin/packages/cubelib/package.py | 1 + .../repos/builtin/packages/cubew/package.py | 1 + .../packages/darshan-runtime/package.py | 4 +- .../repos/builtin/packages/dd4hep/package.py | 3 +- .../repos/builtin/packages/dealii/package.py | 130 ++-- .../builtin/packages/dihydrogen/package.py | 439 +++++++++----- .../builtin/packages/discotec/package.py | 3 + .../builtin/packages/dla-future/package.py | 15 +- .../repos/builtin/packages/doxygen/package.py | 2 + .../repos/builtin/packages/dwz/package.py | 2 - .../repos/builtin/packages/ecflow/package.py | 6 +- .../builtin/packages/ecmwf-atlas/package.py | 4 +- .../builtin/packages/elbencho/package.py | 4 + .../packages/environment-modules/package.py | 19 +- .../builtin/packages/epics-base/package.py | 1 + .../repos/builtin/packages/esmf/package.py | 3 +- .../repos/builtin/packages/fairmq/package.py | 42 +- .../repos/builtin/packages/fdb/package.py | 2 +- .../builtin/packages/flux-core/package.py | 1 + .../builtin/packages/flux-sched/package.py | 1 + .../builtin/packages/flux-security/package.py | 1 + .../repos/builtin/packages/fmt/package.py | 2 + .../repos/builtin/packages/form/package.py | 2 +- .../repos/builtin/packages/fplo/package.py | 2 +- .../repos/builtin/packages/gbl/package.py | 1 - .../repos/builtin/packages/gdal/package.py | 23 +- .../repos/builtin/packages/geant4/package.py | 3 +- .../repos/builtin/packages/geos/package.py | 10 + .../repos/builtin/packages/ginkgo/package.py | 64 +- .../repos/builtin/packages/glab/package.py | 1 + .../repos/builtin/packages/gmake/package.py | 2 + .../repos/builtin/packages/gotcha/package.py | 1 + .../builtin/packages/gperftools/package.py | 2 + .../repos/builtin/packages/gromacs/package.py | 54 +- .../repos/builtin/packages/gzip/package.py | 19 +- .../repos/builtin/packages/hdf5/package.py | 15 +- .../repos/builtin/packages/heffte/package.py | 9 + .../repos/builtin/packages/hpcc/package.py | 5 +- .../builtin/packages/hpctoolkit/package.py | 5 + .../builtin/packages/hpx-kokkos/package.py | 2 + .../repos/builtin/packages/hpx/package.py | 2 + .../builtin/packages/hydrogen/package.py | 353 +++++------ .../repos/builtin/packages/hypre/package.py | 4 + .../builtin/packages/intel-mkl/package.py | 3 +- .../packages/intel-oneapi-advisor/package.py | 2 +- .../intel-oneapi-inspector/package.py | 2 +- .../packages/intel-oneapi-mkl/package.py | 18 +- .../packages/intel-oneapi-vtune/package.py | 2 +- .../packages/intel-parallel-studio/package.py | 3 +- .../builtin/packages/intel-xed/package.py | 11 +- .../builtin/packages/interproscan/package.py | 6 +- .../repos/builtin/packages/ispc/package.py | 18 +- .../repos/builtin/packages/itk/package.py | 2 +- .../builtin/packages/jemalloc/package.py | 2 - .../repos/builtin/packages/julia/package.py | 4 +- .../builtin/packages/justbuild/package.py | 1 + .../repos/builtin/packages/lammps/package.py | 2 +- .../lbann/lbann_v0.104_build_cleanup.patch | 39 ++ .../repos/builtin/packages/lbann/package.py | 202 ++----- .../repos/builtin/packages/lcio/package.py | 1 + .../repos/builtin/packages/lcov/package.py | 32 +- .../repos/builtin/packages/ldak/package.py | 2 +- .../repos/builtin/packages/lemon/package.py | 27 + .../builtin/packages/libevent/package.py | 7 + 
.../repos/builtin/packages/libffi/package.py | 5 + .../builtin/packages/libgcrypt/package.py | 1 + .../repos/builtin/packages/libgit2/package.py | 2 + .../builtin/packages/libjpeg-turbo/package.py | 16 + .../repos/builtin/packages/libksba/package.py | 1 + .../repos/builtin/packages/likwid/package.py | 38 ++ .../repos/builtin/packages/llvm/package.py | 11 +- .../repos/builtin/packages/lmod/package.py | 1 + .../repos/builtin/packages/mapl/package.py | 17 + .../repos/builtin/packages/metkit/package.py | 4 + .../repos/builtin/packages/mfem/package.py | 3 + .../builtin/packages/migraphx/package.py | 1 + .../builtin/packages/millepede/package.py | 2 - .../builtin/packages/mimalloc/package.py | 2 + .../0001-add-half-include-path-5.6.patch | 13 + .../0001-add-half-include-path.patch | 21 + ...0002-add-half-include-path-for-tests.patch | 62 ++ .../builtin/packages/mivisionx/package.py | 127 +++- .../repos/builtin/packages/molgw/package.py | 4 +- .../repos/builtin/packages/mpich/package.py | 17 +- .../packages/mrtrix3/fix_includes.patch | 26 + .../repos/builtin/packages/mrtrix3/package.py | 11 +- .../repos/builtin/packages/mumps/package.py | 4 +- .../repos/builtin/packages/ncview/package.py | 3 +- .../repos/builtin/packages/npm/package.py | 37 +- .../repos/builtin/packages/octave/package.py | 2 +- .../repos/builtin/packages/octopus/package.py | 2 +- .../builtin/packages/openblas/package.py | 16 + .../packages/openimagedenoise/package.py | 1 + .../repos/builtin/packages/openmpi/package.py | 2 +- .../repos/builtin/packages/openssl/package.py | 4 + .../repos/builtin/packages/openvkl/package.py | 2 + .../repos/builtin/packages/ospray/package.py | 10 +- .../builtin/packages/pacparser/package.py | 2 - .../builtin/packages/paraview/package.py | 5 +- .../builtin/packages/patchelf/package.py | 9 +- .../packages/perl-class-singleton/package.py | 15 + .../packages/perl-datetime-locale/package.py | 17 + .../perl-datetime-timezone/package.py | 15 + .../builtin/packages/perl-datetime/package.py | 17 + .../packages/perl-devel-cover/package.py | 15 + .../packages/perl-file-sharedir/package.py | 17 + .../packages/perl-file-spec/package.py | 15 + .../packages/perl-memory-process/package.py | 15 + .../perl-namespace-autoclean/package.py | 15 + .../perl-params-validationcompiler/package.py | 16 + .../builtin/packages/perl-specio/package.py | 15 + .../builtin/packages/pflotran/package.py | 6 + .../builtin/packages/photos-f/package.py | 2 - .../packages/pika-algorithms/package.py | 2 + .../repos/builtin/packages/pika/package.py | 2 + .../repos/builtin/packages/podio/package.py | 4 +- .../builtin/packages/py-abipy/package.py | 2 +- .../builtin/packages/py-archspec/package.py | 3 +- .../builtin/packages/py-async-lru/package.py | 2 - .../package.py | 2 - .../builtin/packages/py-black/package.py | 18 +- .../builtin/packages/py-bokeh/package.py | 19 +- .../repos/builtin/packages/py-cleo/package.py | 25 +- .../repos/builtin/packages/py-cppy/package.py | 2 - .../builtin/packages/py-crashtest/package.py | 2 + .../packages/py-dlio-profiler-py/package.py | 13 +- .../builtin/packages/py-elephant/package.py | 56 +- .../builtin/packages/py-geomdl/package.py | 26 + .../builtin/packages/py-gitpython/package.py | 2 + .../packages/py-grpcio-tools/package.py | 6 +- .../builtin/packages/py-grpcio/package.py | 6 +- .../repos/builtin/packages/py-h5py/package.py | 24 +- .../packages/py-imagecodecs/package.py | 3 +- .../packages/py-jarowinkler/package.py | 21 + .../packages/py-jsonpath-ng/package.py | 6 +- 
.../builtin/packages/py-kombu/package.py | 4 +- .../packages/py-libensemble/package.py | 3 +- .../builtin/packages/py-lightning/package.py | 2 + .../builtin/packages/py-macs3/package.py | 2 + .../builtin/packages/py-matplotlib/package.py | 2 + .../repos/builtin/packages/py-mypy/package.py | 34 +- .../builtin/packages/py-nanobind/package.py | 3 + .../builtin/packages/py-numpy/package.py | 1 + .../packages/py-nvidia-dali/package.py | 24 +- .../builtin/packages/py-pandas/package.py | 1 + .../builtin/packages/py-pdbfixer/package.py | 2 +- .../builtin/packages/py-pygithub/package.py | 15 +- .../builtin/packages/py-pynucleus/package.py | 15 +- .../builtin/packages/py-quantities/package.py | 13 +- .../packages/py-rapidfuzz-capi/package.py | 21 + .../builtin/packages/py-rapidfuzz/package.py | 6 +- .../builtin/packages/py-scipy/package.py | 1 + .../py-tensorflow-datasets/package.py | 4 +- .../packages/py-torch-cluster/package.py | 19 +- .../builtin/packages/py-torch/package.py | 9 +- .../builtin/packages/py-torchaudio/package.py | 2 + .../builtin/packages/py-torchdata/package.py | 2 + .../builtin/packages/py-torchgeo/package.py | 1 + .../builtin/packages/py-torchtext/package.py | 2 + .../packages/py-torchvision/package.py | 2 + .../packages/py-xyzservices/package.py | 23 + .../builtin/packages/q-e-sirius/package.py | 2 +- .../repos/builtin/packages/qmcpack/package.py | 2 +- .../packages/quantum-espresso/package.py | 12 +- .../repos/builtin/packages/qwt/package.py | 4 +- .../repos/builtin/packages/r-rlang/package.py | 2 + var/spack/repos/builtin/packages/r/package.py | 70 +-- .../repos/builtin/packages/raja/package.py | 5 + .../repos/builtin/packages/rclone/package.py | 1 + .../repos/builtin/packages/restic/package.py | 1 + .../packages/riscv-gnu-toolchain/package.py | 42 ++ .../builtin/packages/rkcommon/package.py | 1 + .../repos/builtin/packages/root/package.py | 9 +- .../packages/rust-bootstrap/package.py | 11 + .../repos/builtin/packages/rust/package.py | 73 ++- .../builtin/packages/scafacos/package.py | 40 ++ .../repos/builtin/packages/sherpa/package.py | 2 +- .../repos/builtin/packages/sleef/package.py | 5 +- .../builtin/packages/slurm-drmaa/package.py | 2 +- .../repos/builtin/packages/stdexec/package.py | 2 + .../builtin/packages/sundials/package.py | 2 + .../builtin/packages/superlu-dist/package.py | 29 +- .../builtin/packages/taskflow/package.py | 1 + .../repos/builtin/packages/tcl/package.py | 2 + .../builtin/packages/tracy-client/package.py | 2 + .../repos/builtin/packages/tracy/package.py | 2 + .../repos/builtin/packages/ut/package.py | 2 + .../repos/builtin/packages/vc/package.py | 1 + .../repos/builtin/packages/votca/package.py | 1 + .../repos/builtin/packages/whip/package.py | 2 + .../builtin/packages/xrdcl-record/package.py | 2 - .../repos/builtin/packages/xsdk/package.py | 97 ++- .../tutorial/packages/libpspio/package.py | 37 ++ 301 files changed, 4868 insertions(+), 2489 deletions(-) create mode 100644 lib/spack/docs/frequently_asked_questions.rst create mode 100644 lib/spack/docs/packages_yaml.rst create mode 100644 lib/spack/spack/cmd/common/confirmation.py create mode 100644 lib/spack/spack/cmd/deconcretize.py create mode 100644 lib/spack/spack/test/cmd/deconcretize.py delete mode 100644 lib/spack/spack/test/data/modules/lmod/blacklist.yaml delete mode 100644 lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist.yaml delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml 
delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml create mode 100644 var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py create mode 100644 var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py create mode 100644 var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch create mode 100644 var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch create mode 100644 var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch create mode 100644 var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch create mode 100644 var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch create mode 100644 var/spack/repos/builtin/packages/perl-class-singleton/package.py create mode 100644 var/spack/repos/builtin/packages/perl-datetime-locale/package.py create mode 100644 var/spack/repos/builtin/packages/perl-datetime-timezone/package.py create mode 100644 var/spack/repos/builtin/packages/perl-datetime/package.py create mode 100644 var/spack/repos/builtin/packages/perl-devel-cover/package.py create mode 100644 var/spack/repos/builtin/packages/perl-file-sharedir/package.py create mode 100644 var/spack/repos/builtin/packages/perl-file-spec/package.py create mode 100644 var/spack/repos/builtin/packages/perl-memory-process/package.py create mode 100644 var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py create mode 100644 var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py create mode 100644 var/spack/repos/builtin/packages/perl-specio/package.py create mode 100644 var/spack/repos/builtin/packages/py-geomdl/package.py create mode 100644 var/spack/repos/builtin/packages/py-jarowinkler/package.py create mode 100644 var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py create mode 100644 var/spack/repos/builtin/packages/py-xyzservices/package.py create mode 100644 var/spack/repos/builtin/packages/scafacos/package.py create mode 100644 var/spack/repos/tutorial/packages/libpspio/package.py diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 807bf6c858d25d..8bda55c2e76c25 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -38,12 +38,11 @@ jobs: # Meaning of the various items in the matrix list # 0: Container name (e.g. ubuntu-bionic) # 1: Platforms to build for - # 2: Base image (e.g. ubuntu:18.04) + # 2: Base image (e.g. ubuntu:22.04) dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'], [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'], [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'], [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'], - [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'], [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'], [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'], [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'], @@ -58,18 +57,20 @@ jobs: - name: Checkout uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - - name: Set Container Tag Normal (Nightly) - run: | - container="${{ matrix.dockerfile[0] }}:latest" - echo "container=${container}" >> $GITHUB_ENV - echo "versioned=${container}" >> $GITHUB_ENV - - # On a new release create a container with the same tag as the release. 
- - name: Set Container Tag on Release - if: github.event_name == 'release' - run: | - versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}" - echo "versioned=${versioned}" >> $GITHUB_ENV + - uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 + id: docker_meta + with: + images: | + ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }} + ${{ github.repository_owner }}/${{ matrix.dockerfile[0] }} + tags: | + type=schedule,pattern=nightly + type=schedule,pattern=develop + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=ref,event=branch + type=ref,event=pr - name: Generate the Dockerfile env: @@ -92,13 +93,13 @@ jobs: path: dockerfiles - name: Set up QEMU - uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1 + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1 + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 - name: Log in to GitHub Container Registry - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ghcr.io username: ${{ github.actor }} @@ -106,21 +107,18 @@ jobs: - name: Log in to DockerHub if: github.event_name != 'pull_request' - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build & Deploy ${{ matrix.dockerfile[0] }} - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2 + uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 with: context: dockerfiles/${{ matrix.dockerfile[0] }} platforms: ${{ matrix.dockerfile[1] }} push: ${{ github.event_name != 'pull_request' }} cache-from: type=gha cache-to: type=gha,mode=max - tags: | - spack/${{ env.container }} - spack/${{ env.versioned }} - ghcr.io/spack/${{ env.container }} - ghcr.io/spack/${{ env.versioned }} + tags: ${{ steps.docker_meta.outputs.tags }} + labels: ${{ steps.docker_meta.outputs.labels }} diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index 0822ba39339737..aadcd83c09fd32 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -1,4 +1,4 @@ -black==23.10.1 +black==23.11.0 clingo==5.6.2 flake8==6.1.0 isort==5.12.0 diff --git a/etc/spack/defaults/modules.yaml b/etc/spack/defaults/modules.yaml index 6ba4de769b884a..75ec3661174378 100644 --- a/etc/spack/defaults/modules.yaml +++ b/etc/spack/defaults/modules.yaml @@ -46,12 +46,10 @@ modules: tcl: all: autoload: direct - hide_implicits: true # Default configurations if lmod is enabled lmod: all: autoload: direct - hide_implicits: true hierarchy: - mpi diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 402b33f6a2585b..0f53355a81dd80 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -37,7 +37,11 @@ to enable reuse for a single installation, and you can use: spack install --fresh to do a fresh install if ``reuse`` is enabled by default. -``reuse: true`` is the default. +``reuse: dependencies`` is the default. + +.. seealso:: + + FAQ: :ref:`Why does Spack pick particular versions and variants? 
` ------------------------------------------ Selection of the target microarchitectures @@ -99,551 +103,3 @@ while `py-numpy` still needs an older version: Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the default behavior is ``duplicates:strategy:minimal``. - -.. _build-settings: - -================================ -Package Settings (packages.yaml) -================================ - -Spack allows you to customize how your software is built through the -``packages.yaml`` file. Using it, you can make Spack prefer particular -implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK), -or you can make it prefer to build with particular compilers. You can -also tell Spack to use *external* software installations already -present on your system. - -At a high level, the ``packages.yaml`` file is structured like this: - -.. code-block:: yaml - - packages: - package1: - # settings for package1 - package2: - # settings for package2 - # ... - all: - # settings that apply to all packages. - -So you can either set build preferences specifically for *one* package, -or you can specify that certain settings should apply to *all* packages. -The types of settings you can customize are described in detail below. - -Spack's build defaults are in the default -``etc/spack/defaults/packages.yaml`` file. You can override them in -``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more -details on how this works, see :ref:`configuration-scopes`. - -.. _sec-external-packages: - ------------------ -External Packages ------------------ - -Spack can be configured to use externally-installed -packages rather than building its own packages. This may be desirable -if machines ship with system packages, such as a customized MPI -that should be used instead of Spack building its own MPI. - -External packages are configured through the ``packages.yaml`` file. -Here's an example of an external configuration: - -.. code-block:: yaml - - packages: - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -This example lists three installations of OpenMPI, one built with GCC, -one built with GCC and debug information, and another built with Intel. -If Spack is asked to build a package that uses one of these MPIs as a -dependency, it will use the pre-installed OpenMPI in -the given directory. Note that the specified path is the top-level -install prefix, not the ``bin`` subdirectory. - -``packages.yaml`` can also be used to specify modules to load instead -of the installation prefixes. The following example says that module -``CMake/3.7.2`` provides cmake version 3.7.2. - -.. code-block:: yaml - - cmake: - externals: - - spec: cmake@3.7.2 - modules: - - CMake/3.7.2 - -Each ``packages.yaml`` begins with a ``packages:`` attribute, followed -by a list of package names. To specify externals, add an ``externals:`` -attribute under the package name, which lists externals. -Each external should specify a ``spec:`` string that should be as -well-defined as reasonably possible. If a -package lacks a spec component, such as missing a compiler or -package version, then Spack will guess the missing component based -on its most-favored packages, and it may guess incorrectly. 
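Whether Spack has to guess depends entirely on how much of the spec string is filled in. A minimal sketch of the contrast, using hypothetical versions, compiler and prefixes that are not taken from any patch in this series:

.. code-block:: yaml

   packages:
     cmake:
       externals:
       # under-specified: Spack must guess the compiler and architecture (illustrative values)
       - spec: cmake@3.24.3
         prefix: /usr
       # fully specified: nothing is left for Spack to guess
       - spec: "cmake@3.24.3%gcc@11.3.0 arch=linux-ubuntu22.04-x86_64"
         prefix: /opt/cmake-3.24.3

The second form is the safer pattern whenever the compiler and architecture of the external installation are known.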
- -Each package version and compiler listed in an external should -have entries in Spack's packages and compiler configuration, even -though the package and compiler may not ever be built. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Prevent packages from being built from sources -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Adding an external spec in ``packages.yaml`` allows Spack to use an external location, -but it does not prevent Spack from building packages from sources. In the above example, -Spack might choose for many valid reasons to start building and linking with the -latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions. - -To prevent this, the ``packages.yaml`` configuration also allows packages -to be flagged as non-buildable. The previous example could be modified to -be: - -.. code-block:: yaml - - packages: - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - buildable: False - -The addition of the ``buildable`` flag tells Spack that it should never build -its own version of OpenMPI from sources, and it will instead always rely on a pre-built -OpenMPI. - -.. note:: - - If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag) - pre-built specs include specs already available from a local store, an upstream store, a registered - buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only - external specs in ``packages.yaml`` are included in the list of pre-built specs. - -If an external module is specified as not buildable, then Spack will load the -external module into the build environment which can be used for linking. - -The ``buildable`` does not need to be paired with external packages. -It could also be used alone to forbid packages that may be -buggy or otherwise undesirable. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Non-buildable virtual packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Virtual packages in Spack can also be specified as not buildable, and -external implementations can be provided. In the example above, -OpenMPI is configured as not buildable, but Spack will often prefer -other MPI implementations over the externally available OpenMPI. Spack -can be configured with every MPI provider not buildable individually, -but more conveniently: - -.. code-block:: yaml - - packages: - mpi: - buildable: False - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -Spack can then use any of the listed external implementations of MPI -to satisfy a dependency, and will choose depending on the compiler and -architecture. - -In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers -(available via stores or buildcaches) are not wanted, Spack can be configured to require -specs matching only the available externals: - -.. 
code-block:: yaml - - packages: - mpi: - buildable: False - require: - - one_of: [ - "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64", - "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug", - "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - ] - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused, -unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see -:ref:`package-requirements`. - -.. _cmd-spack-external-find: - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Automatically Find External Packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -You can run the :ref:`spack external find ` command -to search for system-provided packages and add them to ``packages.yaml``. -After running this command your ``packages.yaml`` may include new entries: - -.. code-block:: yaml - - packages: - cmake: - externals: - - spec: cmake@3.17.2 - prefix: /usr - -Generally this is useful for detecting a small set of commonly-used packages; -for now this is generally limited to finding build-only dependencies. -Specific limitations include: - -* Packages are not discoverable by default: For a package to be - discoverable with ``spack external find``, it needs to add special - logic. See :ref:`here ` for more details. -* The logic does not search through module files, it can only detect - packages with executables defined in ``PATH``; you can help Spack locate - externals which use module files by loading any associated modules for - packages that you want Spack to know about before running - ``spack external find``. -* Spack does not overwrite existing entries in the package configuration: - If there is an external defined for a spec at any configuration scope, - then Spack will not add a new external entry (``spack config blame packages`` - can help locate all external entries). - -.. _package-requirements: - --------------------- -Package Requirements --------------------- - -Spack can be configured to always use certain compilers, package -versions, and variants during concretization through package -requirements. - -Package requirements are useful when you find yourself repeatedly -specifying the same constraints on the command line, and wish that -Spack respects these constraints whether you mention them explicitly -or not. Another use case is specifying constraints that should apply -to all root specs in an environment, without having to repeat the -constraint everywhere. - -Apart from that, requirements config is more flexible than constraints -on the command line, because it can specify constraints on packages -*when they occur* as a dependency. In contrast, on the command line it -is not possible to specify constraints on dependencies while also keeping -those dependencies optional. - -^^^^^^^^^^^^^^^^^^^ -Requirements syntax -^^^^^^^^^^^^^^^^^^^ - -The package requirements configuration is specified in ``packages.yaml``, -keyed by package name and expressed using the Spec syntax. In the simplest -case you can specify attributes that you always want the package to have -by providing a single spec string to ``require``: - -.. 
code-block:: yaml - - packages: - libfabric: - require: "@1.13.2" - -In the above example, ``libfabric`` will always build with version 1.13.2. If you -need to compose multiple configuration scopes ``require`` accepts a list of -strings: - -.. code-block:: yaml - - packages: - libfabric: - require: - - "@1.13.2" - - "%gcc" - -In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC -as a compiler. - -For more complex use cases, require accepts also a list of objects. These objects -must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings, -and they can optionally have a ``when`` and a ``message`` attribute: - -.. code-block:: yaml - - packages: - openmpi: - require: - - any_of: ["@4.1.5", "%gcc"] - message: "in this example only 4.1.5 can build with other compilers" - -``any_of`` is a list of specs. One of those specs must be satisfied -and it is also allowed for the concretized spec to match more than one. -In the above example, that means you could build ``openmpi@4.1.5%gcc``, -``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but -not ``openmpi@3.9%clang``. - -If a custom message is provided, and the requirement is not satisfiable, -Spack will print the custom error message: - -.. code-block:: console - - $ spack spec openmpi@3.9%clang - ==> Error: in this example only 4.1.5 can build with other compilers - -We could express a similar requirement using the ``when`` attribute: - -.. code-block:: yaml - - packages: - openmpi: - require: - - any_of: ["%gcc"] - when: "@:4.1.4" - message: "in this example only 4.1.5 can build with other compilers" - -In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC. -For readability, Spack also allows a ``spec`` key accepting a string when there is only a single -constraint: - -.. code-block:: yaml - - packages: - openmpi: - require: - - spec: "%gcc" - when: "@:4.1.4" - message: "in this example only 4.1.5 can build with other compilers" - -This code snippet and the one before it are semantically equivalent. - -Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final -concretized spec must match one and only one of them: - -.. code-block:: yaml - - packages: - mpich: - require: - - one_of: ["+cuda", "+rocm"] - -In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``. - -.. note:: - - For ``any_of`` and ``one_of``, the order of specs indicates a - preference: items that appear earlier in the list are preferred - (note that these preferences can be ignored in favor of others). - -.. note:: - - When using a conditional requirement, Spack is allowed to actively avoid the triggering - condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in - the optimization criteria. To check the current optimization criteria and their - priorities you can run ``spack solve zlib``. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Setting default requirements -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -You can also set default requirements for all packages under ``all`` -like this: - -.. code-block:: yaml - - packages: - all: - require: '%clang' - -which means every spec will be required to use ``clang`` as a compiler. - -Note that in this case ``all`` represents a *default set of requirements* - -if there are specific package requirements, then the default requirements -under ``all`` are disregarded. For example, with a configuration like this: - -.. 
code-block:: yaml - - packages: - all: - require: '%clang' - cmake: - require: '%gcc' - -Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake`` -dependencies) to use ``clang``. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Setting requirements on virtual specs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -A requirement on a virtual spec applies whenever that virtual is present in the DAG. -This can be useful for fixing which virtual provider you want to use: - -.. code-block:: yaml - - packages: - mpi: - require: 'mvapich2 %gcc' - -With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``. - -Requirements on the virtual spec and on the specific provider are both applied, if -present. For instance with a configuration like: - -.. code-block:: yaml - - packages: - mpi: - require: 'mvapich2 %gcc' - mvapich2: - require: '~cuda' - -you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider. - -.. _package-preferences: - -------------------- -Package Preferences -------------------- - -In some cases package requirements can be too strong, and package -preferences are the better option. Package preferences do not impose -constraints on packages for particular versions or variants values, -they rather only set defaults -- the concretizer is free to change -them if it must due to other constraints. Also note that package -preferences are of lower priority than reuse of already installed -packages. - -Here's an example ``packages.yaml`` file that sets preferred packages: - -.. code-block:: yaml - - packages: - opencv: - compiler: [gcc@4.9] - variants: +debug - gperftools: - version: [2.2, 2.4, 2.3] - all: - compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi] - target: [sandybridge] - providers: - mpi: [mvapich2, mpich, openmpi] - -At a high level, this example is specifying how packages are preferably -concretized. The opencv package should prefer using GCC 4.9 and -be built with debug options. The gperftools package should prefer version -2.2 over 2.4. Every package on the system should prefer mvapich2 for -its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9). -These options are used to fill in implicit defaults. Any of them can be overwritten -on the command line if explicitly requested. - -Package preferences accept the follow keys or components under -the specific package (or ``all``) section: ``compiler``, ``variants``, -``version``, ``providers``, and ``target``. Each component has an -ordered list of spec ``constraints``, with earlier entries in the -list being preferred over later entries. - -Sometimes a package installation may have constraints that forbid -the first concretization rule, in which case Spack will use the first -legal concretization rule. Going back to the example, if a user -requests gperftools 2.3 or later, then Spack will install version 2.4 -as the 2.4 version of gperftools is preferred over 2.3. - -An explicit concretization rule in the preferred section will always -take preference over unlisted concretizations. In the above example, -xlc isn't listed in the compiler list. Every listed compiler from -gcc to pgi will thus be preferred over the xlc compiler. - -The syntax for the ``provider`` section differs slightly from other -concretization rules. A provider lists a value that packages may -``depends_on`` (e.g, MPI) and a list of rules for fulfilling that -dependency. - -.. 
_package_permissions: - -------------------- -Package Permissions -------------------- - -Spack can be configured to assign permissions to the files installed -by a package. - -In the ``packages.yaml`` file under ``permissions``, the attributes -``read``, ``write``, and ``group`` control the package -permissions. These attributes can be set per-package, or for all -packages under ``all``. If permissions are set under ``all`` and for a -specific package, the package-specific settings take precedence. - -The ``read`` and ``write`` attributes take one of ``user``, ``group``, -and ``world``. - -.. code-block:: yaml - - packages: - all: - permissions: - write: group - group: spack - my_app: - permissions: - read: group - group: my_team - -The permissions settings describe the broadest level of access to -installations of the specified packages. The execute permissions of -the file are set to the same level as read permissions for those files -that are executable. The default setting for ``read`` is ``world``, -and for ``write`` is ``user``. In the example above, installations of -``my_app`` will be installed with user and group permissions but no -world permissions, and owned by the group ``my_team``. All other -packages will be installed with user and group write privileges, and -world read privileges. Those packages will be owned by the group -``spack``. - -The ``group`` attribute assigns a Unix-style group to a package. All -files installed by the package will be owned by the assigned group, -and the sticky group bit will be set on the install prefix and all -directories inside the install prefix. This will ensure that even -manually placed files within the install prefix are owned by the -assigned group. If no group is assigned, Spack will allow the OS -default behavior to go as expected. - ----------------------------- -Assigning Package Attributes ----------------------------- - -You can assign class-level attributes in the configuration: - -.. code-block:: yaml - - packages: - mpileaks: - # Override existing attributes - url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz - # ... or add new ones - x: 1 - -Attributes set this way will be accessible to any method executed -in the package.py file (e.g. the ``install()`` method). Values for these -attributes may be any value parseable by yaml. - -These can only be applied to specific packages, not "all" or -virtual packages. diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst index d64fd469712299..9afe1a8b919543 100644 --- a/lib/spack/docs/build_systems/intelpackage.rst +++ b/lib/spack/docs/build_systems/intelpackage.rst @@ -392,7 +392,7 @@ See section :ref:`Configuration Scopes ` for an explanation about the different files and section -:ref:`Build customization ` +:ref:`Build customization ` for specifics and examples for ``packages.yaml`` files. .. If your system administrator did not provide modules for pre-installed Intel diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index 7026825fa8b3d0..f79f300f4c7f3f 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -17,7 +17,7 @@ case you want to skip directly to specific docs: * :ref:`config.yaml ` * :ref:`mirrors.yaml ` * :ref:`modules.yaml ` -* :ref:`packages.yaml ` +* :ref:`packages.yaml ` * :ref:`repos.yaml ` You can also add any of these as inline configuration in the YAML @@ -243,9 +243,11 @@ lower-precedence settings. 
Completely ignoring higher-level configuration options is supported with the ``::`` notation for keys (see :ref:`config-overrides` below). -There are also special notations for string concatenation and precendense override. -Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical -to the default behavior. Using the ``-:`` works similarly, but for *appending* values. +There are also special notations for string concatenation and precendense override: + +* ``+:`` will force *prepending* strings or lists. For lists, this is the default behavior. +* ``-:`` works similarly, but for *appending* values. + :ref:`config-prepend-append` ^^^^^^^^^^^ diff --git a/lib/spack/docs/frequently_asked_questions.rst b/lib/spack/docs/frequently_asked_questions.rst new file mode 100644 index 00000000000000..345fa1a81a816a --- /dev/null +++ b/lib/spack/docs/frequently_asked_questions.rst @@ -0,0 +1,77 @@ +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other + Spack Project Developers. See the top-level COPYRIGHT file for details. + + SPDX-License-Identifier: (Apache-2.0 OR MIT) + +========================== +Frequently Asked Questions +========================== + +This page contains answers to frequently asked questions about Spack. +If you have questions that are not answered here, feel free to ask on +`Slack `_ or `GitHub Discussions +`_. If you've learned the +answer to a question that you think should be here, please consider +contributing to this page. + +.. _faq-concretizer-precedence: + +----------------------------------------------------- +Why does Spack pick particular versions and variants? +----------------------------------------------------- + +This question comes up in a variety of forms: + + 1. Why does Spack seem to ignore my package preferences from ``packages.yaml`` config? + 2. Why does Spack toggle a variant instead of using the default from the ``package.py`` file? + +The short answer is that Spack always picks an optimal configuration +based on a complex set of criteria\ [#f1]_. These criteria are more nuanced +than always choosing the latest versions or default variants. + +.. note:: + + As a rule of thumb: requirements + constraints > reuse > preferences > defaults. + +The following set of criteria (from lowest to highest precedence) explain +common cases where concretization output may seem surprising at first. + +1. :ref:`Package preferences ` configured in ``packages.yaml`` + override variant defaults from ``package.py`` files, and influence the optimal + ordering of versions. Preferences are specified as follows: + + .. code-block:: yaml + + packages: + foo: + version: [1.0, 1.1] + variants: ~mpi + +2. :ref:`Reuse concretization ` configured in ``concretizer.yaml`` + overrides preferences, since it's typically faster to reuse an existing spec than to + build a preferred one from sources. When build caches are enabled, specs may be reused + from a remote location too. Reuse concretization is configured as follows: + + .. code-block:: yaml + + concretizer: + reuse: dependencies # other options are 'true' and 'false' + +3. :ref:`Package requirements ` configured in ``packages.yaml``, + and constraints from the command line as well as ``package.py`` files override all + of the above. Requirements are specified as follows: + + .. 
code-block:: yaml + + packages: + foo: + require: + - "@1.2: +mpi" + +Requirements and constraints restrict the set of possible solutions, while reuse +behavior and preferences influence what an optimal solution looks like. + + +.. rubric:: Footnotes + +.. [#f1] The exact list of criteria can be retrieved with the ``spack solve`` command diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 0dd27a2444516a..7607181ada5947 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -55,6 +55,7 @@ or refer to the full manual below. getting_started basic_usage replace_conda_homebrew + frequently_asked_questions .. toctree:: :maxdepth: 2 @@ -70,7 +71,7 @@ or refer to the full manual below. configuration config_yaml - bootstrapping + packages_yaml build_settings environments containers @@ -78,6 +79,7 @@ or refer to the full manual below. module_file_support repositories binary_caches + bootstrapping command_index chain extensions diff --git a/lib/spack/docs/packages_yaml.rst b/lib/spack/docs/packages_yaml.rst new file mode 100644 index 00000000000000..e08f51e612a318 --- /dev/null +++ b/lib/spack/docs/packages_yaml.rst @@ -0,0 +1,559 @@ +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other + Spack Project Developers. See the top-level COPYRIGHT file for details. + + SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +.. _packages-config: + +================================ +Package Settings (packages.yaml) +================================ + +Spack allows you to customize how your software is built through the +``packages.yaml`` file. Using it, you can make Spack prefer particular +implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK), +or you can make it prefer to build with particular compilers. You can +also tell Spack to use *external* software installations already +present on your system. + +At a high level, the ``packages.yaml`` file is structured like this: + +.. code-block:: yaml + + packages: + package1: + # settings for package1 + package2: + # settings for package2 + # ... + all: + # settings that apply to all packages. + +So you can either set build preferences specifically for *one* package, +or you can specify that certain settings should apply to *all* packages. +The types of settings you can customize are described in detail below. + +Spack's build defaults are in the default +``etc/spack/defaults/packages.yaml`` file. You can override them in +``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more +details on how this works, see :ref:`configuration-scopes`. + +.. _sec-external-packages: + +----------------- +External Packages +----------------- + +Spack can be configured to use externally-installed +packages rather than building its own packages. This may be desirable +if machines ship with system packages, such as a customized MPI +that should be used instead of Spack building its own MPI. + +External packages are configured through the ``packages.yaml`` file. +Here's an example of an external configuration: + +.. code-block:: yaml + + packages: + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +This example lists three installations of OpenMPI, one built with GCC, +one built with GCC and debug information, and another built with Intel. 
+If Spack is asked to build a package that uses one of these MPIs as a +dependency, it will use the pre-installed OpenMPI in +the given directory. Note that the specified path is the top-level +install prefix, not the ``bin`` subdirectory. + +``packages.yaml`` can also be used to specify modules to load instead +of the installation prefixes. The following example says that module +``CMake/3.7.2`` provides cmake version 3.7.2. + +.. code-block:: yaml + + cmake: + externals: + - spec: cmake@3.7.2 + modules: + - CMake/3.7.2 + +Each ``packages.yaml`` begins with a ``packages:`` attribute, followed +by a list of package names. To specify externals, add an ``externals:`` +attribute under the package name, which lists externals. +Each external should specify a ``spec:`` string that should be as +well-defined as reasonably possible. If a +package lacks a spec component, such as missing a compiler or +package version, then Spack will guess the missing component based +on its most-favored packages, and it may guess incorrectly. + +Each package version and compiler listed in an external should +have entries in Spack's packages and compiler configuration, even +though the package and compiler may not ever be built. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Prevent packages from being built from sources +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Adding an external spec in ``packages.yaml`` allows Spack to use an external location, +but it does not prevent Spack from building packages from sources. In the above example, +Spack might choose for many valid reasons to start building and linking with the +latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions. + +To prevent this, the ``packages.yaml`` configuration also allows packages +to be flagged as non-buildable. The previous example could be modified to +be: + +.. code-block:: yaml + + packages: + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + buildable: False + +The addition of the ``buildable`` flag tells Spack that it should never build +its own version of OpenMPI from sources, and it will instead always rely on a pre-built +OpenMPI. + +.. note:: + + If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag) + pre-built specs include specs already available from a local store, an upstream store, a registered + buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only + external specs in ``packages.yaml`` are included in the list of pre-built specs. + +If an external module is specified as not buildable, then Spack will load the +external module into the build environment which can be used for linking. + +The ``buildable`` does not need to be paired with external packages. +It could also be used alone to forbid packages that may be +buggy or otherwise undesirable. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Non-buildable virtual packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Virtual packages in Spack can also be specified as not buildable, and +external implementations can be provided. In the example above, +OpenMPI is configured as not buildable, but Spack will often prefer +other MPI implementations over the externally available OpenMPI. 
Spack +can be configured with every MPI provider not buildable individually, +but more conveniently: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +Spack can then use any of the listed external implementations of MPI +to satisfy a dependency, and will choose depending on the compiler and +architecture. + +In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers +(available via stores or buildcaches) are not wanted, Spack can be configured to require +specs matching only the available externals: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + require: + - one_of: [ + "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64", + "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug", + "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + ] + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused, +unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see +:ref:`package-requirements`. + +.. _cmd-spack-external-find: + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Automatically Find External Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can run the :ref:`spack external find ` command +to search for system-provided packages and add them to ``packages.yaml``. +After running this command your ``packages.yaml`` may include new entries: + +.. code-block:: yaml + + packages: + cmake: + externals: + - spec: cmake@3.17.2 + prefix: /usr + +Generally this is useful for detecting a small set of commonly-used packages; +for now this is generally limited to finding build-only dependencies. +Specific limitations include: + +* Packages are not discoverable by default: For a package to be + discoverable with ``spack external find``, it needs to add special + logic. See :ref:`here ` for more details. +* The logic does not search through module files, it can only detect + packages with executables defined in ``PATH``; you can help Spack locate + externals which use module files by loading any associated modules for + packages that you want Spack to know about before running + ``spack external find``. +* Spack does not overwrite existing entries in the package configuration: + If there is an external defined for a spec at any configuration scope, + then Spack will not add a new external entry (``spack config blame packages`` + can help locate all external entries). + +.. _package-requirements: + +-------------------- +Package Requirements +-------------------- + +Spack can be configured to always use certain compilers, package +versions, and variants during concretization through package +requirements. + +Package requirements are useful when you find yourself repeatedly +specifying the same constraints on the command line, and wish that +Spack respects these constraints whether you mention them explicitly +or not. 
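As a minimal sketch of that first use case (the package name and constraint are hypothetical, not taken from the patches above): rather than typing ``spack install hdf5 +mpi %gcc`` on every invocation, the same constraint can be recorded once as a requirement:

.. code-block:: yaml

   packages:
     hdf5:
       # hypothetical requirement: applies whether or not the constraint is typed
       require:
       - "+mpi %gcc"

With this in place, plain ``spack install hdf5`` and any spec that pulls in ``hdf5`` as a dependency will be concretized with ``+mpi`` and ``%gcc``.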
Another use case is specifying constraints that should apply +to all root specs in an environment, without having to repeat the +constraint everywhere. + +Apart from that, requirements config is more flexible than constraints +on the command line, because it can specify constraints on packages +*when they occur* as a dependency. In contrast, on the command line it +is not possible to specify constraints on dependencies while also keeping +those dependencies optional. + +.. seealso:: + + FAQ: :ref:`Why does Spack pick particular versions and variants? ` + + +^^^^^^^^^^^^^^^^^^^ +Requirements syntax +^^^^^^^^^^^^^^^^^^^ + +The package requirements configuration is specified in ``packages.yaml``, +keyed by package name and expressed using the Spec syntax. In the simplest +case you can specify attributes that you always want the package to have +by providing a single spec string to ``require``: + +.. code-block:: yaml + + packages: + libfabric: + require: "@1.13.2" + +In the above example, ``libfabric`` will always build with version 1.13.2. If you +need to compose multiple configuration scopes ``require`` accepts a list of +strings: + +.. code-block:: yaml + + packages: + libfabric: + require: + - "@1.13.2" + - "%gcc" + +In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC +as a compiler. + +For more complex use cases, require accepts also a list of objects. These objects +must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings, +and they can optionally have a ``when`` and a ``message`` attribute: + +.. code-block:: yaml + + packages: + openmpi: + require: + - any_of: ["@4.1.5", "%gcc"] + message: "in this example only 4.1.5 can build with other compilers" + +``any_of`` is a list of specs. One of those specs must be satisfied +and it is also allowed for the concretized spec to match more than one. +In the above example, that means you could build ``openmpi@4.1.5%gcc``, +``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but +not ``openmpi@3.9%clang``. + +If a custom message is provided, and the requirement is not satisfiable, +Spack will print the custom error message: + +.. code-block:: console + + $ spack spec openmpi@3.9%clang + ==> Error: in this example only 4.1.5 can build with other compilers + +We could express a similar requirement using the ``when`` attribute: + +.. code-block:: yaml + + packages: + openmpi: + require: + - any_of: ["%gcc"] + when: "@:4.1.4" + message: "in this example only 4.1.5 can build with other compilers" + +In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC. +For readability, Spack also allows a ``spec`` key accepting a string when there is only a single +constraint: + +.. code-block:: yaml + + packages: + openmpi: + require: + - spec: "%gcc" + when: "@:4.1.4" + message: "in this example only 4.1.5 can build with other compilers" + +This code snippet and the one before it are semantically equivalent. + +Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final +concretized spec must match one and only one of them: + +.. code-block:: yaml + + packages: + mpich: + require: + - one_of: ["+cuda", "+rocm"] + +In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``. + +.. 
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Setting default requirements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can also set default requirements for all packages under ``all``
+like this:
+
+.. code-block:: yaml
+
+   packages:
+     all:
+       require: '%clang'
+
+which means every spec will be required to use ``clang`` as a compiler.
+
+Note that in this case ``all`` represents a *default set of requirements* -
+if there are specific package requirements, then the default requirements
+under ``all`` are disregarded. For example, with a configuration like this:
+
+.. code-block:: yaml
+
+   packages:
+     all:
+       require: '%clang'
+     cmake:
+       require: '%gcc'
+
+Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
+dependencies) to use ``clang``.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Setting requirements on virtual specs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A requirement on a virtual spec applies whenever that virtual is present in the DAG.
+This can be useful for fixing which virtual provider you want to use:
+
+.. code-block:: yaml
+
+   packages:
+     mpi:
+       require: 'mvapich2 %gcc'
+
+With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
+
+Requirements on the virtual spec and on the specific provider are both applied, if
+present. For instance with a configuration like:
+
+.. code-block:: yaml
+
+   packages:
+     mpi:
+       require: 'mvapich2 %gcc'
+     mvapich2:
+       require: '~cuda'
+
+you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
+
+.. _package-preferences:
+
+-------------------
+Package Preferences
+-------------------
+
+In some cases package requirements can be too strong, and package
+preferences are the better option. Package preferences do not impose
+constraints on packages for particular versions or variant values;
+they only set defaults. The concretizer is free to change
+them if it must, due to other constraints, and it also prefers reusing
+installed packages over building new ones that would be a better match
+for the preferences.
+
+.. seealso::
+
+   FAQ: :ref:`Why does Spack pick particular versions and variants? `
+
+
+Most package preferences (``compilers``, ``target`` and ``providers``)
+can only be set globally under the ``all`` section of ``packages.yaml``:
+
+.. code-block:: yaml
+
+   packages:
+     all:
+       compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
+       target: [x86_64_v3]
+       providers:
+         mpi: [mvapich2, mpich, openmpi]
+
+These preferences override Spack's defaults and effectively reorder priorities
+when looking for the best compiler, target or virtual package provider. Each
+preference takes an ordered list of spec constraints, with earlier entries in
+the list being preferred over later entries.
+
+In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
+to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
+depend on ``mpi``.
+
+The ``variants`` and ``version`` preferences can be set under
+package specific sections of the ``packages.yaml`` file:
+
+.. code-block:: yaml
+
+   packages:
+     opencv:
+       variants: +debug
+     gperftools:
+       version: [2.2, 2.4, 2.3]
+
+In this case, the preference for ``opencv`` is to build with debug options, while
+``gperftools`` prefers version 2.2 over 2.4.
+
+Any preference can be overridden on the command line if explicitly requested.
+
+Preferences cannot overcome explicit constraints, as they only set a preferred
+ordering among homogeneous attribute values. Going back to the example, if
+``gperftools@2.3:`` was requested, then Spack will install version 2.4
+since the most preferred version 2.2 is prohibited by the version constraint.
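+
+To summarize the difference between the two mechanisms in a single sketch (the package
+names and values are the same illustrative ones used above): a requirement is a hard
+constraint that concretization must honor, while a preference is only a soft default
+that other constraints may override:
+
+.. code-block:: yaml
+
+   packages:
+     libfabric:
+       require: "@1.13.2"        # hard constraint: no other version is acceptable
+     gperftools:
+       version: [2.2, 2.4, 2.3]  # soft preference: another version may still be chosen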
+
+.. _package_permissions:
+
+-------------------
+Package Permissions
+-------------------
+
+Spack can be configured to assign permissions to the files installed
+by a package.
+
+In the ``packages.yaml`` file under ``permissions``, the attributes
+``read``, ``write``, and ``group`` control the package
+permissions. These attributes can be set per-package, or for all
+packages under ``all``. If permissions are set under ``all`` and for a
+specific package, the package-specific settings take precedence.
+
+The ``read`` and ``write`` attributes take one of ``user``, ``group``,
+or ``world``.
+
+.. code-block:: yaml
+
+   packages:
+     all:
+       permissions:
+         write: group
+         group: spack
+     my_app:
+       permissions:
+         read: group
+         group: my_team
+
+The permissions settings describe the broadest level of access to
+installations of the specified packages. The execute permissions of
+the file are set to the same level as read permissions for those files
+that are executable. The default setting for ``read`` is ``world``,
+and for ``write`` is ``user``. In the example above, installations of
+``my_app`` will be installed with user and group permissions but no
+world permissions, and owned by the group ``my_team``. All other
+packages will be installed with user and group write privileges, and
+world read privileges. Those packages will be owned by the group
+``spack``.
+
+The ``group`` attribute assigns a Unix-style group to a package. All
+files installed by the package will be owned by the assigned group,
+and the sticky group bit will be set on the install prefix and all
+directories inside the install prefix. This will ensure that even
+manually placed files within the install prefix are owned by the
+assigned group. If no group is assigned, Spack will allow the OS
+default behavior to apply.
+
+----------------------------
+Assigning Package Attributes
+----------------------------
+
+You can assign class-level attributes in the configuration:
+
+.. code-block:: yaml
+
+   packages:
+     mpileaks:
+       # Override existing attributes
+       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
+       # ... or add new ones
+       x: 1
+
+Attributes set this way will be accessible to any method executed
+in the ``package.py`` file (e.g. the ``install()`` method). Values for these
+attributes may be any value parseable by YAML.
+
+These can only be applied to specific packages, not "all" or
+virtual packages.
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 3dd1c7952d12e7..84046a654e93a4 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2337,7 +2337,7 @@ window while a batch job is running ``spack install`` on the same or overlapping dependencies without any process trying to re-do the work of another.
-For example, if you are using SLURM, you could launch an installation +For example, if you are using Slurm, you could launch an installation of ``mpich`` using the following command: .. code-block:: console diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 10e19f093e5eec..f4333b9aaef672 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -5,9 +5,9 @@ sphinx-rtd-theme==1.3.0 python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 -urllib3==2.0.7 +urllib3==2.1.0 pytest==7.4.3 isort==5.12.0 -black==23.10.1 +black==23.11.0 flake8==6.1.0 -mypy==1.6.1 +mypy==1.7.0 diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index 23cd44e6044a3f..2e8bf3a4f8b80b 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -18,7 +18,7 @@ * Homepage: https://pypi.python.org/pypi/archspec * Usage: Labeling, comparison and detection of microarchitectures -* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec) +* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e) astunparse ---------------- diff --git a/lib/spack/external/archspec/__init__.py b/lib/spack/external/archspec/__init__.py index dfad9f3743d526..22a430894b4af0 100644 --- a/lib/spack/external/archspec/__init__.py +++ b/lib/spack/external/archspec/__init__.py @@ -1,2 +1,2 @@ """Init file to avoid namespace packages""" -__version__ = "0.2.1" +__version__ = "0.2.2" diff --git a/lib/spack/external/archspec/json/cpu/microarchitectures.json b/lib/spack/external/archspec/json/cpu/microarchitectures.json index 2ddad29345441b..1e77caba4aea00 100644 --- a/lib/spack/external/archspec/json/cpu/microarchitectures.json +++ b/lib/spack/external/archspec/json/cpu/microarchitectures.json @@ -2318,6 +2318,26 @@ ] } }, + "power10": { + "from": ["power9"], + "vendor": "IBM", + "generation": 10, + "features": [], + "compilers": { + "gcc": [ + { + "versions": "11.1:", + "flags": "-mcpu={name} -mtune={name}" + } + ], + "clang": [ + { + "versions": "11.0:", + "flags": "-mcpu={name} -mtune={name}" + } + ] + } + }, "ppc64le": { "from": [], "vendor": "generic", @@ -2405,6 +2425,29 @@ ] } }, + "power10le": { + "from": ["power9le"], + "vendor": "IBM", + "generation": 10, + "features": [], + "compilers": { + "gcc": [ + { + "name": "power10", + "versions": "11.1:", + "flags": "-mcpu={name} -mtune={name}" + } + ], + "clang": [ + { + "versions": "11.0:", + "family": "ppc64le", + "name": "power10", + "flags": "-mcpu={name} -mtune={name}" + } + ] + } + }, "aarch64": { "from": [], "vendor": "generic", @@ -2592,6 +2635,37 @@ ] } }, + "armv9.0a": { + "from": ["armv8.5a"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "12:", + "flags": "-march=armv9-a -mtune=generic" + } + ], + "clang": [ + { + "versions": "14:", + "flags": "-march=armv9-a -mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv9-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv9-a -mtune=generic" + } + ] + } + }, "thunderx2": { "from": ["armv8.1a"], "vendor": "Cavium", @@ -2813,8 +2887,12 @@ ], "arm" : [ { - "versions": "20:", + "versions": "20:21.9", "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto" + }, + { + "versions": "22:", + "flags" : "-mcpu=neoverse-n1" } ], "nvhpc" : [ @@ -2942,7 +3020,7 @@ }, { "versions": "22:", - "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng" + "flags" : "-mcpu=neoverse-v1" } ], "nvhpc" : [ @@ -2954,6 +3032,126 @@ ] } 
}, + "neoverse_v2": { + "from": ["neoverse_n1", "armv9.0a"], + "vendor": "ARM", + "features": [ + "fp", + "asimd", + "evtstrm", + "aes", + "pmull", + "sha1", + "sha2", + "crc32", + "atomics", + "fphp", + "asimdhp", + "cpuid", + "asimdrdm", + "jscvt", + "fcma", + "lrcpc", + "dcpop", + "sha3", + "sm3", + "sm4", + "asimddp", + "sha512", + "sve", + "asimdfhm", + "dit", + "uscat", + "ilrcpc", + "flagm", + "ssbs", + "sb", + "paca", + "pacg", + "dcpodp", + "sve2", + "sveaes", + "svepmull", + "svebitperm", + "svesha3", + "svesm4", + "flagm2", + "frint", + "svei8mm", + "svebf16", + "i8mm", + "bf16", + "dgh", + "bti" + ], + "compilers" : { + "gcc": [ + { + "versions": "4.8:5.99", + "flags": "-march=armv8-a" + }, + { + "versions": "6:6.99", + "flags" : "-march=armv8.1-a" + }, + { + "versions": "7.0:7.99", + "flags" : "-march=armv8.2-a -mtune=cortex-a72" + }, + { + "versions": "8.0:8.99", + "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72" + }, + { + "versions": "9.0:9.99", + "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76" + }, + { + "versions": "10.0:11.99", + "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77" + }, + { + "versions": "12.0:12.99", + "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710" + }, + { + "versions": "13.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "clang" : [ + { + "versions": "9.0:10.99", + "flags" : "-march=armv8.5-a+sve" + }, + { + "versions": "11.0:13.99", + "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16" + }, + { + "versions": "14.0:15.99", + "flags" : "-march=armv9-a+i8mm+bf16" + }, + { + "versions": "16.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "arm" : [ + { + "versions": "23.04.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "nvhpc" : [ + { + "versions": "23.3:", + "name": "neoverse-v2", + "flags": "-tp {name}" + } + ] + } + }, "m1": { "from": ["armv8.4a"], "vendor": "Apple", diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 2fd13d0fe38b83..c2fa6aa90e6e39 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) #: PEP440 canonical ... 
string -__version__ = "0.21.0.dev0" +__version__ = "0.22.0.dev0" spack_version = __version__ diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index 8b13ffc7cf72db..d0a68cf2121316 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -40,6 +40,7 @@ def _search_duplicate_compilers(error_cls): import collections.abc import glob import inspect +import io import itertools import pathlib import pickle @@ -54,6 +55,7 @@ def _search_duplicate_compilers(error_cls): import spack.repo import spack.spec import spack.util.crypto +import spack.util.spack_yaml as syaml import spack.variant #: Map an audit tag to a list of callables implementing checks @@ -250,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls): return errors +@config_packages +def _deprecated_preferences(error_cls): + """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)""" + # TODO (v0.22): remove this audit as the attributes will not be allowed in config + errors = [] + packages_yaml = spack.config.CONFIG.get_config("packages") + + def make_error(attribute_name, config_data, summary): + s = io.StringIO() + s.write("Occurring in the following file:\n") + dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name) + syaml.dump_config(dict_view, stream=s, blame=True) + return error_cls(summary=summary, details=[s.getvalue()]) + + if "all" in packages_yaml and "version" in packages_yaml["all"]: + summary = "Using the deprecated 'version' attribute under 'packages:all'" + errors.append(make_error("version", packages_yaml["all"], summary)) + + for package_name in packages_yaml: + if package_name == "all": + continue + + package_conf = packages_yaml[package_name] + for attribute in ("compiler", "providers", "target"): + if attribute not in package_conf: + continue + summary = ( + f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'" + ) + errors.append(make_error(attribute, package_conf, summary)) + + return errors + + #: Sanity checks on package directives package_directives = AuditClass( group="packages", @@ -776,7 +812,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls ) except Exception: summary = ( - "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}" + "{0}: dependency on {1} cannot be satisfied by known versions of {1.name}" ).format(pkg_name, s) details = ["happening in " + filename] if dependency_pkg_cls is not None: @@ -818,6 +854,53 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls): return errors +@package_directives +def _named_specs_in_when_arguments(pkgs, error_cls): + """Reports named specs in the 'when=' attribute of a directive. + + Note that 'conflicts' is the only directive allowing that. 
+ """ + errors = [] + for pkg_name in pkgs: + pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) + + def _extracts_errors(triggers, summary): + _errors = [] + for trigger in list(triggers): + when_spec = spack.spec.Spec(trigger) + if when_spec.name is not None and when_spec.name != pkg_name: + details = [f"using '{trigger}', should be '^{trigger}'"] + _errors.append(error_cls(summary=summary, details=details)) + return _errors + + for dname, triggers in pkg_cls.dependencies.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency" + errors.extend(_extracts_errors(triggers, summary)) + + for vname, (variant, triggers) in pkg_cls.variants.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant" + errors.extend(_extracts_errors(triggers, summary)) + + for provided, triggers in pkg_cls.provided.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual" + errors.extend(_extracts_errors(triggers, summary)) + + for _, triggers in pkg_cls.requirements.items(): + triggers = [when_spec for when_spec, _, _ in triggers] + summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive" + errors.extend(_extracts_errors(triggers, summary)) + + triggers = list(pkg_cls.patches) + summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives" + errors.extend(_extracts_errors(triggers, summary)) + + triggers = list(pkg_cls.resources) + summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives" + errors.extend(_extracts_errors(triggers, summary)) + + return llnl.util.lang.dedupe(errors) + + #: Sanity checks on package directives external_detection = AuditClass( group="externals", diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 6a49ab445e71d1..8cfb891640026c 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -66,8 +66,9 @@ from spack.stage import Stage from spack.util.executable import which -_build_cache_relative_path = "build_cache" -_build_cache_keys_relative_path = "_pgp" +BUILD_CACHE_RELATIVE_PATH = "build_cache" +BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp" +CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1 class BuildCacheDatabase(spack_db.Database): @@ -481,7 +482,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): scheme = urllib.parse.urlparse(mirror_url).scheme if scheme != "oci" and not web_util.url_exists( - url_util.join(mirror_url, _build_cache_relative_path, "index.json") + url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json") ): return False @@ -600,6 +601,10 @@ def __init__(self, msg): super().__init__(msg) +class InvalidMetadataFile(spack.error.SpackError): + pass + + class UnsignedPackageException(spack.error.SpackError): """ Raised if installation of unsigned package is attempted without @@ -614,11 +619,11 @@ def compute_hash(data): def build_cache_relative_path(): - return _build_cache_relative_path + return BUILD_CACHE_RELATIVE_PATH def build_cache_keys_relative_path(): - return _build_cache_keys_relative_path + return BUILD_CACHE_KEYS_RELATIVE_PATH def build_cache_prefix(prefix): @@ -1401,7 +1406,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option spec_dict = sjson.load(content) else: raise ValueError("{0} not a valid spec file type".format(spec_file)) - spec_dict["buildcache_layout_version"] = 1 + spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION spec_dict["binary_cache_checksum"] = 
{"hash_algorithm": "sha256", "hash": checksum} with open(specfile_path, "w") as outfile: @@ -1560,6 +1565,42 @@ def _delete_staged_downloads(download_result): download_result["specfile_stage"].destroy() +def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]: + """Read and validate a spec file, returning the spec dict with its layout version, or raising + InvalidMetadataFile if invalid.""" + try: + with open(path, "rb") as f: + binary_content = f.read() + except OSError: + raise InvalidMetadataFile(f"No such file: {path}") + + # In the future we may support transparently decompressing compressed spec files. + if binary_content[:2] == b"\x1f\x8b": + raise InvalidMetadataFile("Compressed spec files are not supported") + + try: + as_string = binary_content.decode("utf-8") + if path.endswith(".json.sig"): + spec_dict = Spec.extract_json_from_clearsig(as_string) + else: + spec_dict = json.loads(as_string) + except Exception as e: + raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e + + # Ensure this version is not too new. + try: + layout_version = int(spec_dict.get("buildcache_layout_version", 0)) + except ValueError as e: + raise InvalidMetadataFile("Could not parse layout version") from e + + if layout_version > max_supported_layout: + raise InvalidMetadataFile( + f"Layout version {layout_version} is too new for this version of Spack" + ) + + return spec_dict, layout_version + + def download_tarball(spec, unsigned=False, mirrors_for_spec=None): """ Download binary tarball for given package into stage area, returning @@ -1652,6 +1693,18 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): try: local_specfile_stage.fetch() local_specfile_stage.check() + try: + _get_valid_spec_file( + local_specfile_stage.save_filename, + CURRENT_BUILD_CACHE_LAYOUT_VERSION, + ) + except InvalidMetadataFile as e: + tty.warn( + f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " + f"from {mirror} due to invalid metadata file: {e}" + ) + local_specfile_stage.destroy() + continue except Exception: continue local_specfile_stage.cache_local() @@ -1674,14 +1727,26 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): else: ext = "json.sig" if try_signed else "json" - specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix) + specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix) specfile_url = f"{specfile_path}.{ext}" - spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball) + spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball) local_specfile_stage = try_fetch(specfile_url) if local_specfile_stage: local_specfile_path = local_specfile_stage.save_filename signature_verified = False + try: + _get_valid_spec_file( + local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + except InvalidMetadataFile as e: + tty.warn( + f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " + f"from {mirror} due to invalid metadata file: {e}" + ) + local_specfile_stage.destroy() + continue + if try_signed and not unsigned: # If we found a signed specfile at the root, try to verify # the signature immediately. 
We will not download the @@ -2001,24 +2066,16 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti ) specfile_path = download_result["specfile_stage"].save_filename - - with open(specfile_path, "r") as inputfile: - content = inputfile.read() - if specfile_path.endswith(".json.sig"): - spec_dict = Spec.extract_json_from_clearsig(content) - else: - spec_dict = sjson.load(content) - + spec_dict, layout_version = _get_valid_spec_file( + specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) bchecksum = spec_dict["binary_cache_checksum"] filename = download_result["tarball_stage"].save_filename signature_verified = download_result["signature_verified"] tmpdir = None - if ( - "buildcache_layout_version" not in spec_dict - or int(spec_dict["buildcache_layout_version"]) < 1 - ): + if layout_version == 0: # Handle the older buildcache layout where the .spack file # contains a spec json, maybe an .asc file (signature), # and another tarball containing the actual install tree. @@ -2029,7 +2086,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti _delete_staged_downloads(download_result) shutil.rmtree(tmpdir) raise e - else: + elif layout_version == 1: # Newer buildcache layout: the .spack file contains just # in the install tree, the signature, if it exists, is # wrapped around the spec.json at the root. If sig verify @@ -2053,7 +2110,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti raise NoChecksumException( tarfile_path, size, contents, "sha256", expected, local_checksum ) - try: with closing(tarfile.open(tarfile_path, "r")) as tar: # Remove install prefix from tarfil to extract directly into spec.prefix @@ -2184,10 +2240,10 @@ def try_direct_fetch(spec, mirrors=None): for mirror in binary_mirrors: buildcache_fetch_url_json = url_util.join( - mirror.fetch_url, _build_cache_relative_path, specfile_name + mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name ) buildcache_fetch_url_signed_json = url_util.join( - mirror.fetch_url, _build_cache_relative_path, signed_specfile_name + mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name ) try: _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json) @@ -2292,7 +2348,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None): for mirror in mirror_collection.values(): fetch_url = mirror.fetch_url keys_url = url_util.join( - fetch_url, _build_cache_relative_path, _build_cache_keys_relative_path + fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH ) keys_index = url_util.join(keys_url, "index.json") @@ -2357,7 +2413,7 @@ def push_keys(*mirrors, **kwargs): for mirror in mirrors: push_url = getattr(mirror, "push_url", mirror) keys_url = url_util.join( - push_url, _build_cache_relative_path, _build_cache_keys_relative_path + push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH ) keys_local = url_util.local_file_path(keys_url) @@ -2495,11 +2551,11 @@ def download_buildcache_entry(file_descriptions, mirror_url=None): ) if mirror_url: - mirror_root = os.path.join(mirror_url, _build_cache_relative_path) + mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH) return _download_buildcache_entry(mirror_root, file_descriptions) for mirror in spack.mirror.MirrorCollection(binary=True).values(): - mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path) + mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH) if _download_buildcache_entry(mirror_root, 
file_descriptions): return True @@ -2590,7 +2646,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen): def get_remote_hash(self): # Failure to fetch index.json.hash is not fatal - url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash") + url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash") try: response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) except urllib.error.URLError: @@ -2611,7 +2667,7 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) # Otherwise, download index.json - url_index = url_util.join(self.url, _build_cache_relative_path, "index.json") + url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json") try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) @@ -2655,7 +2711,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately - url = url_util.join(self.url, _build_cache_relative_path, "index.json") + url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json") headers = { "User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": '"{}"'.format(self.etag), diff --git a/lib/spack/spack/bootstrap/_common.py b/lib/spack/spack/bootstrap/_common.py index 69f32d62639d81..0b8192f77f807d 100644 --- a/lib/spack/spack/bootstrap/_common.py +++ b/lib/spack/spack/bootstrap/_common.py @@ -213,7 +213,8 @@ def _root_spec(spec_str: str) -> str: if str(spack.platforms.host()) == "darwin": spec_str += " %apple-clang" elif str(spack.platforms.host()) == "windows": - spec_str += " %msvc" + # TODO (johnwparent): Remove version constraint when clingo patch is up + spec_str += " %msvc@:19.37" else: spec_str += " %gcc" diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py index e38c5669d993a8..6786bc0d3ead00 100644 --- a/lib/spack/spack/bootstrap/config.py +++ b/lib/spack/spack/bootstrap/config.py @@ -143,7 +143,9 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]: def _add_compilers_if_missing() -> None: arch = spack.spec.ArchSpec.frontend_arch() if not spack.compilers.compilers_for_arch(arch): - new_compilers = spack.compilers.find_new_compilers() + new_compilers = spack.compilers.find_new_compilers( + mixed_toolchain=sys.platform == "darwin" + ) if new_compilers: spack.compilers.add_compilers_to_config(new_compilers, init_config=False) diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py index d85c2b7e199352..74304f1dc63293 100644 --- a/lib/spack/spack/build_systems/cached_cmake.py +++ b/lib/spack/spack/build_systems/cached_cmake.py @@ -34,6 +34,11 @@ def cmake_cache_option(name, boolean_value, comment="", force=False): return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str) +def cmake_cache_filepath(name, value, comment=""): + """Generate a string for a cmake cache variable of type FILEPATH""" + return 'set({0} "{1}" CACHE FILEPATH "{2}")\n'.format(name, value, comment) + + class CachedCMakeBuilder(CMakeBuilder): #: Phases of a Cached CMake package #: Note: the initconfig phase is used for developer builds as a final phase to stop on @@ -257,6 +262,15 @@ def initconfig_hardware_entries(self): entries.append( cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc)) ) + llvm_bin = spec["llvm-amdgpu"].prefix.bin + 
llvm_prefix = spec["llvm-amdgpu"].prefix + # Some ROCm systems seem to point to //rocm-/ and + # others point to //rocm-/llvm + if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm": + llvm_bin = os.path.join(llvm_prefix, "llvm/bin/") + entries.append( + cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++")) + ) archs = self.spec.variants["amdgpu_target"].value if archs[0] != "none": arch_str = ";".join(archs) @@ -277,7 +291,7 @@ def std_initconfig_entries(self): "#------------------{0}".format("-" * 60), "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path), "#------------------{0}\n".format("-" * 60), - cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path), + cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path), self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"), ] diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 1961eb312cee3c..f90312f5796e15 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -9,11 +9,10 @@ import shutil from os.path import basename, dirname, isdir -from llnl.util.filesystem import find_headers, find_libraries, join_path +from llnl.util.filesystem import find_headers, find_libraries, join_path, mkdirp from llnl.util.link_tree import LinkTree from spack.directives import conflicts, variant -from spack.package import mkdirp from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable @@ -180,6 +179,35 @@ def libs(self): return find_libraries("*", root=lib_path, shared=True, recursive=True) +class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage): + """Base class for Intel oneAPI library packages with SDK components. + + Contains some convenient default implementations for libraries + that expose functionality in sdk subdirectories. + Implement the method directly in the package if something + different is needed. 
+ + """ + + @property + def include(self): + return join_path(self.component_prefix, "sdk", "include") + + @property + def headers(self): + return find_headers("*", self.include, recursive=True) + + @property + def lib(self): + lib_path = join_path(self.component_prefix, "sdk", "lib64") + lib_path = lib_path if isdir(lib_path) else dirname(lib_path) + return lib_path + + @property + def libs(self): + return find_libraries("*", root=self.lib, shared=True, recursive=True) + + class IntelOneApiStaticLibraryList: """Provides ld_flags when static linking is needed @@ -212,3 +240,7 @@ def link_flags(self): @property def ld_flags(self): return "{0} {1}".format(self.search_flags, self.link_flags) + + +#: Tuple of Intel math libraries, exported to packages +INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio") diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index afad3b7a45197e..ac308045742f9b 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -46,7 +46,22 @@ from spack.reporters import CDash, CDashConfiguration from spack.reporters.cdash import build_stamp as cdash_build_stamp -JOB_RETRY_CONDITIONS = ["always"] +# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions +JOB_RETRY_CONDITIONS = [ + # "always", + "unknown_failure", + "script_failure", + "api_failure", + "stuck_or_timeout_failure", + "runner_system_failure", + "runner_unsupported", + "stale_schedule", + # "job_execution_timeout", + "archived_failure", + "unmet_prerequisites", + "scheduler_failure", + "data_integrity_failure", +] TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py index 86eea9f7bc8b9a..58d7a5362cf56d 100644 --- a/lib/spack/spack/cmd/audit.py +++ b/lib/spack/spack/cmd/audit.py @@ -2,6 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import warnings + import llnl.util.tty as tty import llnl.util.tty.colify import llnl.util.tty.color as cl @@ -52,8 +54,10 @@ def setup_parser(subparser): def configs(parser, args): - reports = spack.audit.run_group(args.subcommand) - _process_reports(reports) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + reports = spack.audit.run_group(args.subcommand) + _process_reports(reports) def packages(parser, args): diff --git a/lib/spack/spack/cmd/common/confirmation.py b/lib/spack/spack/cmd/common/confirmation.py new file mode 100644 index 00000000000000..8a5cd2592b44e9 --- /dev/null +++ b/lib/spack/spack/cmd/common/confirmation.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import sys +from typing import List + +import llnl.util.tty as tty + +import spack.cmd + +display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4} + + +def confirm_action(specs: List[spack.spec.Spec], participle: str, noun: str): + """Display the list of specs to be acted on and ask for confirmation. + + Args: + specs: specs to be removed + participle: action expressed as a participle, e.g. "uninstalled" + noun: action expressed as a noun, e.g. 
"uninstallation" + """ + tty.msg(f"The following {len(specs)} packages will be {participle}:\n") + spack.cmd.display_specs(specs, **display_args) + print("") + answer = tty.get_yes_or_no("Do you want to proceed?", default=False) + if not answer: + tty.msg(f"Aborting {noun}") + sys.exit(0) diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 07006afc2cc8f0..76eb8d31508a67 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -31,6 +31,19 @@ def setup_parser(subparser): aliases=["add"], help="search the system for compilers to add to Spack configuration", ) + mixed_toolchain_group = find_parser.add_mutually_exclusive_group() + mixed_toolchain_group.add_argument( + "--mixed-toolchain", + action="store_true", + default=sys.platform == "darwin", + help="Allow mixed toolchains (for example: clang, clang++, gfortran)", + ) + mixed_toolchain_group.add_argument( + "--no-mixed-toolchain", + action="store_false", + dest="mixed_toolchain", + help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)", + ) find_parser.add_argument("add_paths", nargs=argparse.REMAINDER) find_parser.add_argument( "--scope", @@ -86,7 +99,9 @@ def compiler_find(args): # Below scope=None because we want new compilers that don't appear # in any other configuration. - new_compilers = spack.compilers.find_new_compilers(paths, scope=None) + new_compilers = spack.compilers.find_new_compilers( + paths, scope=None, mixed_toolchain=args.mixed_toolchain + ) if new_compilers: spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False) n = len(new_compilers) diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index c4446b475afcbe..14514400a86ceb 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -407,7 +407,9 @@ def config_prefer_upstream(args): pkgs = {} for spec in pref_specs: # Collect all the upstream compilers and versions for this package. - pkg = pkgs.get(spec.name, {"version": [], "compiler": []}) + pkg = pkgs.get(spec.name, {"version": []}) + all = pkgs.get("all", {"compiler": []}) + pkgs["all"] = all pkgs[spec.name] = pkg # We have no existing variant if this is our first added version. @@ -418,8 +420,8 @@ def config_prefer_upstream(args): pkg["version"].append(version) compiler = str(spec.compiler) - if compiler not in pkg["compiler"]: - pkg["compiler"].append(compiler) + if compiler not in all["compiler"]: + all["compiler"].append(compiler) # Get and list all the variants that differ from the default. variants = [] diff --git a/lib/spack/spack/cmd/deconcretize.py b/lib/spack/spack/cmd/deconcretize.py new file mode 100644 index 00000000000000..dbcf72ea8b3a29 --- /dev/null +++ b/lib/spack/spack/cmd/deconcretize.py @@ -0,0 +1,103 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import argparse +import sys +from typing import List + +import llnl.util.tty as tty + +import spack.cmd +import spack.cmd.common.arguments as arguments +import spack.cmd.common.confirmation as confirmation +import spack.environment as ev +import spack.spec + +description = "remove specs from the concretized lockfile of an environment" +section = "environments" +level = "long" + +# Arguments for display_specs when we find ambiguity +display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4} + + +def setup_parser(subparser): + subparser.add_argument( + "--root", action="store_true", help="deconcretize only specific environment roots" + ) + arguments.add_common_arguments(subparser, ["yes_to_all", "specs"]) + subparser.add_argument( + "-a", + "--all", + action="store_true", + dest="all", + help="deconcretize ALL specs that match each supplied spec", + ) + + +def get_deconcretize_list( + args: argparse.Namespace, specs: List[spack.spec.Spec], env: ev.Environment +) -> List[spack.spec.Spec]: + """ + Get list of environment roots to deconcretize + """ + env_specs = [s for _, s in env.concretized_specs()] + to_deconcretize = [] + errors = [] + + for s in specs: + if args.root: + # find all roots matching given spec + to_deconc = [e for e in env_specs if e.satisfies(s)] + else: + # find all roots matching or depending on a matching spec + to_deconc = [e for e in env_specs if any(d.satisfies(s) for d in e.traverse())] + + if len(to_deconc) < 1: + tty.warn(f"No matching specs to deconcretize for {s}") + + elif len(to_deconc) > 1 and not args.all: + errors.append((s, to_deconc)) + + to_deconcretize.extend(to_deconc) + + if errors: + for spec, matching in errors: + tty.error(f"{spec} matches multiple concrete specs:") + sys.stderr.write("\n") + spack.cmd.display_specs(matching, output=sys.stderr, **display_args) + sys.stderr.write("\n") + sys.stderr.flush() + tty.die("Use '--all' to deconcretize all matching specs, or be more specific") + + return to_deconcretize + + +def deconcretize_specs(args, specs): + env = spack.cmd.require_active_env(cmd_name="deconcretize") + + if args.specs: + deconcretize_list = get_deconcretize_list(args, specs, env) + else: + deconcretize_list = [s for _, s in env.concretized_specs()] + + if not args.yes_to_all: + confirmation.confirm_action(deconcretize_list, "deconcretized", "deconcretization") + + with env.write_transaction(): + for spec in deconcretize_list: + env.deconcretize(spec) + env.write() + + +def deconcretize(parser, args): + if not args.specs and not args.all: + tty.die( + "deconcretize requires at least one spec argument.", + " Use `spack deconcretize --all` to deconcretize ALL specs.", + ) + + specs = spack.cmd.parse_specs(args.specs) if args.specs else [any] + deconcretize_specs(args, specs) diff --git a/lib/spack/spack/cmd/diff.py b/lib/spack/spack/cmd/diff.py index c654f6a5b8c6db..e321c28afbf43f 100644 --- a/lib/spack/spack/cmd/diff.py +++ b/lib/spack/spack/cmd/diff.py @@ -200,6 +200,8 @@ def diff(parser, args): specs = [] for spec in spack.cmd.parse_specs(args.specs): + # If the spec has a hash, check it before disambiguating + spec.replace_hash() if spec.concrete: specs.append(spec) else: diff --git a/lib/spack/spack/cmd/gc.py b/lib/spack/spack/cmd/gc.py index e4da6a103daf75..9918bf7479fd05 100644 --- a/lib/spack/spack/cmd/gc.py +++ b/lib/spack/spack/cmd/gc.py @@ -6,6 +6,7 @@ import llnl.util.tty as tty import spack.cmd.common.arguments +import spack.cmd.common.confirmation 
import spack.cmd.uninstall import spack.environment as ev import spack.store @@ -41,6 +42,6 @@ def gc(parser, args): return if not args.yes_to_all: - spack.cmd.uninstall.confirm_removal(specs) + spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstallation") spack.cmd.uninstall.do_uninstall(specs, force=False) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 5e667f487686e1..1f90831f65ebd3 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys import textwrap from itertools import zip_longest @@ -16,6 +17,7 @@ import spack.install_test import spack.repo import spack.spec +import spack.version from spack.package_base import preferred_version description = "get detailed information on a particular package" @@ -53,6 +55,7 @@ def setup_parser(subparser): ("--tags", print_tags.__doc__), ("--tests", print_tests.__doc__), ("--virtuals", print_virtuals.__doc__), + ("--variants-by-name", "list variants in strict name order; don't group by condition"), ] for opt, help_comment in options: subparser.add_argument(opt, action="store_true", help=help_comment) @@ -77,35 +80,10 @@ def license(s): class VariantFormatter: - def __init__(self, variants): - self.variants = variants + def __init__(self, pkg): + self.variants = pkg.variants self.headers = ("Name [Default]", "When", "Allowed values", "Description") - # Formats - fmt_name = "{0} [{1}]" - - # Initialize column widths with the length of the - # corresponding headers, as they cannot be shorter - # than that - self.column_widths = [len(x) for x in self.headers] - - # Expand columns based on max line lengths - for k, e in variants.items(): - v, w = e - candidate_max_widths = ( - len(fmt_name.format(k, self.default(v))), # Name [Default] - len(str(w)), - len(v.allowed_values), # Allowed values - len(v.description), # Description - ) - - self.column_widths = ( - max(self.column_widths[0], candidate_max_widths[0]), - max(self.column_widths[1], candidate_max_widths[1]), - max(self.column_widths[2], candidate_max_widths[2]), - max(self.column_widths[3], candidate_max_widths[3]), - ) - # Don't let name or possible values be less than max widths _, cols = tty.terminal_size() max_name = min(self.column_widths[0], 30) @@ -137,6 +115,8 @@ def default(self, v): def lines(self): if not self.variants: yield " None" + return + else: yield " " + self.fmt % self.headers underline = tuple([w * "=" for w in self.column_widths]) @@ -159,7 +139,7 @@ def lines(self): yield " " + self.fmt % t -def print_dependencies(pkg): +def print_dependencies(pkg, args): """output build, link, and run package dependencies""" for deptype in ("build", "link", "run"): @@ -172,7 +152,7 @@ def print_dependencies(pkg): color.cprint(" None") -def print_detectable(pkg): +def print_detectable(pkg, args): """output information on external detection""" color.cprint("") @@ -200,7 +180,7 @@ def print_detectable(pkg): color.cprint(" False") -def print_maintainers(pkg): +def print_maintainers(pkg, args): """output package maintainers""" if len(pkg.maintainers) > 0: @@ -209,7 +189,7 @@ def print_maintainers(pkg): color.cprint(section_title("Maintainers: ") + mnt) -def print_phases(pkg): +def print_phases(pkg, args): """output installation phases""" if hasattr(pkg.builder, "phases") and pkg.builder.phases: @@ -221,7 +201,7 @@ def print_phases(pkg): color.cprint(phase_str) -def print_tags(pkg): +def print_tags(pkg, args): """output package tags""" 
color.cprint("") @@ -233,7 +213,7 @@ def print_tags(pkg): color.cprint(" None") -def print_tests(pkg): +def print_tests(pkg, args): """output relevant build-time and stand-alone tests""" # Some built-in base packages (e.g., Autotools) define callback (e.g., @@ -271,18 +251,171 @@ def print_tests(pkg): color.cprint(" None") -def print_variants(pkg): +def _fmt_value(v): + if v is None or isinstance(v, bool): + return str(v).lower() + else: + return str(v) + + +def _fmt_name_and_default(variant): + """Print colorized name [default] for a variant.""" + return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}") + + +def _fmt_when(when, indent): + return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}") + + +def _fmt_variant_description(variant, width, indent): + """Format a variant's description, preserving explicit line breaks.""" + return "\n".join( + textwrap.fill( + line, width=width, initial_indent=indent * " ", subsequent_indent=indent * " " + ) + for line in variant.description.split("\n") + ) + + +def _fmt_variant(variant, max_name_default_len, indent, when=None, out=None): + out = out or sys.stdout + + _, cols = tty.terminal_size() + + name_and_default = _fmt_name_and_default(variant) + name_default_len = color.clen(name_and_default) + + values = variant.values + if not isinstance(variant.values, (tuple, list, spack.variant.DisjointSetsOfValues)): + values = [variant.values] + + # put 'none' first, sort the rest by value + sorted_values = sorted(values, key=lambda v: (v != "none", v)) + + pad = 4 # min padding between 'name [default]' and values + value_indent = (indent + max_name_default_len + pad) * " " # left edge of values + + # This preserves any formatting (i.e., newlines) from how the description was + # written in package.py, but still wraps long lines for small terminals. + # This allows some packages to provide detailed help on their variants (see, e.g., gasnet). + formatted_values = "\n".join( + textwrap.wrap( + f"{', '.join(_fmt_value(v) for v in sorted_values)}", + width=cols - 2, + initial_indent=value_indent, + subsequent_indent=value_indent, + ) + ) + formatted_values = formatted_values[indent + name_default_len + pad :] + + # name [default] value1, value2, value3, ... + padding = pad * " " + color.cprint(f"{indent * ' '}{name_and_default}{padding}@c{{{formatted_values}}}", stream=out) + + # when + description_indent = indent + 4 + if when is not None and when != spack.spec.Spec(): + out.write(_fmt_when(when, description_indent - 2)) + out.write("\n") + + # description, preserving explicit line breaks from the way it's written in the package file + out.write(_fmt_variant_description(variant, cols - 2, description_indent)) + out.write("\n") + + +def _variants_by_name_when(pkg): + """Adaptor to get variants keyed by { name: { when: { [Variant...] } }.""" + # TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged. + variants = {} + for name, (variant, whens) in pkg.variants.items(): + for when in whens: + variants.setdefault(name, {}).setdefault(when, []).append(variant) + return variants + + +def _variants_by_when_name(pkg): + """Adaptor to get variants keyed by { when: { name: Variant } }""" + # TODO: replace with pkg.variants when unified directive dicts are merged. 
+ variants = {} + for name, (variant, whens) in pkg.variants.items(): + for when in whens: + variants.setdefault(when, {})[name] = variant + return variants + + +def _print_variants_header(pkg): """output variants""" + if not pkg.variants: + print(" None") + return + color.cprint("") color.cprint(section_title("Variants:")) - formatter = VariantFormatter(pkg.variants) - for line in formatter.lines: - color.cprint(color.cescape(line)) + variants_by_name = _variants_by_name_when(pkg) + + # Calculate the max length of the "name [default]" part of the variant display + # This lets us know where to print variant values. + max_name_default_len = max( + color.clen(_fmt_name_and_default(variant)) + for name, when_variants in variants_by_name.items() + for variants in when_variants.values() + for variant in variants + ) + + return max_name_default_len, variants_by_name + + +def _unconstrained_ver_first(item): + """sort key that puts specs with open version ranges first""" + spec, _ = item + return (spack.version.any_version not in spec.versions, spec) + + +def print_variants_grouped_by_when(pkg): + max_name_default_len, _ = _print_variants_header(pkg) + indent = 4 + variants = _variants_by_when_name(pkg) + for when, variants_by_name in sorted(variants.items(), key=_unconstrained_ver_first): + padded_values = max_name_default_len + 4 + start_indent = indent -def print_versions(pkg): + if when != spack.spec.Spec(): + sys.stdout.write("\n") + sys.stdout.write(_fmt_when(when, indent)) + sys.stdout.write("\n") + + # indent names slightly inside 'when', but line up values + padded_values -= 2 + start_indent += 2 + + for name, variant in sorted(variants_by_name.items()): + _fmt_variant(variant, padded_values, start_indent, None, out=sys.stdout) + + +def print_variants_by_name(pkg): + max_name_default_len, variants_by_name = _print_variants_header(pkg) + max_name_default_len += 4 + + indent = 4 + for name, when_variants in variants_by_name.items(): + for when, variants in sorted(when_variants.items(), key=_unconstrained_ver_first): + for variant in variants: + _fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout) + sys.stdout.write("\n") + + +def print_variants(pkg, args): + """output variants""" + if args.variants_by_name: + print_variants_by_name(pkg) + else: + print_variants_grouped_by_when(pkg) + + +def print_versions(pkg, args): """output versions""" color.cprint("") @@ -300,18 +433,24 @@ def print_versions(pkg): pad = padder(pkg.versions, 4) preferred = preferred_version(pkg) - url = "" - if pkg.has_code: - url = fs.for_package_version(pkg, preferred) + def get_url(version): + try: + return fs.for_package_version(pkg, version) + except spack.fetch_strategy.InvalidArgsError: + return "No URL" + + url = get_url(preferred) if pkg.has_code else "" line = version(" {0}".format(pad(preferred))) + color.cescape(url) - color.cprint(line) + color.cwrite(line) + + print() safe = [] deprecated = [] for v in reversed(sorted(pkg.versions)): if pkg.has_code: - url = fs.for_package_version(pkg, v) + url = get_url(v) if pkg.versions[v].get("deprecated", False): deprecated.append((v, url)) else: @@ -329,7 +468,7 @@ def print_versions(pkg): color.cprint(line) -def print_virtuals(pkg): +def print_virtuals(pkg, args): """output virtual packages""" color.cprint("") @@ -352,7 +491,7 @@ def print_virtuals(pkg): color.cprint(" None") -def print_licenses(pkg): +def print_licenses(pkg, args): """Output the licenses of the project.""" color.cprint("") @@ -384,7 +523,8 @@ def info(parser, args): else: 
color.cprint(" None") - color.cprint(section_title("Homepage: ") + pkg.homepage) + if getattr(pkg, "homepage"): + color.cprint(section_title("Homepage: ") + pkg.homepage) # Now output optional information in expected order sections = [ @@ -401,6 +541,6 @@ def info(parser, args): ] for print_it, func in sections: if print_it: - func(pkg) + func(pkg, args) color.cprint("") diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py index a07824f922a74d..5759912b66ffc7 100644 --- a/lib/spack/spack/cmd/tutorial.py +++ b/lib/spack/spack/cmd/tutorial.py @@ -23,7 +23,7 @@ # tutorial configuration parameters -tutorial_branch = "releases/v0.20" +tutorial_branch = "releases/v0.21" tutorial_mirror = "file:///mirror" tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub") diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index bc6a71cef10f1f..3288404151d230 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -11,10 +11,9 @@ import spack.cmd import spack.cmd.common.arguments as arguments +import spack.cmd.common.confirmation as confirmation import spack.environment as ev -import spack.error import spack.package_base -import spack.repo import spack.spec import spack.store import spack.traverse as traverse @@ -278,7 +277,7 @@ def uninstall_specs(args, specs): return if not args.yes_to_all: - confirm_removal(uninstall_list) + confirmation.confirm_action(uninstall_list, "uninstalled", "uninstallation") # Uninstall everything on the list do_uninstall(uninstall_list, args.force) @@ -292,21 +291,6 @@ def uninstall_specs(args, specs): env.regenerate_views() -def confirm_removal(specs: List[spack.spec.Spec]): - """Display the list of specs to be removed and ask for confirmation. - - Args: - specs: specs to be removed - """ - tty.msg("The following {} packages will be uninstalled:\n".format(len(specs))) - spack.cmd.display_specs(specs, **display_args) - print("") - answer = tty.get_yes_or_no("Do you want to proceed?", default=False) - if not answer: - tty.msg("Aborting uninstallation") - sys.exit(0) - - def uninstall(parser, args): if not args.specs and not args.all: tty.die( diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 3f9663d21eaff0..6366fc321b3d06 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -10,7 +10,7 @@ import itertools import multiprocessing.pool import os -from typing import Dict, List +from typing import Dict, List, Optional, Tuple import archspec.cpu @@ -21,6 +21,7 @@ import spack.compiler import spack.config import spack.error +import spack.operating_systems import spack.paths import spack.platforms import spack.spec @@ -223,13 +224,16 @@ def all_compiler_specs(scope=None, init_config=True): ] -def find_compilers(path_hints=None): +def find_compilers( + path_hints: Optional[List[str]] = None, *, mixed_toolchain=False +) -> List["spack.compiler.Compiler"]: """Return the list of compilers found in the paths given as arguments. Args: - path_hints (list or None): list of path hints where to look for. - A sensible default based on the ``PATH`` environment variable - will be used if the value is None + path_hints: list of path hints where to look for. 
A sensible default based on the ``PATH`` + environment variable will be used if the value is None + mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for + a certain language """ if path_hints is None: path_hints = get_path("PATH") @@ -250,7 +254,7 @@ def find_compilers(path_hints=None): finally: tp.close() - def valid_version(item): + def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool: value, error = item if error is None: return True @@ -262,25 +266,37 @@ def valid_version(item): pass return False - def remove_errors(item): + def remove_errors( + item: Tuple[Optional[DetectVersionArgs], Optional[str]] + ) -> DetectVersionArgs: value, _ = item + assert value is not None return value - return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions))) + return make_compiler_list( + [remove_errors(detected) for detected in detected_versions if valid_version(detected)], + mixed_toolchain=mixed_toolchain, + ) -def find_new_compilers(path_hints=None, scope=None): +def find_new_compilers( + path_hints: Optional[List[str]] = None, + scope: Optional[str] = None, + *, + mixed_toolchain: bool = False, +): """Same as ``find_compilers`` but return only the compilers that are not already in compilers.yaml. Args: - path_hints (list or None): list of path hints where to look for. - A sensible default based on the ``PATH`` environment variable - will be used if the value is None - scope (str): scope to look for a compiler. If None consider the - merged configuration. + path_hints: list of path hints where to look for. A sensible default based on the ``PATH`` + environment variable will be used if the value is None + scope: scope to look for a compiler. If None consider the merged configuration. + mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for + a certain language """ - compilers = find_compilers(path_hints) + compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain) + return select_new_compilers(compilers, scope) @@ -638,7 +654,9 @@ def all_compiler_types(): ) -def arguments_to_detect_version_fn(operating_system, paths): +def arguments_to_detect_version_fn( + operating_system: spack.operating_systems.OperatingSystem, paths: List[str] +) -> List[DetectVersionArgs]: """Returns a list of DetectVersionArgs tuples to be used in a corresponding function to detect compiler versions. @@ -646,8 +664,7 @@ def arguments_to_detect_version_fn(operating_system, paths): function by providing a method called with the same name. Args: - operating_system (spack.operating_systems.OperatingSystem): the operating system - on which we are looking for compilers + operating_system: the operating system on which we are looking for compilers paths: paths to search for compilers Returns: @@ -656,10 +673,10 @@ def arguments_to_detect_version_fn(operating_system, paths): compilers in this OS. 
""" - def _default(search_paths): - command_arguments = [] + def _default(search_paths: List[str]) -> List[DetectVersionArgs]: + command_arguments: List[DetectVersionArgs] = [] files_to_be_tested = fs.files_in(*search_paths) - for compiler_name in spack.compilers.supported_compilers_for_host_platform(): + for compiler_name in supported_compilers_for_host_platform(): compiler_cls = class_for_compiler_name(compiler_name) for language in ("cc", "cxx", "f77", "fc"): @@ -684,7 +701,9 @@ def _default(search_paths): return fn(paths) -def detect_version(detect_version_args): +def detect_version( + detect_version_args: DetectVersionArgs, +) -> Tuple[Optional[DetectVersionArgs], Optional[str]]: """Computes the version of a compiler and adds it to the information passed as input. @@ -693,8 +712,7 @@ def detect_version(detect_version_args): needs to be checked by the code dispatching the calls. Args: - detect_version_args (DetectVersionArgs): information on the - compiler for which we should detect the version. + detect_version_args: information on the compiler for which we should detect the version. Returns: A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the @@ -710,7 +728,7 @@ def _default(fn_args): path = fn_args.path # Get compiler names and the callback to detect their versions - callback = getattr(compiler_cls, "{0}_version".format(language)) + callback = getattr(compiler_cls, f"{language}_version") try: version = callback(path) @@ -736,13 +754,15 @@ def _default(fn_args): return fn(detect_version_args) -def make_compiler_list(detected_versions): +def make_compiler_list( + detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False +) -> List["spack.compiler.Compiler"]: """Process a list of detected versions and turn them into a list of compiler specs. 
Args: - detected_versions (list): list of DetectVersionArgs containing a - valid version + detected_versions: list of DetectVersionArgs containing a valid version + mixed_toolchain: allow mixing compilers from different toolchains if langauge is missing Returns: list: list of Compiler objects @@ -751,7 +771,7 @@ def make_compiler_list(detected_versions): sorted_compilers = sorted(detected_versions, key=group_fn) # Gather items in a dictionary by the id, name variation and language - compilers_d = {} + compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {} for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn): compiler_id, name_variation, language = sort_key by_compiler_id = compilers_d.setdefault(compiler_id, {}) @@ -760,7 +780,7 @@ def make_compiler_list(detected_versions): def _default_make_compilers(cmp_id, paths): operating_system, compiler_name, version = cmp_id - compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) + compiler_cls = class_for_compiler_name(compiler_name) spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}") paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] # TODO: johnwparent - revist the following line as per discussion at: @@ -782,13 +802,14 @@ def _default_make_compilers(cmp_id, paths): getattr(variation, "suffix", None), ) - compilers = [] + # Flatten to a list of compiler id, primary variation and compiler dictionary + flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = [] for compiler_id, by_compiler_id in compilers_d.items(): ordered = sorted(by_compiler_id, key=sort_fn) selected_variation = ordered[0] selected = by_compiler_id[selected_variation] - # fill any missing parts from subsequent entries + # Fill any missing parts from subsequent entries (without mixing toolchains) for lang in ["cxx", "f77", "fc"]: if lang not in selected: next_lang = next( @@ -797,14 +818,63 @@ def _default_make_compilers(cmp_id, paths): if next_lang: selected[lang] = next_lang - operating_system, _, _ = compiler_id - make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers) + flat_compilers.append((compiler_id, selected_variation, selected)) - compilers.extend(make_compilers(compiler_id, selected)) + # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested) + if mixed_toolchain: + make_mixed_toolchain(flat_compilers) + + # Finally, create the compiler list + compilers = [] + for compiler_id, _, compiler in flat_compilers: + make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers) + compilers.extend(make_compilers(compiler_id, compiler)) return compilers +def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None: + """Add missing compilers across toolchains when they are missing for a particular language. 
+ This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a + fortran compiler (no flang).""" + + # First collect the clangs that are missing a fortran compiler + clangs_without_flang = [ + (id, variation, compiler) + for id, variation, compiler in compilers + if id.compiler_name in ("clang", "apple-clang") + and "f77" not in compiler + and "fc" not in compiler + ] + if not clangs_without_flang: + return + + # Filter on GCCs with fortran compiler + gccs_with_fortran = [ + (id, variation, compiler) + for id, variation, compiler in compilers + if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler + ] + + # Sort these GCCs by "best variation" (no prefix / suffix first) + gccs_with_fortran.sort( + key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None)) + ) + + # Attach the optimal GCC fortran compiler to the clangs that don't have one + for clang_id, _, clang_compiler in clangs_without_flang: + gcc_compiler = next( + (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None + ) + + if not gcc_compiler: + continue + + # Update the fc / f77 entries + clang_compiler["f77"] = gcc_compiler["f77"] + clang_compiler["fc"] = gcc_compiler["fc"] + + def is_mixed_toolchain(compiler): """Returns True if the current compiler is a mixed toolchain, False otherwise. diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py index a642960b7df522..33039bf07d1874 100644 --- a/lib/spack/spack/compilers/aocc.py +++ b/lib/spack/spack/compilers/aocc.py @@ -5,7 +5,6 @@ import os import re -import sys import llnl.util.lang @@ -41,7 +40,6 @@ def debug_flags(self): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ] @@ -114,17 +112,6 @@ def extract_version_from_output(cls, output): return ".".join(match.groups()) return "unknown" - @classmethod - def fc_version(cls, fortran_compiler): - if sys.platform == "darwin": - return cls.default_version("clang") - - return cls.default_version(fortran_compiler) - - @classmethod - def f77_version(cls, f77): - return cls.fc_version(f77) - @property def stdcxx_libs(self): return ("-lstdc++",) diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index a9356227de5d76..5e63526df619c2 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -5,7 +5,6 @@ import os import re -import sys import llnl.util.lang @@ -39,10 +38,10 @@ class Clang(Compiler): cxx_names = ["clang++"] # Subclasses use possible names of Fortran 77 compiler - f77_names = ["flang", "gfortran", "xlf_r"] + f77_names = ["flang"] # Subclasses use possible names of Fortran 90 compiler - fc_names = ["flang", "gfortran", "xlf90_r"] + fc_names = ["flang"] version_argument = "--version" @@ -56,7 +55,6 @@ def debug_flags(self): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ] @@ -182,16 +180,3 @@ def extract_version_from_output(cls, output): if match: ver = match.group(match.lastindex) return ver - - @classmethod - def fc_version(cls, fc): - # We could map from gcc/gfortran version to clang version, but on macOS - # we normally mix any version of gfortran with any version of clang. 
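# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): a minimal, self-contained
# approximation of the mixed-toolchain pairing implemented above in
# make_mixed_toolchain(): clang entries that lack Fortran compilers borrow
# f77/fc from a gcc detected on the same OS. The plain dicts below are
# hypothetical stand-ins for Spack's internal CompilerID/paths structures;
# the keys "name", "os" and "paths" are assumptions made for this example.

def pair_clang_with_gfortran(detected):
    """Fill missing f77/fc entries of (apple-)clang from a gcc on the same OS."""
    gccs = [c for c in detected if c["name"] == "gcc" and "fc" in c["paths"]]
    for comp in detected:
        is_clang = comp["name"] in ("clang", "apple-clang")
        if is_clang and "fc" not in comp["paths"]:
            gcc = next((g for g in gccs if g["os"] == comp["os"]), None)
            if gcc is not None:
                comp["paths"]["f77"] = gcc["paths"]["f77"]
                comp["paths"]["fc"] = gcc["paths"]["fc"]
    return detected

detected = [
    {"name": "apple-clang", "os": "ventura",
     "paths": {"cc": "/usr/bin/clang", "cxx": "/usr/bin/clang++"}},
    {"name": "gcc", "os": "ventura",
     "paths": {"cc": "gcc-13", "cxx": "g++-13", "f77": "gfortran-13", "fc": "gfortran-13"}},
]
print(pair_clang_with_gfortran(detected)[0]["paths"]["fc"])  # gfortran-13
# --------------------------------------------------------------------------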
-        if sys.platform == "darwin":
-            return cls.default_version("clang")
-        else:
-            return cls.default_version(fc)
-
-    @classmethod
-    def f77_version(cls, f77):
-        return cls.fc_version(f77)
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index f252fbc05df944..ecda8c36b0f0ba 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -1522,14 +1522,18 @@ def _query(
         # TODO: like installed and known that can be queried? Or are
         # TODO: these really special cases that only belong here?

-        # Just look up concrete specs with hashes; no fancy search.
-        if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
-            # TODO: handling of hashes restriction is not particularly elegant.
-            hash_key = query_spec.dag_hash()
-            if hash_key in self._data and (not hashes or hash_key in hashes):
-                return [self._data[hash_key].spec]
-            else:
-                return []
+        if query_spec is not any:
+            if not isinstance(query_spec, spack.spec.Spec):
+                query_spec = spack.spec.Spec(query_spec)
+
+            # Just look up concrete specs with hashes; no fancy search.
+            if query_spec.concrete:
+                # TODO: handling of hashes restriction is not particularly elegant.
+                hash_key = query_spec.dag_hash()
+                if hash_key in self._data and (not hashes or hash_key in hashes):
+                    return [self._data[hash_key].spec]
+                else:
+                    return []

         # Abstract specs require more work -- currently we test
         # against everything.
@@ -1537,6 +1541,9 @@
         start_date = start_date or datetime.datetime.min
         end_date = end_date or datetime.datetime.max

+        # save specs whose name doesn't match for last, to avoid a virtual check
+        deferred = []
+
         for key, rec in self._data.items():
             if hashes is not None and rec.spec.dag_hash() not in hashes:
                 continue
@@ -1561,8 +1568,26 @@
             if not (start_date < inst_date < end_date):
                 continue

-            if query_spec is any or rec.spec.satisfies(query_spec):
+            if query_spec is any:
                 results.append(rec.spec)
+                continue
+
+            # check anon specs and exact name matches first
+            if not query_spec.name or rec.spec.name == query_spec.name:
+                if rec.spec.satisfies(query_spec):
+                    results.append(rec.spec)
+
+            # save potential virtual matches for later, but not if we already found a match
+            elif not results:
+                deferred.append(rec.spec)
+
+        # Checking for virtuals is expensive, so we save it for last and only if needed.
+        # If we get here, we didn't find anything in the DB that matched by name.
+        # If we did find something, the query spec can't be virtual b/c we matched an actual
+        # package installation, so skip the virtual check entirely. If we *didn't* find anything,
+        # check all the deferred specs *if* the query is virtual.
+        if not results and query_spec is not any and deferred and query_spec.virtual:
+            results = [spec for spec in deferred if spec.satisfies(query_spec)]

         return results

diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 8ddd7f8d3bc2fb..5d6273506ec9c6 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -1358,7 +1358,7 @@ def concretize(self, force=False, tests=False):

         # Remove concrete specs that no longer correlate to a user spec
         for spec in set(self.concretized_user_specs) - set(self.user_specs):
-            self.deconcretize(spec)
+            self.deconcretize(spec, concrete=False)

         # Pick the right concretization strategy
         if self.unify == "when_possible":
@@ -1373,15 +1373,36 @@ def concretize(self, force=False, tests=False):
             msg = "concretization strategy not implemented [{0}]"
             raise SpackEnvironmentError(msg.format(self.unify))

-    def deconcretize(self, spec):
+    def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True):
+        """
+        Remove specified spec from environment concretization
+
+        Arguments:
+            spec: Spec to deconcretize. This must be a root of the environment
+            concrete: If True, find all instances of spec as concrete in the environment.
+                If False, find a single instance of the abstract spec as root of the environment.
+        """
         # spec has to be a root of the environment
-        index = self.concretized_user_specs.index(spec)
-        dag_hash = self.concretized_order.pop(index)
-        del self.concretized_user_specs[index]
+        if concrete:
+            dag_hash = spec.dag_hash()
+
+            pairs = zip(self.concretized_user_specs, self.concretized_order)
+            filtered = [(spec, h) for spec, h in pairs if h != dag_hash]
+            # Cannot use zip and unpack two values; it fails if filtered is empty
+            self.concretized_user_specs = [s for s, _ in filtered]
+            self.concretized_order = [h for _, h in filtered]
+        else:
+            index = self.concretized_user_specs.index(spec)
+            dag_hash = self.concretized_order.pop(index)
+
+            del self.concretized_user_specs[index]

         # If this was the only user spec that concretized to this concrete spec, remove it
         if dag_hash not in self.concretized_order:
-            del self.specs_by_hash[dag_hash]
+            # if we deconcretized a dependency that doesn't correspond to a root, it
+            # won't be here.
+            if dag_hash in self.specs_by_hash:
+                del self.specs_by_hash[dag_hash]

     def _get_specs_to_concretize(
         self,
@@ -1739,11 +1760,14 @@ def _env_modifications_for_view(
         self, view: ViewDescriptor, reverse: bool = False
     ) -> spack.util.environment.EnvironmentModifications:
         try:
-            mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
+            with spack.store.STORE.db.read_transaction():
+                installed_roots = [s for s in self.concrete_roots() if s.installed]
+                mods = uenv.environment_modifications_for_specs(*installed_roots, view=view)
         except Exception as e:
             # Failing to setup spec-specific changes shouldn't be a hard error.
tty.warn( - "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e) + f"could not {'unload' if reverse else 'load'} runtime environment due " + f"to {e.__class__.__name__}: {e}" ) return spack.util.environment.EnvironmentModifications() return mods.reversed() if reverse else mods diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 87408d363ad1bd..5f28ab480cb02a 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -1023,7 +1023,7 @@ def finish_parse_and_run(parser, cmd_name, cmd, env_format_error): """Finish parsing after we know the command to run.""" # add the found command to the parser and re-run then re-parse command = parser.add_command(cmd_name) - args, unknown = parser.parse_known_args(cmd) + args, unknown = parser.parse_known_args() # Now that we know what command this is and what its args are, determine # whether we can continue with a bad environment and raise if not. diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 465fed0324f15a..bccc6805cb8c6e 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -62,7 +62,7 @@ #: config section for this file def configuration(module_set_name): - config_path = "modules:%s" % module_set_name + config_path = f"modules:{module_set_name}" return spack.config.get(config_path, {}) @@ -96,10 +96,10 @@ def _check_tokens_are_valid(format_string, message): named_tokens = re.findall(r"{(\w*)}", format_string) invalid_tokens = [x for x in named_tokens if x.lower() not in _valid_tokens] if invalid_tokens: - msg = message - msg += " [{0}]. ".format(", ".join(invalid_tokens)) - msg += 'Did you check your "modules.yaml" configuration?' - raise RuntimeError(msg) + raise RuntimeError( + f"{message} [{', '.join(invalid_tokens)}]. " + f"Did you check your 'modules.yaml' configuration?" 
+ ) def update_dictionary_extending_lists(target, update): @@ -219,7 +219,7 @@ def root_path(name, module_set_name): """ defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"} # Root folders where the various module files should be written - roots = spack.config.get("modules:%s:roots" % module_set_name, {}) + roots = spack.config.get(f"modules:{module_set_name}:roots", {}) # Merge config values into the defaults so we prefer configured values roots = spack.config.merge_yaml(defaults, roots) @@ -262,7 +262,7 @@ def read_module_index(root): index_path = os.path.join(root, "module-index.yaml") if not os.path.exists(index_path): return {} - with open(index_path, "r") as index_file: + with open(index_path) as index_file: return _read_module_index(index_file) @@ -310,21 +310,21 @@ def upstream_module(self, spec, module_type): if db_for_spec in self.upstream_dbs: db_index = self.upstream_dbs.index(db_for_spec) elif db_for_spec: - raise spack.error.SpackError("Unexpected: {0} is installed locally".format(spec)) + raise spack.error.SpackError(f"Unexpected: {spec} is installed locally") else: - raise spack.error.SpackError("Unexpected: no install DB found for {0}".format(spec)) + raise spack.error.SpackError(f"Unexpected: no install DB found for {spec}") module_index = self.module_indices[db_index] module_type_index = module_index.get(module_type, {}) if not module_type_index: tty.debug( - "No {0} modules associated with the Spack instance where" - " {1} is installed".format(module_type, spec) + f"No {module_type} modules associated with the Spack instance " + f"where {spec} is installed" ) return None if spec.dag_hash() in module_type_index: return module_type_index[spec.dag_hash()] else: - tty.debug("No module is available for upstream package {0}".format(spec)) + tty.debug(f"No module is available for upstream package {spec}") return None @@ -486,43 +486,35 @@ def excluded(self): spec = self.spec conf = self.module.configuration(self.name) - # Compute the list of include rules that match - include_rules = conf.get("include", []) - include_matches = [x for x in include_rules if spec.satisfies(x)] - - # Compute the list of exclude rules that match - exclude_rules = conf.get("exclude", []) - exclude_matches = [x for x in exclude_rules if spec.satisfies(x)] + # Compute the list of matching include / exclude rules, and whether excluded as implicit + include_matches = [x for x in conf.get("include", []) if spec.satisfies(x)] + exclude_matches = [x for x in conf.get("exclude", []) if spec.satisfies(x)] + excluded_as_implicit = not self.explicit and conf.get("exclude_implicits", False) def debug_info(line_header, match_list): if match_list: - msg = "\t{0} : {1}".format(line_header, spec.cshort_spec) - tty.debug(msg) + tty.debug(f"\t{line_header} : {spec.cshort_spec}") for rule in match_list: - tty.debug("\t\tmatches rule: {0}".format(rule)) + tty.debug(f"\t\tmatches rule: {rule}") debug_info("INCLUDE", include_matches) debug_info("EXCLUDE", exclude_matches) - if not include_matches and exclude_matches: - return True + if excluded_as_implicit: + tty.debug(f"\tEXCLUDED_AS_IMPLICIT : {spec.cshort_spec}") - return False + return not include_matches and (exclude_matches or excluded_as_implicit) @property def hidden(self): """Returns True if the module has been hidden, False otherwise.""" - # A few variables for convenience of writing the method - spec = self.spec conf = self.module.configuration(self.name) - hidden_as_implicit = not self.explicit and conf.get( - "hide_implicits", 
conf.get("exclude_implicits", False) - ) + hidden_as_implicit = not self.explicit and conf.get("hide_implicits", False) if hidden_as_implicit: - tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}") + tty.debug(f"\tHIDDEN_AS_IMPLICIT : {self.spec.cshort_spec}") return hidden_as_implicit @@ -611,7 +603,7 @@ def filename(self): # Just the name of the file filename = self.use_name if self.extension: - filename = "{0}.{1}".format(self.use_name, self.extension) + filename = f"{self.use_name}.{self.extension}" # Architecture sub-folder arch_folder_conf = spack.config.get("modules:%s:arch_folder" % self.conf.name, True) if arch_folder_conf: @@ -679,7 +671,7 @@ def configure_options(self): return msg if os.path.exists(pkg.install_configure_args_path): - with open(pkg.install_configure_args_path, "r") as args_file: + with open(pkg.install_configure_args_path) as args_file: return spack.util.path.padding_filter(args_file.read()) # Returning a false-like value makes the default templates skip @@ -894,7 +886,7 @@ def _get_template(self): # 2. template specified in a package directly # 3. default template (must be defined, check in __init__) module_system_name = str(self.module.__name__).split(".")[-1] - package_attribute = "{0}_template".format(module_system_name) + package_attribute = f"{module_system_name}_template" choices = [ self.conf.template, getattr(self.spec.package, package_attribute, None), @@ -960,7 +952,7 @@ def write(self, overwrite=False): # Attribute from package module_name = str(self.module.__name__).split(".")[-1] - attr_name = "{0}_context".format(module_name) + attr_name = f"{module_name}_context" pkg_update = getattr(self.spec.package, attr_name, {}) context.update(pkg_update) @@ -1010,7 +1002,7 @@ def update_module_hiddenness(self, remove=False): if modulerc_exists: # retrieve modulerc content - with open(modulerc_path, "r") as f: + with open(modulerc_path) as f: content = f.readlines() content = "".join(content).split("\n") # remove last empty item if any diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c537a7103afbca..f38ebec2992fcf 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -32,6 +32,7 @@ from spack.build_systems.bundle import BundlePackage from spack.build_systems.cached_cmake import ( CachedCMakePackage, + cmake_cache_filepath, cmake_cache_option, cmake_cache_path, cmake_cache_string, @@ -49,7 +50,9 @@ from spack.build_systems.nmake import NMakePackage from spack.build_systems.octave import OctavePackage from spack.build_systems.oneapi import ( + INTEL_MATH_LIBRARIES, IntelOneApiLibraryPackage, + IntelOneApiLibraryPackageWithSdk, IntelOneApiPackage, IntelOneApiStaticLibraryList, ) diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index adf1a935861010..fb4130d345d02b 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -18,9 +18,7 @@ #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE spec_regex = ( r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|" - r"whitelist|blacklist|" # DEPRECATED: remove in 0.20. - r"include|exclude|" # use these more inclusive/consistent options - r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" + r"include|exclude|projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" ) #: Matches a valid name for a module set @@ -46,14 +44,7 @@ "default": {}, "additionalProperties": False, "properties": { - # DEPRECATED: remove in 0.20. 
- "environment_blacklist": { - "type": "array", - "default": [], - "items": {"type": "string"}, - }, - # use exclude_env_vars instead - "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}}, + "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}} }, }, "template": {"type": "string"}, @@ -80,11 +71,6 @@ "properties": { "verbose": {"type": "boolean", "default": False}, "hash_length": {"type": "integer", "minimum": 0, "default": 7}, - # DEPRECATED: remove in 0.20. - "whitelist": array_of_strings, - "blacklist": array_of_strings, - "blacklist_implicits": {"type": "boolean", "default": False}, - # whitelist/blacklist have been replaced with include/exclude "include": array_of_strings, "exclude": array_of_strings, "exclude_implicits": {"type": "boolean", "default": False}, @@ -188,52 +174,3 @@ "additionalProperties": False, "properties": properties, } - - -# deprecated keys and their replacements -old_to_new_key = {"exclude_implicits": "hide_implicits"} - - -def update_keys(data, key_translations): - """Change blacklist/whitelist to exclude/include. - - Arguments: - data (dict): data from a valid modules configuration. - key_translations (dict): A dictionary of keys to translate to - their respective values. - - Return: - (bool) whether anything was changed in data - """ - changed = False - - if isinstance(data, dict): - keys = list(data.keys()) - for key in keys: - value = data[key] - - translation = key_translations.get(key) - if translation: - data[translation] = data.pop(key) - changed = True - - changed |= update_keys(value, key_translations) - - elif isinstance(data, list): - for elt in data: - changed |= update_keys(elt, key_translations) - - return changed - - -def update(data): - """Update the data in place to remove deprecated properties. - - Args: - data (dict): dictionary to be updated - - Returns: - True if data was changed, False otherwise - """ - # translate blacklist/whitelist to exclude/include - return update_keys(data, old_to_new_key) diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py index 2cc4534d0711fa..2802f8952947aa 100644 --- a/lib/spack/spack/schema/packages.py +++ b/lib/spack/spack/schema/packages.py @@ -8,6 +8,68 @@ :lines: 13- """ +permissions = { + "type": "object", + "additionalProperties": False, + "properties": { + "read": {"type": "string", "enum": ["user", "group", "world"]}, + "write": {"type": "string", "enum": ["user", "group", "world"]}, + "group": {"type": "string"}, + }, +} + +variants = {"oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]} + +requirements = { + "oneOf": [ + # 'require' can be a list of requirement_groups. + # each requirement group is a list of one or more + # specs. 
Either at least one or exactly one spec + # in the group must be satisfied (depending on + # whether you use "any_of" or "one_of", + # repectively) + { + "type": "array", + "items": { + "oneOf": [ + { + "type": "object", + "additionalProperties": False, + "properties": { + "one_of": {"type": "array", "items": {"type": "string"}}, + "any_of": {"type": "array", "items": {"type": "string"}}, + "spec": {"type": "string"}, + "message": {"type": "string"}, + "when": {"type": "string"}, + }, + }, + {"type": "string"}, + ] + }, + }, + # Shorthand for a single requirement group with + # one member + {"type": "string"}, + ] +} + +permissions = { + "type": "object", + "additionalProperties": False, + "properties": { + "read": {"type": "string", "enum": ["user", "group", "world"]}, + "write": {"type": "string", "enum": ["user", "group", "world"]}, + "group": {"type": "string"}, + }, +} + +package_attributes = { + "type": "object", + "additionalProperties": False, + "patternProperties": {r"\w+": {}}, +} + +REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements" #: Properties for inclusion in other schemas properties = { @@ -15,57 +77,14 @@ "type": "object", "default": {}, "additionalProperties": False, - "patternProperties": { - r"\w[\w-]*": { # package name + "properties": { + "all": { # package name "type": "object", "default": {}, "additionalProperties": False, "properties": { - "require": { - "oneOf": [ - # 'require' can be a list of requirement_groups. - # each requirement group is a list of one or more - # specs. Either at least one or exactly one spec - # in the group must be satisfied (depending on - # whether you use "any_of" or "one_of", - # repectively) - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "object", - "additionalProperties": False, - "properties": { - "one_of": { - "type": "array", - "items": {"type": "string"}, - }, - "any_of": { - "type": "array", - "items": {"type": "string"}, - }, - "spec": {"type": "string"}, - "message": {"type": "string"}, - "when": {"type": "string"}, - }, - }, - {"type": "string"}, - ] - }, - }, - # Shorthand for a single requirement group with - # one member - {"type": "string"}, - ] - }, - "version": { - "type": "array", - "default": [], - # version strings (type should be string, number is still possible - # but deprecated. this is to avoid issues with e.g. 
3.10 -> 3.1) - "items": {"anyOf": [{"type": "string"}, {"type": "number"}]}, - }, + "require": requirements, + "version": {}, # Here only to warn users on ignored properties "target": { "type": "array", "default": [], @@ -78,22 +97,10 @@ "items": {"type": "string"}, }, # compiler specs "buildable": {"type": "boolean", "default": True}, - "permissions": { - "type": "object", - "additionalProperties": False, - "properties": { - "read": {"type": "string", "enum": ["user", "group", "world"]}, - "write": {"type": "string", "enum": ["user", "group", "world"]}, - "group": {"type": "string"}, - }, - }, + "permissions": permissions, # If 'get_full_repo' is promoted to a Package-level # attribute, it could be useful to set it here - "package_attributes": { - "type": "object", - "additionalProperties": False, - "patternProperties": {r"\w+": {}}, - }, + "package_attributes": package_attributes, "providers": { "type": "object", "default": {}, @@ -106,12 +113,40 @@ } }, }, - "variants": { - "oneOf": [ - {"type": "string"}, - {"type": "array", "items": {"type": "string"}}, - ] + "variants": variants, + }, + "deprecatedProperties": { + "properties": ["version"], + "message": "setting version preferences in the 'all' section of packages.yaml " + "is deprecated and will be removed in v0.22\n\n\tThese preferences " + "will be ignored by Spack. You can set them only in package-specific sections " + "of the same file.\n", + "error": False, + }, + } + }, + "patternProperties": { + r"(?!^all$)(^\w[\w-]*)": { # package name + "type": "object", + "default": {}, + "additionalProperties": False, + "properties": { + "require": requirements, + "version": { + "type": "array", + "default": [], + # version strings + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]}, }, + "target": {}, # Here only to warn users on ignored properties + "compiler": {}, # Here only to warn users on ignored properties + "buildable": {"type": "boolean", "default": True}, + "permissions": permissions, + # If 'get_full_repo' is promoted to a Package-level + # attribute, it could be useful to set it here + "package_attributes": package_attributes, + "providers": {}, # Here only to warn users on ignored properties + "variants": variants, "externals": { "type": "array", "items": { @@ -127,6 +162,18 @@ }, }, }, + "deprecatedProperties": { + "properties": ["target", "compiler", "providers"], + "message": "setting 'compiler:', 'target:' or 'provider:' preferences in " + "a package-specific section of packages.yaml is deprecated, and will be " + "removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and " + "can be set only in the 'all' section of the same file. " + "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, " + "including files:lines where the deprecated attributes are used.\n\n" + "\tUse requirements to enforce conditions on specific packages: " + f"{REQUIREMENT_URL}\n", + "error": False, + }, } }, } diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 270c9f478f2890..4e331d3e77de89 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -712,7 +712,7 @@ def _get_cause_tree( (condition_id, set_id) in which the latter idea means that the condition represented by the former held in the condition set represented by the latter. 
""" - seen = set(seen) | set(cause) + seen.add(cause) parents = [c for e, c in condition_causes if e == cause and c not in seen] local = "required because %s " % conditions[cause[0]] @@ -811,7 +811,14 @@ def on_model(model): errors = sorted( [(int(priority), msg, args) for priority, msg, *args in error_args], reverse=True ) - msg = self.message(errors) + try: + msg = self.message(errors) + except Exception as e: + msg = ( + f"unexpected error during concretization [{str(e)}]. " + f"Please report a bug at https://github.com/spack/spack/issues" + ) + raise spack.error.SpackError(msg) raise UnsatisfiableSpecError(msg) @@ -1257,32 +1264,9 @@ def compiler_facts(self): matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec)) for weight, (compiler_id, cspec) in enumerate(matches): - f = fn.default_compiler_preference(compiler_id, weight) + f = fn.compiler_weight(compiler_id, weight) self.gen.fact(f) - def package_compiler_defaults(self, pkg): - """Facts about packages' compiler prefs.""" - - packages = spack.config.get("packages") - pkg_prefs = packages.get(pkg.name) - if not pkg_prefs or "compiler" not in pkg_prefs: - return - - compiler_list = self.possible_compilers - compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True) - ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False) - matches = sorted(compiler_list, key=lambda x: ppk(x.spec)) - - for i, compiler in enumerate(reversed(matches)): - self.gen.fact( - fn.pkg_fact( - pkg.name, - fn.node_compiler_preference( - compiler.spec.name, compiler.spec.version, -i * 100 - ), - ) - ) - def package_requirement_rules(self, pkg): rules = self.requirement_rules_from_package_py(pkg) rules.extend(self.requirement_rules_from_packages_yaml(pkg)) @@ -1374,9 +1358,6 @@ def pkg_rules(self, pkg, tests): # conflicts self.conflict_rules(pkg) - # default compilers for this package - self.package_compiler_defaults(pkg) - # virtuals self.package_provider_rules(pkg) @@ -1672,6 +1653,7 @@ def virtual_preferences(self, pkg_name, func): for i, provider in enumerate(providers): provider_name = spack.spec.Spec(provider).name func(vspec, provider_name, i) + self.gen.newline() def provider_defaults(self): self.gen.h2("Default virtual providers") @@ -1864,8 +1846,8 @@ def preferred_variants(self, pkg_name): fn.variant_default_value_from_packages_yaml(pkg_name, variant.name, value) ) - def target_preferences(self, pkg_name): - key_fn = spack.package_prefs.PackagePrefs(pkg_name, "target") + def target_preferences(self): + key_fn = spack.package_prefs.PackagePrefs("all", "target") if not self.target_specs_cache: self.target_specs_cache = [ @@ -1875,17 +1857,25 @@ def target_preferences(self, pkg_name): package_targets = self.target_specs_cache[:] package_targets.sort(key=key_fn) - - offset = 0 - best_default = self.default_targets[0][1] for i, preferred in enumerate(package_targets): - if str(preferred.architecture.target) == best_default and i != 0: - offset = 100 - self.gen.fact( - fn.pkg_fact( - pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset) - ) - ) + self.gen.fact(fn.target_weight(str(preferred.architecture.target), i)) + + def flag_defaults(self): + self.gen.h2("Compiler flag defaults") + + # types of flags that can be on specs + for flag in spack.spec.FlagMap.valid_compiler_flags(): + self.gen.fact(fn.flag_type(flag)) + self.gen.newline() + + # flags from compilers.yaml + compilers = all_compilers_in_config() + for compiler in compilers: + for name, flags in 
compiler.flags.items(): + for flag in flags: + self.gen.fact( + fn.compiler_version_flag(compiler.name, compiler.version, name, flag) + ) def spec_clauses(self, *args, **kwargs): """Wrap a call to `_spec_clauses()` into a try/except block that @@ -1938,7 +1928,7 @@ class Head: node_flag = fn.attr("node_flag_set") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_possible_prop = fn.attr("variant_possible_prop") + variant_propagation_candidate = fn.attr("variant_propagation_candidate") class Body: node = fn.attr("node") @@ -1952,7 +1942,7 @@ class Body: node_flag = fn.attr("node_flag") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_possible_prop = fn.attr("variant_possible_prop") + variant_propagation_candidate = fn.attr("variant_propagation_candidate") f = Body if body else Head @@ -2001,7 +1991,9 @@ class Body: clauses.append(f.variant_value(spec.name, vname, value)) if variant.propagate: - clauses.append(f.variant_possible_prop(spec.name, vname, value, spec.name)) + clauses.append( + f.variant_propagation_candidate(spec.name, vname, value, spec.name) + ) # Tell the concretizer that this is a possible value for the # variant, to account for things like int/str values where we @@ -2339,6 +2331,8 @@ def target_defaults(self, specs): self.default_targets = list(sorted(set(self.default_targets))) + self.target_preferences() + def virtual_providers(self): self.gen.h2("Virtual providers") msg = ( @@ -2660,7 +2654,6 @@ def setup( self.pkg_rules(pkg, tests=self.tests) self.gen.h2("Package preferences: %s" % pkg) self.preferred_variants(pkg) - self.target_preferences(pkg) self.gen.h1("Develop specs") # Inject dev_path from environment diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 5c7a332d3f1024..d5f24ddc3b33f0 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -589,21 +589,15 @@ possible_provider_weight(DependencyNode, VirtualNode, 0, "external") :- provider(DependencyNode, VirtualNode), external(DependencyNode). -% A provider mentioned in packages.yaml can use a weight -% according to its priority in the list of providers -possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "packages_yaml") - :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)), - depends_on(node(ID, Package), node(DependencyID, Dependency)), - pkg_fact(Package, provider_preference(Virtual, Dependency, Weight)). - % A provider mentioned in the default configuration can use a weight % according to its priority in the list of providers -possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "default") - :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)), - default_provider_preference(Virtual, Dependency, Weight). +possible_provider_weight(node(ProviderID, Provider), node(VirtualID, Virtual), Weight, "default") + :- provider(node(ProviderID, Provider), node(VirtualID, Virtual)), + default_provider_preference(Virtual, Provider, Weight). % Any provider can use 100 as a weight, which is very high and discourage its use -possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fallback") :- provider(node(DependencyID, Dependency), VirtualNode). +possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback") + :- provider(node(ProviderID, Provider), VirtualNode). 
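# --------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): a rough Python analogue of the
# possible_provider_weight rules above. Every candidate provider always has
# the catch-all "fallback" weight 100, while externals and configured
# preferences contribute smaller weights, and the provider with the lowest
# weight wins. The example weights below are made up; they are not read from
# any Spack configuration.

def best_provider(candidates, preference_weights):
    def weight(provider):
        # 100 is the fallback weight every provider can use
        return min(preference_weights.get(provider, []) + [100])
    return min(candidates, key=weight)

preference_weights = {"openmpi": [0], "mpich": [1]}  # e.g. providers order in packages.yaml
print(best_provider(["mvapich2", "mpich", "openmpi"], preference_weights))  # openmpi
# --------------------------------------------------------------------------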
% do not warn if generated program contains none of these. #defined virtual/1. @@ -763,27 +757,36 @@ node_has_variant(node(ID, Package), Variant) :- pkg_fact(Package, variant(Variant)), attr("node", node(ID, Package)). -attr("variant_possible_prop", PackageNode, Variant, Value, Source) :- +% Variant propagation is forwarded to dependencies +attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :- attr("node", PackageNode), depends_on(ParentNode, PackageNode), attr("variant_value", node(_, Source), Variant, Value), - attr("variant_possible_prop", ParentNode, Variant, _, Source). - -attr("variant_propagate", PackageNode, Variant, Value, Source) :- - attr("variant_possible_prop", PackageNode, Variant, Value, Source), - not attr("variant_set", PackageNode, Variant). + attr("variant_propagation_candidate", ParentNode, Variant, _, Source). -attr("variant_value", node(ID, Package), Variant, Value) :- - attr("node", node(ID, Package)), +% If the node is a candidate, and it has the variant and value, +% then those variant and value should be propagated +attr("variant_propagate", node(ID, Package), Variant, Value, Source) :- + attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source), node_has_variant(node(ID, Package), Variant), - attr("variant_propagate", node(ID, Package), Variant, Value, _), - pkg_fact(Package, variant_possible_value(Variant, Value)). + pkg_fact(Package, variant_possible_value(Variant, Value)), + not attr("variant_set", node(ID, Package), Variant). + +% Propagate the value, if there is the corresponding attribute +attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _). +% If a variant is propagated, we cannot have extraneous values (this is for multi valued variants) +variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _). +:- variant_is_propagated(PackageNode, Variant), + attr("variant_value", PackageNode, Variant, Value), + not attr("variant_propagate", PackageNode, Variant, Value, _). + +% Cannot receive different values from different sources on the same variant error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :- attr("variant_propagate", node(X, Package), Variant, Value1, Source1), attr("variant_propagate", node(X, Package), Variant, Value2, Source2), node_has_variant(node(X, Package), Variant), - Value1 < Value2. + Value1 < Value2, Source1 < Source2. % a variant cannot be set if it is not a variant on the package error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package) @@ -1063,7 +1066,7 @@ attr("node_target", PackageNode, Target) node_target_weight(node(ID, Package), Weight) :- attr("node", node(ID, Package)), attr("node_target", node(ID, Package), Target), - pkg_fact(Package, target_weight(Target, Weight)). + target_weight(Target, Weight). % compatibility rules for targets among nodes node_target_match(ParentNode, DependencyNode) @@ -1185,23 +1188,17 @@ compiler_mismatch_required(PackageNode, DependencyNode) #defined allow_compiler/2. 
% compilers weighted by preference according to packages.yaml -compiler_weight(node(ID, Package), Weight) +node_compiler_weight(node(ID, Package), Weight) :- node_compiler(node(ID, Package), CompilerID), compiler_name(CompilerID, Compiler), compiler_version(CompilerID, V), - pkg_fact(Package, node_compiler_preference(Compiler, V, Weight)). -compiler_weight(node(ID, Package), Weight) - :- node_compiler(node(ID, Package), CompilerID), - compiler_name(CompilerID, Compiler), - compiler_version(CompilerID, V), - not pkg_fact(Package, node_compiler_preference(Compiler, V, _)), - default_compiler_preference(CompilerID, Weight). -compiler_weight(node(ID, Package), 100) + compiler_weight(CompilerID, Weight). + +node_compiler_weight(node(ID, Package), 100) :- node_compiler(node(ID, Package), CompilerID), compiler_name(CompilerID, Compiler), compiler_version(CompilerID, V), - not pkg_fact(Package, node_compiler_preference(Compiler, V, _)), - not default_compiler_preference(CompilerID, _). + not compiler_weight(CompilerID, _). % For the time being, be strict and reuse only if the compiler match one we have on the system error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version) @@ -1209,7 +1206,7 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss not node_compiler(node(ID, Package), _). #defined node_compiler_preference/4. -#defined default_compiler_preference/3. +#defined compiler_weight/3. %----------------------------------------------------------------------------- % Compiler flags @@ -1533,7 +1530,7 @@ opt_criterion(15, "non-preferred compilers"). #minimize{ 0@15: #true }. #minimize{ Weight@15+Priority,PackageNode - : compiler_weight(PackageNode, Weight), + : node_compiler_weight(PackageNode, Weight), build_priority(PackageNode, Priority) }. diff --git a/lib/spack/spack/solver/heuristic.lp b/lib/spack/spack/solver/heuristic.lp index 745ea4f9625f17..cc87207047d438 100644 --- a/lib/spack/spack/solver/heuristic.lp +++ b/lib/spack/spack/solver/heuristic.lp @@ -16,9 +16,9 @@ #heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true] #heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true] #heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true] -#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true] +#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true] #heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true] -#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true] +#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true] % Providers #heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). 
[30, true] diff --git a/lib/spack/spack/solver/heuristic_separate.lp b/lib/spack/spack/solver/heuristic_separate.lp index cb4345f3be3245..caa47aa09d84d1 100644 --- a/lib/spack/spack/solver/heuristic_separate.lp +++ b/lib/spack/spack/solver/heuristic_separate.lp @@ -13,7 +13,7 @@ #heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] #heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] #heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] -#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] +#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] % node(ID, _), split build dependencies #heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] @@ -21,4 +21,4 @@ #heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] #heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] #heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] -#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] +#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 20802bbdd80e91..ea9caf7fc031ee 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -4,7 +4,9 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import filecmp import glob +import gzip import io +import json import os import platform import sys @@ -1112,3 +1114,77 @@ def test_tarfile_of_spec_prefix(tmpdir): assert tar.getmember(f"{expected_prefix}/b_directory/file").isreg() assert tar.getmember(f"{expected_prefix}/c_directory").isdir() assert tar.getmember(f"{expected_prefix}/c_directory/file").isreg() + + +@pytest.mark.parametrize("layout,expect_success", [(None, True), (1, True), (2, False)]) +def test_get_valid_spec_file(tmp_path, layout, expect_success): + # Test reading a spec.json file that does not specify a layout version. 
+ spec_dict = Spec("example").to_dict() + path = tmp_path / "spec.json" + effective_layout = layout or 0 # If not specified it should be 0 + + # Add a layout version + if layout is not None: + spec_dict["buildcache_layout_version"] = layout + + # Save to file + with open(path, "w") as f: + json.dump(spec_dict, f) + + try: + spec_dict_disk, layout_disk = bindist._get_valid_spec_file( + str(path), max_supported_layout=1 + ) + assert expect_success + assert spec_dict_disk == spec_dict + assert layout_disk == effective_layout + except bindist.InvalidMetadataFile: + assert not expect_success + + +def test_get_valid_spec_file_doesnt_exist(tmp_path): + with pytest.raises(bindist.InvalidMetadataFile, match="No such file"): + bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) + + +def test_get_valid_spec_file_gzipped(tmp_path): + # Create a gzipped file, contents don't matter + path = tmp_path / "spec.json.gz" + with gzip.open(path, "wb") as f: + f.write(b"hello") + with pytest.raises( + bindist.InvalidMetadataFile, match="Compressed spec files are not supported" + ): + bindist._get_valid_spec_file(str(path), max_supported_layout=1) + + +@pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"]) +def test_get_valid_spec_file_no_json(tmp_path, filename): + tmp_path.joinpath(filename).write_text("not json") + with pytest.raises(bindist.InvalidMetadataFile): + bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1) + + +def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys): + layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1 + spec = Spec("gmake@4.4.1%gcc@13.1.0 arch=linux-ubuntu23.04-zen2") + spec._mark_concrete() + spec_dict = spec.to_dict() + spec_dict["buildcache_layout_version"] = layout_version + + # Setup a basic local build cache structure + path = ( + tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json") + ) + path.parent.mkdir(parents=True) + with open(path, "w") as f: + json.dump(spec_dict, f) + + # Configure as a mirror. + mirror_cmd("add", "test-mirror", str(tmp_path)) + + # Shouldn't be able "download" this. + assert bindist.download_tarball(spec, unsigned=True) is None + + # And there should be a warning about an unsupported layout version. 
+ assert f"Layout version {layout_version} is too new" in capsys.readouterr().err diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py index 9bc2049fdfbe9f..1cea72d3b25ad6 100644 --- a/lib/spack/spack/test/cmd/compiler.py +++ b/lib/spack/spack/test/cmd/compiler.py @@ -4,12 +4,14 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import shutil -import sys import pytest +import spack.cmd.compiler import spack.compilers import spack.main +import spack.spec +import spack.util.pattern import spack.version compiler = spack.main.SpackCommand("compiler") @@ -146,7 +148,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable): compilers_before_find = set(spack.compilers.all_compiler_specs()) args = spack.util.pattern.Bunch( - all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None + all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False ) spack.cmd.compiler.compiler_find(args) compilers_after_find = set(spack.compilers.all_compiler_specs()) @@ -159,10 +161,15 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable): @pytest.mark.not_on_windows("Cannot execute bash script on Windows") @pytest.mark.regression("17590") -def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir): +@pytest.mark.parametrize("mixed_toolchain", [True, False]) +def test_compiler_find_mixed_suffixes( + mixed_toolchain, no_compilers_yaml, working_env, compilers_dir +): """Ensure that we'll mix compilers with different suffixes when necessary.""" os.environ["PATH"] = str(compilers_dir) - output = compiler("find", "--scope=site") + output = compiler( + "find", "--scope=site", "--mixed-toolchain" if mixed_toolchain else "--no-mixed-toolchain" + ) assert "clang@11.0.0" in output assert "gcc@8.4.0" in output @@ -176,9 +183,8 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_ assert clang["paths"] == { "cc": str(compilers_dir / "clang"), "cxx": str(compilers_dir / "clang++"), - # we only auto-detect mixed clang on macos - "f77": gfortran_path if sys.platform == "darwin" else None, - "fc": gfortran_path if sys.platform == "darwin" else None, + "f77": gfortran_path if mixed_toolchain else None, + "fc": gfortran_path if mixed_toolchain else None, } assert gcc["paths"] == { diff --git a/lib/spack/spack/test/cmd/config.py b/lib/spack/spack/test/cmd/config.py index 4f3d5afe770e0c..7247ce97531dcd 100644 --- a/lib/spack/spack/test/cmd/config.py +++ b/lib/spack/spack/test/cmd/config.py @@ -215,10 +215,10 @@ def test_config_add_override_leaf(mutable_empty_config): def test_config_add_update_dict(mutable_empty_config): - config("add", "packages:all:version:[1.0.0]") + config("add", "packages:hdf5:version:[1.0.0]") output = config("get", "packages") - expected = "packages:\n all:\n version: [1.0.0]\n" + expected = "packages:\n hdf5:\n version: [1.0.0]\n" assert output == expected @@ -352,8 +352,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir): contents = """spack: packages: all: - version: - - 1.0.0 + target: [x86_64] """ # create temp file and add it to config @@ -368,8 +367,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir): # added config comes before prior config expected = """packages: all: - version: - - 1.0.0 + target: [x86_64] compiler: [gcc] """ @@ -381,7 +379,7 @@ def test_config_add_invalid_file_fails(tmpdir): # invalid because version requires a list contents = """spack: packages: - all: + hdf5: 
version: 1.0.0 """ @@ -631,14 +629,11 @@ def test_config_prefer_upstream( packages = syaml.load(open(cfg_file))["packages"] # Make sure only the non-default variants are set. - assert packages["boost"] == { - "compiler": ["gcc@=10.2.1"], - "variants": "+debug +graph", - "version": ["1.63.0"], - } - assert packages["dependency-install"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.0"]} + assert packages["all"] == {"compiler": ["gcc@=10.2.1"]} + assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]} + assert packages["dependency-install"] == {"version": ["2.0"]} # Ensure that neither variant gets listed for hdf5, since they conflict - assert packages["hdf5"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.3"]} + assert packages["hdf5"] == {"version": ["2.3"]} # Make sure a message about the conflicting hdf5's was given. assert "- hdf5" in output diff --git a/lib/spack/spack/test/cmd/deconcretize.py b/lib/spack/spack/test/cmd/deconcretize.py new file mode 100644 index 00000000000000..30e39604bf4d4d --- /dev/null +++ b/lib/spack/spack/test/cmd/deconcretize.py @@ -0,0 +1,78 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import pytest + +import spack.environment as ev +from spack.main import SpackCommand, SpackCommandError + +deconcretize = SpackCommand("deconcretize") + + +@pytest.fixture(scope="function") +def test_env(mutable_mock_env_path, config, mock_packages): + ev.create("test") + with ev.read("test") as e: + e.add("a@2.0 foobar=bar ^b@1.0") + e.add("a@1.0 foobar=bar ^b@0.9") + e.concretize() + e.write() + + +def test_deconcretize_dep(test_env): + with ev.read("test") as e: + deconcretize("-y", "b@1.0") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 1 + assert specs[0].satisfies("a@1.0") + + +def test_deconcretize_all_dep(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize("-y", "b") + deconcretize("-y", "--all", "b") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 + + +def test_deconcretize_root(test_env): + with ev.read("test") as e: + output = deconcretize("-y", "--root", "b@1.0") + assert "No matching specs to deconcretize" in output + assert len(e.concretized_order) == 2 + + deconcretize("-y", "--root", "a@2.0") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 1 + assert specs[0].satisfies("a@1.0") + + +def test_deconcretize_all_root(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize("-y", "--root", "a") + + output = deconcretize("-y", "--root", "--all", "b") + assert "No matching specs to deconcretize" in output + assert len(e.concretized_order) == 2 + + deconcretize("-y", "--root", "--all", "a") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 + + +def test_deconcretize_all(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize() + deconcretize("-y", "--all") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index a06fdbd8cf8b2f..3fd40867eb7133 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -53,6 +53,7 @@ stage = SpackCommand("stage") uninstall = SpackCommand("uninstall") find = SpackCommand("find") +module = 
SpackCommand("module") sep = os.sep @@ -284,7 +285,7 @@ def setup_error(pkg, env): _, err = capfd.readouterr() assert "cmake-client had issues!" in err - assert "Warning: couldn't load runtime environment" in err + assert "Warning: could not load runtime environment" in err def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch): @@ -502,12 +503,12 @@ def test_env_activate_broken_view( # test that Spack detects the missing package and fails gracefully with spack.repo.use_repositories(mock_custom_repository): wrong_repo = env("activate", "--sh", "test") - assert "Warning: couldn't load runtime environment" in wrong_repo + assert "Warning: could not load runtime environment" in wrong_repo assert "Unknown namespace: builtin.mock" in wrong_repo # test replacing repo fixes it normal_repo = env("activate", "--sh", "test") - assert "Warning: couldn't load runtime environment" not in normal_repo + assert "Warning: could not load runtime environment" not in normal_repo assert "Unknown namespace: builtin.mock" not in normal_repo @@ -1105,13 +1106,14 @@ def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): assert all(e in env("list") for e in environments) -def test_env_loads(install_mockery, mock_fetch): +def test_env_loads(install_mockery, mock_fetch, mock_modules_root): env("create", "test") with ev.read("test"): add("mpileaks") concretize() install("--fake") + module("tcl", "refresh", "-y") with ev.read("test"): env("loads") @@ -2621,7 +2623,7 @@ def test_env_write_only_non_default_nested(tmpdir): - matrix: - [mpileaks] packages: - mpileaks: + all: compiler: [gcc] view: true """ diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index c4528f9852e284..9493e1d17fef8b 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -25,7 +25,7 @@ def parser(): def print_buffer(monkeypatch): buffer = [] - def _print(*args): + def _print(*args, **kwargs): buffer.extend(args) monkeypatch.setattr(spack.cmd.info.color, "cprint", _print, raising=False) @@ -33,10 +33,11 @@ def _print(*args): @pytest.mark.parametrize( - "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk"] # a BundlePackage + "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"] ) -def test_it_just_runs(pkg): - info(pkg) +@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]]) +def test_it_just_runs(pkg, extra_args): + info(pkg, *extra_args) def test_info_noversion(mock_packages, print_buffer): @@ -78,7 +79,8 @@ def test_is_externally_detectable(pkg_query, expected, parser, print_buffer): "gcc", # This should ensure --test's c_names processing loop covered ], ) -def test_info_fields(pkg_query, parser, print_buffer): +@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]]) +def test_info_fields(pkg_query, extra_args, parser, print_buffer): expected_fields = ( "Description:", "Homepage:", @@ -91,7 +93,7 @@ def test_info_fields(pkg_query, parser, print_buffer): "Licenses:", ) - args = parser.parse_args(["--all", pkg_query]) + args = parser.parse_args(["--all", pkg_query] + extra_args) spack.cmd.info.info(parser, args) for text in expected_fields: diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py index 512defb195370f..910c9e87d9335f 100644 --- a/lib/spack/spack/test/compilers/basics.py +++ b/lib/spack/spack/test/compilers/basics.py @@ -422,7 +422,6 @@ def test_clang_flags(): "-gdwarf-5", "-gline-tables-only", "-gmodules", 
- "-gz", "-g", ], "clang@3.3", @@ -445,7 +444,6 @@ def test_aocc_flags(): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ], "aocc@2.2.0", diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index d3815d367424ed..d19884b67a53ac 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -462,45 +462,32 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" ) - @pytest.mark.parametrize("unify", [True, False, "when_possible"]) - def test_concretize_environment_propagated_disabled_variant( - self, unify, tmpdir, mutable_mock_env_path - ): - """Ensure that variants are propagated in a concrete environment""" - path = tmpdir.join("spack.yaml") - - with tmpdir.as_cwd(): - with open(str(path), "w") as f: - f.write( - """\ -spack: - specs: - - ascent ~~shared +adios2 -""" - ) - - SpackCommand("env")("create", "test", str(path)) - - test = spack.environment.read("test") - test.unify = unify - test.concretize() - - for spec in test.specs_by_hash.values(): - for dep in spec.dependencies(): - if dep.name == "adios2": - assert dep.satisfies("~shared") - assert dep.satisfies("^bzip2 ~shared") - - @pytest.mark.only_clingo( - "Optional compiler propagation isn't deprecated for original concretizer" + @pytest.mark.parametrize( + "spec_str,expected_propagation", + [ + ("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]), + # Propagates past a node that doesn't have the variant + ("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]), + ( + "ascent~~shared +adios2", + [("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")], + ), + # Propagates below a node that uses the other value explicitly + ( + "ascent~~shared +adios2 ^adios2+shared", + [("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")], + ), + ( + "ascent++shared +adios2 ^adios2~shared", + [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")], + ), + ], ) - def test_concretize_propagate_disabled_variant(self): - """Test a package variant value was passed from its parent.""" - spec = Spec("ascent~~shared +adios2") - spec.concretize() - - for dep in spec.traverse(): - assert dep.satisfies("~shared") + def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagation): + """Tests various patterns of boolean variant propagation""" + spec = Spec(spec_str).concretized() + for key, expected_satisfies in expected_propagation: + spec[key].satisfies(expected_satisfies) @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" @@ -518,13 +505,11 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): ) def test_concretize_propagate_specified_variant(self): """Test that only the specified variant is propagated to the dependencies""" - spec = Spec("splice-b++bar") + spec = Spec("parent-foo-bar ~~foo") spec.concretize() - assert spec.satisfies("+bar") and spec.satisfies("^splice-a+bar") - assert spec.satisfies("+bar") and spec.satisfies("^splice-z+bar") - assert spec.satisfies("+foo") and not spec.satisfies("^splice-a+foo") - assert spec.satisfies("+foo") and not spec.satisfies("^splice-z+foo") + assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo") + assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar") @pytest.mark.only_clingo("Original 
concretizer is allowed to forego variant propagation") def test_concretize_propagate_multivalue_variant(self): diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 20d0e1ae91ad84..d061f9a8f555c5 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -105,17 +105,13 @@ def test_preferred_variants_from_wildcard(self): @pytest.mark.parametrize( "compiler_str,spec_str", - [("gcc@4.5.0", "mpileaks"), ("clang@12.0.0", "mpileaks"), ("gcc@4.5.0", "openmpi")], + [("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")], ) def test_preferred_compilers(self, compiler_str, spec_str): """Test preferred compilers are applied correctly""" - spec = Spec(spec_str) - update_packages(spec.name, "compiler", [compiler_str]) - spec.concretize() - # note: lhs has concrete compiler version, rhs still abstract. - # Could be made more strict by checking for equality with `gcc@=4.5.0` - # etc. - assert spec.compiler.satisfies(CompilerSpec(compiler_str)) + update_packages("all", "compiler", [compiler_str]) + spec = spack.spec.Spec(spec_str).concretized() + assert spec.compiler == CompilerSpec(compiler_str) @pytest.mark.only_clingo("Use case not supported by the original concretizer") def test_preferred_target(self, mutable_mock_repo): @@ -124,7 +120,7 @@ def test_preferred_target(self, mutable_mock_repo): default = str(spec.target) preferred = str(spec.target.family) - update_packages("mpich", "target", [preferred]) + update_packages("all", "target", [preferred]) spec = concretize("mpich") assert str(spec.target) == preferred @@ -132,7 +128,7 @@ def test_preferred_target(self, mutable_mock_repo): assert str(spec["mpileaks"].target) == preferred assert str(spec["mpich"].target) == preferred - update_packages("mpileaks", "target", [default]) + update_packages("all", "target", [default]) spec = concretize("mpileaks") assert str(spec["mpileaks"].target) == default assert str(spec["mpich"].target) == default diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index f7bf7d75698cd1..5f544a31296a68 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -78,7 +78,7 @@ def env_yaml(tmpdir): verify_ssl: False dirty: False packages: - libelf: + all: compiler: [ 'gcc@4.5.3' ] repos: - /x/y/z @@ -942,7 +942,7 @@ def test_single_file_scope(config, env_yaml): # from the single-file config assert spack.config.get("config:verify_ssl") is False assert spack.config.get("config:dirty") is False - assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"] + assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"] # from the lower config scopes assert spack.config.get("config:checksum") is True @@ -965,7 +965,7 @@ def test_single_file_scope_section_override(tmpdir, config): config: verify_ssl: False packages:: - libelf: + all: compiler: [ 'gcc@4.5.3' ] repos: - /x/y/z @@ -977,7 +977,7 @@ def test_single_file_scope_section_override(tmpdir, config): with spack.config.override(scope): # from the single-file config assert spack.config.get("config:verify_ssl") is False - assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"] + assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"] # from the lower config scopes assert spack.config.get("config:checksum") is True diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 514b1e91542403..fb7608a56bd377 100644 --- 
a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -6,6 +6,7 @@ import collections import datetime import errno +import functools import inspect import itertools import json @@ -1967,3 +1968,14 @@ def __exit__(self, *args): pass monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool) + + +def _root_path(x, y, *, path): + return path + + +@pytest.fixture +def mock_modules_root(tmp_path, monkeypatch): + """Sets the modules root to a temporary directory, to avoid polluting configuration scopes.""" + fn = functools.partial(_root_path, path=str(tmp_path)) + monkeypatch.setattr(spack.modules.common, "root_path", fn) diff --git a/lib/spack/spack/test/data/config/modules.yaml b/lib/spack/spack/test/data/config/modules.yaml index 28e2ec91b3d962..f217dd7eaf3379 100644 --- a/lib/spack/spack/test/data/config/modules.yaml +++ b/lib/spack/spack/test/data/config/modules.yaml @@ -14,12 +14,7 @@ # ~/.spack/modules.yaml # ------------------------------------------------------------------------- modules: - default: - enable: - - tcl - roots: - tcl: $user_cache_path/tcl - lmod: $user_cache_path/lmod + default: {} prefix_inspections: bin: - PATH diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml b/lib/spack/spack/test/data/modules/lmod/blacklist.yaml deleted file mode 100644 index 8c88214380f28b..00000000000000 --- a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude.yaml` for the new syntax -enable: - - lmod -lmod: - core_compilers: - - 'clang@3.3' - hierarchy: - - mpi - blacklist: - - callpath - - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml deleted file mode 100644 index 997501e08ba454..00000000000000 --- a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `alter_environment.yaml` for the new syntax -enable: - - lmod -lmod: - core_compilers: - - 'clang@3.3' - - hierarchy: - - mpi - - all: - autoload: none - filter: - environment_blacklist: - - CMAKE_PREFIX_PATH - environment: - set: - '{name}_ROOT': '{prefix}' - - 'platform=test target=x86_64': - environment: - set: - FOO: 'foo' - unset: - - BAR - - 'platform=test target=core2': - load: - - 'foo/bar' diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist.yaml deleted file mode 100644 index 4ffeb135e95ebe..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml +++ /dev/null @@ -1,12 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude.yaml` for the new syntax -enable: - - tcl -tcl: - whitelist: - - zmpi - blacklist: - - callpath - - mpi - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml deleted file mode 100644 index 128200d6ec6f87..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `alter_environment.yaml` for the new syntax -enable: - - tcl -tcl: - all: - autoload: none - filter: - environment_blacklist: - - CMAKE_PREFIX_PATH - environment: - set: - '{name}_ROOT': '{prefix}' - - 'platform=test target=x86_64': - environment: - set: - FOO: 'foo' - OMPI_MCA_mpi_leave_pinned: '1' - unset: - - BAR - - 
'platform=test target=core2': - load: - - 'foo/bar' diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml deleted file mode 100644 index b49bc80b5e82a7..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml +++ /dev/null @@ -1,8 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude_implicits.yaml` for the new syntax -enable: - - tcl -tcl: - blacklist_implicits: true - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml index b03f966c7c1509..75b4cd09d2ec42 100644 --- a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml +++ b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml @@ -4,7 +4,7 @@ tcl: all: autoload: none filter: - environment_blacklist: + exclude_env_vars: - CMAKE_PREFIX_PATH environment: set: diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 3033370ac6f191..ee3e5da81ef679 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -803,6 +803,14 @@ def test_query_spec_with_non_conditional_virtual_dependency(database): assert len(results) == 1 +def test_query_virtual_spec(database): + """Make sure we can query for virtuals in the DB""" + results = spack.store.STORE.db.query_local("mpi") + assert len(results) == 3 + names = [s.name for s in results] + assert all(name in names for name in ["mpich", "mpich2", "zmpi"]) + + def test_failed_spec_path_error(database): """Ensure spec not concrete check is covered.""" s = spack.spec.Spec("a") diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py index 9e7f3a3bde31b5..a17e5c94d9a5b8 100644 --- a/lib/spack/spack/test/llnl/util/lock.py +++ b/lib/spack/spack/test/llnl/util/lock.py @@ -18,7 +18,7 @@ mpirun -n 7 spack test lock And it will test locking correctness among MPI processes. 
Ideally, you -want the MPI processes to span across multiple nodes, so, e.g., for SLURM +want the MPI processes to span across multiple nodes, so, e.g., for Slurm you might do this:: srun -N 7 -n 7 -m cyclic spack test lock diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py index 15656dff259671..906c1d5c2a134c 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -14,11 +14,13 @@ import spack.package_base import spack.schema.modules import spack.spec -import spack.util.spack_yaml as syaml from spack.modules.common import UpstreamModuleIndex from spack.spec import Spec -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] def test_update_dictionary_extending_list(): @@ -175,6 +177,7 @@ def test_load_installed_package_not_in_repo(install_mockery, mock_fetch, monkeyp """Test that installed packages that have been removed are still loadable""" spec = Spec("trivial-install-test-package").concretized() spec.package.do_install() + spack.modules.module_types["tcl"](spec, "default", True).write() def find_nothing(*args): raise spack.repo.UnknownPackageError("Repo package access is disabled for test") @@ -191,26 +194,6 @@ def find_nothing(*args): spack.package_base.PackageBase.uninstall_by_spec(spec) -@pytest.mark.parametrize( - "module_type, old_config,new_config", - [("tcl", "exclude_implicits.yaml", "hide_implicits.yaml")], -) -def test_exclude_include_update(module_type, old_config, new_config): - module_test_data_root = os.path.join(spack.paths.test_path, "data", "modules", module_type) - with open(os.path.join(module_test_data_root, old_config)) as f: - old_yaml = syaml.load(f) - with open(os.path.join(module_test_data_root, new_config)) as f: - new_yaml = syaml.load(f) - - # ensure file that needs updating is translated to the right thing. - assert spack.schema.modules.update_keys(old_yaml, spack.schema.modules.old_to_new_key) - assert new_yaml == old_yaml - # ensure a file that doesn't need updates doesn't get updated - original_new_yaml = new_yaml.copy() - assert not spack.schema.modules.update_keys(new_yaml, spack.schema.modules.old_to_new_key) - assert original_new_yaml == new_yaml - - @pytest.mark.regression("37649") def test_check_module_set_name(mutable_config): """Tests that modules set name are validated correctly and an error is reported if the diff --git a/lib/spack/spack/test/modules/conftest.py b/lib/spack/spack/test/modules/conftest.py index 210a88a65f8ad3..12ee5c1fcd9443 100644 --- a/lib/spack/spack/test/modules/conftest.py +++ b/lib/spack/spack/test/modules/conftest.py @@ -2,6 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pathlib + import pytest import spack.config @@ -13,26 +15,15 @@ @pytest.fixture() def modulefile_content(request): - """Returns a function that generates the content of a module file - as a list of lines. 
- """ - + """Returns a function that generates the content of a module file as a list of lines.""" writer_cls = getattr(request.module, "writer_cls") def _impl(spec_str, module_set_name="default", explicit=True): - # Write the module file - spec = spack.spec.Spec(spec_str) - spec.concretize() + spec = spack.spec.Spec(spec_str).concretized() generator = writer_cls(spec, module_set_name, explicit) generator.write(overwrite=True) - - # Get its filename - filename = generator.layout.filename - - # Retrieve the content - with open(filename) as f: - content = f.readlines() - content = "".join(content).split("\n") + written_module = pathlib.Path(generator.layout.filename) + content = written_module.read_text().splitlines() generator.remove() return content @@ -40,27 +31,21 @@ def _impl(spec_str, module_set_name="default", explicit=True): @pytest.fixture() -def factory(request): - """Function that, given a spec string, returns an instance of the writer - and the corresponding spec. - """ - - # Class of the module file writer +def factory(request, mock_modules_root): + """Given a spec string, returns an instance of the writer and the corresponding spec.""" writer_cls = getattr(request.module, "writer_cls") def _mock(spec_string, module_set_name="default", explicit=True): - spec = spack.spec.Spec(spec_string) - spec.concretize() + spec = spack.spec.Spec(spec_string).concretized() return writer_cls(spec, module_set_name, explicit), spec return _mock @pytest.fixture() -def mock_module_filename(monkeypatch, tmpdir): - filename = str(tmpdir.join("module")) +def mock_module_filename(monkeypatch, tmp_path): + filename = tmp_path / "module" # Set for both module types so we can test both - monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", filename) - monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", filename) - - yield filename + monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", str(filename)) + monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", str(filename)) + yield str(filename) diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index acaae90f696c8e..35c3f3cd97a5db 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -21,7 +21,10 @@ #: Class of the writer tested in this module writer_cls = spack.modules.lmod.LmodModulefileWriter -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] @pytest.fixture(params=["clang@=12.0.0", "gcc@=10.2.1"]) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 00460b6796b9a1..e2f1235db0083b 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -18,7 +18,10 @@ #: Class of the writer tested in this module writer_cls = spack.modules.tcl.TclModulefileWriter -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] @pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename") @@ -279,7 +282,7 @@ def test_projections_all(self, factory, module_configuration): projection = writer.spec.format(writer.conf.projections["all"]) assert projection in writer.layout.use_name - def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_filename): + def 
test_invalid_naming_scheme(self, factory, module_configuration): """Tests the evaluation of an invalid naming scheme.""" module_configuration("invalid_naming_scheme") @@ -290,7 +293,7 @@ def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_ with pytest.raises(RuntimeError): writer.layout.use_name - def test_invalid_token_in_env_name(self, factory, module_configuration, mock_module_filename): + def test_invalid_token_in_env_name(self, factory, module_configuration): """Tests setting environment variables with an invalid name.""" module_configuration("invalid_token_in_env_var_name") @@ -425,40 +428,38 @@ def test_extend_context(self, modulefile_content, module_configuration): @pytest.mark.regression("4400") @pytest.mark.db - @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) - def test_hide_implicits_no_arg(self, module_configuration, database, config_name): - module_configuration(config_name) + def test_hide_implicits_no_arg(self, module_configuration, database): + module_configuration("exclude_implicits") # mpileaks has been installed explicitly when setting up # the tests database mpileaks_specs = database.query("mpileaks") for item in mpileaks_specs: writer = writer_cls(item, "default") - assert not writer.conf.hidden + assert not writer.conf.excluded # callpath is a dependency of mpileaks, and has been pulled # in implicitly callpath_specs = database.query("callpath") for item in callpath_specs: writer = writer_cls(item, "default") - assert writer.conf.hidden + assert writer.conf.excluded @pytest.mark.regression("12105") - @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) - def test_hide_implicits_with_arg(self, module_configuration, config_name): - module_configuration(config_name) + def test_hide_implicits_with_arg(self, module_configuration): + module_configuration("exclude_implicits") # mpileaks is defined as explicit with explicit argument set on writer mpileaks_spec = spack.spec.Spec("mpileaks") mpileaks_spec.concretize() writer = writer_cls(mpileaks_spec, "default", True) - assert not writer.conf.hidden + assert not writer.conf.excluded # callpath is defined as implicit with explicit argument set on writer callpath_spec = spack.spec.Spec("callpath") callpath_spec.concretize() writer = writer_cls(callpath_spec, "default", False) - assert writer.conf.hidden + assert writer.conf.excluded @pytest.mark.regression("9624") @pytest.mark.db diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 5d1561a8eaedb7..6e1c798e51dc1c 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -11,6 +11,7 @@ import spack.build_environment import spack.config +import spack.error import spack.spec import spack.util.environment as environment import spack.util.prefix as prefix diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 3aea141d875d7c..36c7e73e0638ae 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -330,8 +330,11 @@ def add_extra_search_paths(paths): for candidate_item in candidate_items: for directory in search_paths: exe = directory / candidate_item - if exe.is_file() and os.access(str(exe), os.X_OK): - return str(exe) + try: + if exe.is_file() and os.access(str(exe), os.X_OK): + return str(exe) + except OSError: + pass if required: raise CommandNotFoundError("spack requires '%s'. Make sure it is in your path." 
% args[0]) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 880aeb6811a1d0..e73c492c9f01bf 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -894,16 +894,16 @@ e4s-cray-rhel-build: variables: SPACK_CI_STACK_NAME: e4s-cray-sles -# e4s-cray-sles-generate: -# extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] +e4s-cray-sles-generate: + extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] -# e4s-cray-sles-build: -# extends: [ ".build", ".e4s-cray-sles" ] -# trigger: -# include: -# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml -# job: e4s-cray-sles-generate -# strategy: depend -# needs: -# - artifacts: True -# job: e4s-cray-sles-generate +e4s-cray-sles-build: + extends: [ ".build", ".e4s-cray-sles" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-cray-sles-generate + strategy: depend + needs: + - artifacts: True + job: e4s-cray-sles-generate diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 29dc993a15578c..f1f77c20b0286b 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -4,6 +4,7 @@ ci: broken-tests-packages: - gptune - superlu-dist # srun -n 4 hangs + - papyrus broken-specs-url: "https://dummy.io" # s3://spack-binaries/broken-specs" diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 8c872240f9e608..88df9e5e8f37ef 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -197,14 +197,18 @@ spack: - amrex +sycl - arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples + - ginkgo +sycl + - heffte +sycl - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples + - petsc +sycl - slate +sycl - sundials +sycl cxxstd=17 +examples-install - tau +mpi +opencl +level_zero ~pdt +syscall # tau: requires libdrm.so to be installed + - upcxx +level_zero # -- - # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. 
# - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc + # - warpx compute=sycl # warpx: spack-build-wzp6vvo/_deps/fetchedamrex-src/Src/Base/AMReX_RandomEngine.H:18:10: fatal error: 'oneapi/mkl/rng/device.hpp' file not found - py-scipy diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 1ff435bc9bfb7c..c320442cbe0f84 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -8,26 +8,27 @@ spack: definitions: - gcc_system_packages: - matrix: - - - gmake - - gmake@4.3 - - gmake@4.3 cflags=-O3 + - - zlib-ng + - zlib-ng@2.0.7 + - zlib-ng@2.0.7 cflags=-O3 - tcl - - tcl ^gmake@4.3 cflags=-O3 + - tcl ^zlib-ng@2.0.7 cflags=-O3 - hdf5 - hdf5~mpi - hdf5+hl+mpi ^mpich - trilinos - trilinos +hdf5 ^hdf5+hl+mpi ^mpich - - gcc@12 + - gcc@12.3.0 - mpileaks - - lmod + - lmod@8.7.18 + - environment-modules - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11'] - gcc_old_packages: - - gmake%gcc@10 + - zlib-ng%gcc@10 - clang_packages: - matrix: - - [gmake, tcl ^gmake@4.3] + - [zlib-ng, tcl ^zlib-ng@2.0.7] - ['%clang@14'] - gcc_spack_built_packages: - matrix: diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 91ed9dd1728d88..a54f7db414e409 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -401,7 +401,7 @@ _spack() { then SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace --backtrace -V --version --print-shell-vars" else - SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view" + SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug deconcretize dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view" fi } @@ -756,7 +756,7 @@ _spack_compiler() { _spack_compiler_find() { if $list_options then - SPACK_COMPREPLY="-h --help --scope" + SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope" else SPACK_COMPREPLY="" fi @@ -765,7 +765,7 @@ _spack_compiler_find() { _spack_compiler_add() { if $list_options then - SPACK_COMPREPLY="-h --help 
--scope" + SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope" else SPACK_COMPREPLY="" fi @@ -937,6 +937,15 @@ _spack_debug_report() { SPACK_COMPREPLY="-h --help" } +_spack_deconcretize() { + if $list_options + then + SPACK_COMPREPLY="-h --help --root -y --yes-to-all -a --all" + else + _all_packages + fi +} + _spack_dependencies() { if $list_options then @@ -1267,7 +1276,7 @@ _spack_help() { _spack_info() { if $list_options then - SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals" + SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals --variants-by-name" else _all_packages fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 7ea1d1848417b3..1029fa6b45e06c 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -371,6 +371,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerize -d ' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerise -d 'creates recipes to build images for different container runtimes' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a create -d 'create a new package file' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a debug -d 'debugging commands for troubleshooting Spack' +complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deconcretize -d 'remove specs from the concretized lockfile of an environment' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependencies -d 'show dependencies of a package' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependents -d 'show packages that depend on another' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deprecate -d 'replace one package with another via symlinks' @@ -1045,18 +1046,26 @@ complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -f -a he complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -d 'show this help message and exit' # spack compiler find -set -g __fish_spack_optspecs_spack_compiler_find h/help scope= +set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed-toolchain scope= complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -d 'configuration scope to modify' # spack compiler add -set -g __fish_spack_optspecs_spack_compiler_add h/help scope= +set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed-toolchain scope= complete -c spack -n 
'__fish_spack_using_command compiler add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -d 'configuration scope to modify' @@ -1282,6 +1291,18 @@ set -g __fish_spack_optspecs_spack_debug_report h/help complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -d 'show this help message and exit' +# spack deconcretize +set -g __fish_spack_optspecs_spack_deconcretize h/help root y/yes-to-all a/all +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 deconcretize' -f -k -a '(__fish_spack_specs)' +complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command deconcretize' -l root -f -a root +complete -c spack -n '__fish_spack_using_command deconcretize' -l root -d 'deconcretize only specific environment roots' +complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' +complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -f -a all +complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -d 'deconcretize ALL specs that match each supplied spec' + # spack dependencies set -g __fish_spack_optspecs_spack_dependencies h/help i/installed t/transitive deptype= V/no-expand-virtuals complete -c spack -n '__fish_spack_using_command_pos_remainder 0 dependencies' -f -k -a '(__fish_spack_specs)' @@ -1847,7 +1868,7 @@ complete -c spack -n '__fish_spack_using_command help' -l spec -f -a guide complete -c spack -n '__fish_spack_using_command help' -l spec -d 'help on the package specification syntax' # spack info -set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals +set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals variants-by-name complete -c spack -n '__fish_spack_using_command_pos 0 info' -f -a '(__fish_spack_packages)' complete -c spack -n '__fish_spack_using_command info' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command info' -s h -l help -d 'show this help message and exit' @@ -1871,6 +1892,8 @@ complete -c spack -n '__fish_spack_using_command info' -l tests -f -a tests complete -c spack -n '__fish_spack_using_command info' -l tests -d 'output relevant build-time and 
stand-alone tests'
 complete -c spack -n '__fish_spack_using_command info' -l virtuals -f -a virtuals
 complete -c spack -n '__fish_spack_using_command info' -l virtuals -d 'output virtual packages'
+complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -f -a variants_by_name
+complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -d 'list variants in strict name order; don\'t group by condition'
 
 # spack install
 set -g __fish_spack_optspecs_spack_install h/help only= u/until= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum deprecated v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse reuse-deps
diff --git a/var/spack/repos/builtin.mock/packages/adios2/package.py b/var/spack/repos/builtin.mock/packages/adios2/package.py
index 2bd71d6b4dd883..fb2f43ea0e154a 100644
--- a/var/spack/repos/builtin.mock/packages/adios2/package.py
+++ b/var/spack/repos/builtin.mock/packages/adios2/package.py
@@ -8,11 +8,11 @@
 
 
 class Adios2(Package):
-    """The Adaptable Input Output System version 2,
-    developed in the Exascale Computing Program"""
+    """This package has the variants shared and
+    bzip2, both defaulting to True"""
 
-    homepage = "https://someplace.com"
-    url = "https://anotherplace.com"
+    homepage = "https://example.com"
+    url = "https://example.com/adios2.tar.gz"
 
     version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9")
 
diff --git a/var/spack/repos/builtin.mock/packages/ascent/package.py b/var/spack/repos/builtin.mock/packages/ascent/package.py
index 92b074a5f981d7..9a8db472dc07ab 100644
--- a/var/spack/repos/builtin.mock/packages/ascent/package.py
+++ b/var/spack/repos/builtin.mock/packages/ascent/package.py
@@ -7,10 +7,11 @@
 
 
 class Ascent(Package):
-    """Fake ascent package."""
+    """This package has the variants shared, defaulting
+    to True, and adios2, defaulting to False"""
 
     homepage = "https://github.com/Alpine-DAV/ascent"
-    url = "someplace"
+    url = "http://www.example.com/ascent-1.0.tar.gz"
 
     version("0.9.2", sha256="44cd954aa5db478ab40042cd54fd6fcedf25000c3bb510ca23fcff8090531b91")
 
diff --git a/var/spack/repos/builtin.mock/packages/bzip2/package.py b/var/spack/repos/builtin.mock/packages/bzip2/package.py
index cb49cedd3f35f2..326533ac5ea117 100644
--- a/var/spack/repos/builtin.mock/packages/bzip2/package.py
+++ b/var/spack/repos/builtin.mock/packages/bzip2/package.py
@@ -8,10 +8,11 @@
 
 
 class Bzip2(Package):
-    """This is a thing"""
+    """This package has the variant shared,
+    defaulting to True"""
 
-    homepage = "https://someplace.com"
-    url = "https://anotherplace.com"
+    homepage = "https://example.com"
+    url = "https://example.com/bzip2-1.0.8.tar.gz"
 
     version("1.0.8", sha256="ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269")
 
diff --git a/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py
new file mode 100644
index 00000000000000..21e67f8a61bc12
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class DependencyFooBar(Package): + """This package has a variant "bar", which is False by default, and + variant "foo" which is True by default. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/dependency-foo-bar-1.0.tar.gz" + + version("1.0", md5="1234567890abcdefg1234567890098765") + + variant("foo", default=True, description="") + variant("bar", default=False, description="") diff --git a/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py new file mode 100644 index 00000000000000..14516566a9f7d4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ParentFooBar(Package): + """This package has a variant "bar", which is True by default, and depends on another + package which has the same variant defaulting to False. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/parent-foo-bar-1.0.tar.gz" + + version("1.0", md5="abcdefg0123456789abcdefghfedcba0") + + variant("foo", default=True, description="") + variant("bar", default=True, description="") + + depends_on("dependency-foo-bar") diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index a343bf69d7e3c4..07a706590456ac 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -27,6 +27,8 @@ class Abinit(AutotoolsPackage): homepage = "https://www.abinit.org/" url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz" + maintainers("downloadico") + version("9.10.3", sha256="3f2a9aebbf1fee9855a09dd687f88d2317b8b8e04f97b2628ab96fb898dce49b") version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a") version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b") version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6") @@ -85,6 +87,11 @@ class Abinit(AutotoolsPackage): # libxml2 depends_on("libxml2", when="@9:+libxml2") + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + # Cannot ask for +scalapack if it does not depend on MPI conflicts("+scalapack", when="~mpi") @@ -138,19 +145,27 @@ def configure_args(self): oapp(f"--with-optim-flavor={self.spec.variants['optimization-flavor'].value}") if "+wannier90" in spec: - if "@:8" in spec: + if spec.satisfies("@:8"): oapp(f"--with-wannier90-libs=-L{spec['wannier90'].prefix.lib} -lwannier -lm") oapp(f"--with-wannier90-incs=-I{spec['wannier90'].prefix.modules}") oapp(f"--with-wannier90-bins={spec['wannier90'].prefix.bin}") oapp("--enable-connectors") oapp("--with-dft-flavor=atompaw+libxc+wannier90") - else: + elif spec.satisfies("@:9.8"): options.extend( [ f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib} -lwannier", ] ) + else: + options.extend( + [ 
+ f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", + f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib}" + "WANNIER90_LDFLAGS=-lwannier", + ] + ) else: if "@:9.8" in spec: oapp(f"--with-fftw={spec['fftw-api'].prefix}") @@ -164,7 +179,10 @@ def configure_args(self): if "+mpi" in spec: oapp(f"CC={spec['mpi'].mpicc}") oapp(f"CXX={spec['mpi'].mpicxx}") - oapp(f"FC={spec['mpi'].mpifc}") + if spec.satisfies("@9.8:"): + oapp(f"F90={spec['mpi'].mpifc}") + else: + oapp(f"FC={spec['mpi'].mpifc}") # MPI version: # let the configure script auto-detect MPI support from mpi_prefix @@ -186,7 +204,8 @@ def configure_args(self): # BLAS/LAPACK/SCALAPACK-ELPA linalg = spec["lapack"].libs + spec["blas"].libs - if "^mkl" in spec: + is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES + if is_using_intel_libraries: linalg_flavor = "mkl" elif "@9:" in spec and "^openblas" in spec: linalg_flavor = "openblas" @@ -207,7 +226,7 @@ def configure_args(self): oapp(f"--with-linalg-flavor={linalg_flavor}") - if "^mkl" in spec: + if is_using_intel_libraries: fftflavor = "dfti" else: if "+openmp" in spec: @@ -218,7 +237,7 @@ def configure_args(self): oapp(f"--with-fft-flavor={fftflavor}") if "@:8" in spec: - if "^mkl" in spec: + if is_using_intel_libraries: oapp(f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}") oapp(f"--with-fft-libs={spec['fftw-api'].libs.ld_flags}") else: @@ -229,7 +248,7 @@ def configure_args(self): ] ) else: - if "^mkl" in spec: + if is_using_intel_libraries: options.extend( [ f"FFT_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py index 05f936e3f92c5a..e1d757e0827833 100644 --- a/var/spack/repos/builtin/packages/adiak/package.py +++ b/var/spack/repos/builtin/packages/adiak/package.py @@ -20,8 +20,9 @@ class Adiak(CMakePackage): variant("shared", default=True, description="Build dynamic libraries") version( - "0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True, preferred=True + "0.4.0", commit="7e8b7233f8a148b402128ed46b2f0c643e3b397e", submodules=True, preferred=True ) + version("0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True) version( "0.3.0-alpha", commit="054d2693a977ed0e1f16c665b4966bb90924779e", diff --git a/var/spack/repos/builtin/packages/alpgen/package.py b/var/spack/repos/builtin/packages/alpgen/package.py index 52b47adebf6ff0..9d0a96922edb6e 100644 --- a/var/spack/repos/builtin/packages/alpgen/package.py +++ b/var/spack/repos/builtin/packages/alpgen/package.py @@ -16,7 +16,6 @@ class Alpgen(CMakePackage, MakefilePackage): homepage = "http://mlm.home.cern.ch/mlm/alpgen/" url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz" - maintainers("iarspider") tags = ["hep"] version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e") diff --git a/var/spack/repos/builtin/packages/alquimia/package.py b/var/spack/repos/builtin/packages/alquimia/package.py index d2b33d8524473d..523e0936759f77 100644 --- a/var/spack/repos/builtin/packages/alquimia/package.py +++ b/var/spack/repos/builtin/packages/alquimia/package.py @@ -34,7 +34,7 @@ class Alquimia(CMakePackage): depends_on("pflotran@develop", when="@develop") depends_on("petsc@3.10:", when="@develop") - @when("@1.0.10") + @when("@1.0.10:1.1.0") def patch(self): filter_file( "use iso_[cC]_binding", diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py index 
bb085f8681044b..7b15b23856fd77 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Aluminum(CMakePackage, CudaPackage, ROCmPackage): +class Aluminum(CachedCMakePackage, CudaPackage, ROCmPackage): """Aluminum provides a generic interface to high-performance communication libraries, with a focus on allreduce algorithms. Blocking and non-blocking algorithms and GPU-aware @@ -22,208 +22,207 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/LLNL/Aluminum.git" tags = ["ecp", "radiuss"] - maintainers("bvanessen") + maintainers("benson31", "bvanessen") version("master", branch="master") + version("1.4.1", sha256="d130a67fef1cb7a9cb3bbec1d0de426f020fe68c9df6e172c83ba42281cd90e3") + version("1.4.0", sha256="ac54de058f38cead895ec8163f7b1fa7674e4dc5aacba683a660a61babbfe0c6") version("1.3.1", sha256="28ce0af6c6f29f97b7f19c5e45184bd2f8a0b1428f1e898b027d96d47cb74b0b") version("1.3.0", sha256="d0442efbebfdfb89eec793ae65eceb8f1ba65afa9f2e48df009f81985a4c27e3") version("1.2.3", sha256="9b214bdf30f9b7e8e017f83e6615db6be2631f5be3dd186205dbe3aa62f4018a") - version( - "1.2.2", - sha256="c01d9dd98be4cab9b944bae99b403abe76d65e9e1750e7f23bf0105636ad5485", - deprecated=True, - ) - version( - "1.2.1", - sha256="869402708c8a102a67667b83527b4057644a32b8cdf4990bcd1a5c4e5f0e30af", - deprecated=True, - ) - version( - "1.2.0", - sha256="2f3725147f4dbc045b945af68d3d747f5dffbe2b8e928deed64136785210bc9a", - deprecated=True, - ) - version( - "1.1.0", - sha256="78b03e36e5422e8651f400feb4d8a527f87302db025d77aa37e223be6b9bdfc9", - deprecated=True, - ) - version("1.0.0-lbann", tag="v1.0.0-lbann", commit="40a062b1f63e84e074489c0f926f36b806c6b8f3") - version("1.0.0", sha256="028d12e271817214db5c07c77b0528f88862139c3e442e1b12f58717290f414a") - version( - "0.7.0", - sha256="bbb73d2847c56efbe6f99e46b41d837763938483f2e2d1982ccf8350d1148caa", - deprecated=True, - ) - version( - "0.6.0", - sha256="6ca329951f4c7ea52670e46e5020e7e7879d9b56fed5ff8c5df6e624b313e925", - deprecated=True, - ) - version( - "0.5.0", - sha256="dc365a5849eaba925355a8efb27005c5f22bcd1dca94aaed8d0d29c265c064c1", - deprecated=True, - ) - version( - "0.4.0", - sha256="4d6fab5481cc7c994b32fb23a37e9ee44041a9f91acf78f981a97cb8ef57bb7d", - deprecated=True, - ) - version( - "0.3.3", - sha256="26e7f263f53c6c6ee0fe216e981a558dfdd7ec997d0dd2a24285a609a6c68f3b", - deprecated=True, - ) - version( - "0.3.2", - sha256="09b6d1bcc02ac54ba269b1123eee7be20f0104b93596956c014b794ba96b037f", - deprecated=True, - ) - version( - "0.2.1-1", - sha256="066b750e9d1134871709a3e2414b96b166e0e24773efc7d512df2f1d96ee8eef", - deprecated=True, - ) - version( - "0.2.1", - sha256="3d5d15853cccc718f60df68205e56a2831de65be4d96e7f7e8497097e7905f89", - deprecated=True, - ) - version( - "0.2", - sha256="fc8f06c6d8faab17a2aedd408d3fe924043bf857da1094d5553f35c4d2af893b", - deprecated=True, - ) - version( - "0.1", - sha256="3880b736866e439dd94e6a61eeeb5bb2abccebbac82b82d52033bc6c94950bdb", - deprecated=True, - ) - variant("nccl", default=False, description="Builds with support for NCCL communication lib") + # Library capabilities + variant( + "cuda_rma", + default=False, + when="+cuda", + description="Builds with support for CUDA intra-node " + " Put/Get and IPC RMA functionality", + ) variant( "ht", default=False, description="Builds with support for host-enabled MPI" " communication of accelerator data", ) + variant("nccl", default=False, 
description="Builds with support for NCCL communication lib") + variant("shared", default=True, description="Build Aluminum as a shared library") + + # Debugging features + variant("hang_check", default=False, description="Enable hang checking") + variant("trace", default=False, description="Enable runtime tracing") + + # Profiler support + variant("nvtx", default=False, when="+cuda", description="Enable profiling via nvprof/NVTX") variant( - "cuda_rma", + "roctracer", default=False, when="+rocm", description="Enable profiling via rocprof/roctx" + ) + + # Advanced options + variant("mpi_serialize", default=False, description="Serialize MPI operations") + variant("stream_mem_ops", default=False, description="Enable stream memory operations") + variant( + "thread_multiple", default=False, - description="Builds with support for CUDA intra-node " - " Put/Get and IPC RMA functionality", + description="Allow multiple threads to call Aluminum concurrently", ) - variant("rccl", default=False, description="Builds with support for RCCL communication lib") + + # Benchmark/testing support variant( - "ofi_libfabric_plugin", - default=spack.platforms.cray.slingshot_network(), - when="+rccl", - sticky=True, - description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", + "benchmarks", + default=False, + description="Build the Aluminum benchmarking drivers " + "(warning: may significantly increase build time!)", ) variant( - "ofi_libfabric_plugin", - default=spack.platforms.cray.slingshot_network(), - when="+nccl", - sticky=True, - description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", + "tests", + default=False, + description="Build the Aluminum test drivers " + "(warning: may moderately increase build time!)", ) - depends_on("cmake@3.21.0:", type="build", when="@1.0.1:") - depends_on("cmake@3.17.0:", type="build", when="@:1.0.0") - depends_on("mpi") - depends_on("nccl@2.7.0-0:", when="+nccl") - depends_on("hwloc@1.11:") - depends_on("hwloc +cuda +nvml", when="+cuda") - depends_on("hwloc@2.3.0:", when="+rocm") - depends_on("cub", when="@:0.1,0.6.0: +cuda ^cuda@:10") - depends_on("hipcub", when="@:0.1,0.6.0: +rocm") - - depends_on("rccl", when="+rccl") - depends_on("aws-ofi-rccl", when="+rccl +ofi_libfabric_plugin") - depends_on("aws-ofi-nccl", when="+nccl +ofi_libfabric_plugin") + # FIXME: Do we want to expose tuning parameters to the Spack + # recipe? Some are numeric values, some are on/off switches. 
conflicts("~cuda", when="+cuda_rma", msg="CUDA RMA support requires CUDA") conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") - conflicts("+nccl", when="+rccl", msg="NCCL and RCCL support are mutually exclusive") - generator("ninja") + depends_on("mpi") - def cmake_args(self): - spec = self.spec - args = [ - "-DCMAKE_CXX_STANDARD:STRING=17", - "-DALUMINUM_ENABLE_CUDA:BOOL=%s" % ("+cuda" in spec), - "-DALUMINUM_ENABLE_NCCL:BOOL=%s" % ("+nccl" in spec or "+rccl" in spec), - "-DALUMINUM_ENABLE_ROCM:BOOL=%s" % ("+rocm" in spec), - ] - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - if spec.satisfies("^cuda@11.0:"): - args.append("-DCMAKE_CUDA_STANDARD=17") - else: - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": - arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) - - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") - - if spec.satisfies("@0.5:"): - args.extend( - [ - "-DALUMINUM_ENABLE_HOST_TRANSFER:BOOL=%s" % ("+ht" in spec), - "-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+cuda_rma" in spec), - "-DALUMINUM_ENABLE_MPI_CUDA_RMA:BOOL=%s" % ("+cuda_rma" in spec), - ] + depends_on("cmake@3.21.0:", type="build", when="@1.0.1:") + depends_on("hwloc@1.11:") + + with when("+cuda"): + depends_on("cub", when="^cuda@:10") + depends_on("hwloc +cuda +nvml") + with when("+nccl"): + depends_on("nccl@2.7.0-0:") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "nccl +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) + if spack.platforms.cray.slingshot_network(): + depends_on("aws-ofi-nccl") # Note: NOT a CudaPackage + + with when("+rocm"): + for val in ROCmPackage.amdgpu_targets: + depends_on( + "hipcub +rocm amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val) ) - else: - args.append("-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+ht" in spec)) - - if spec.satisfies("@:0.1,0.6.0: +cuda ^cuda@:10"): - args.append("-DCUB_DIR:FILEPATH=%s" % spec["cub"].prefix) - - # Add support for OS X to find OpenMP (LLVM installed via brew) - if self.spec.satisfies("%clang platform=darwin"): - clang = self.compiler.cc - clang_bin = os.path.dirname(clang) - clang_root = os.path.dirname(clang_bin) - args.extend(["-DOpenMP_DIR={0}".format(clang_root)]) - - if "+rocm" in spec: - args.extend( - [ - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - "-DCMAKE_CXX_FLAGS=-std=c++17", - ] + depends_on( + "hwloc@2.3.0: +rocm amdgpu_target={0}".format(val), + when="amdgpu_target={0}".format(val), ) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - if spec.satisfies("%rocmcc@:5"): - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + # RCCL is *NOT* implented as a ROCmPackage + depends_on( + "rccl 
amdgpu_target={0}".format(val), when="+nccl amdgpu_target={0}".format(val) + ) + depends_on( + "roctracer-dev +rocm amdgpu_target={0}".format(val), + when="+roctracer amdgpu_target={0}".format(val), + ) + if spack.platforms.cray.slingshot_network(): + depends_on("aws-ofi-rccl", when="+nccl") + def cmake_args(self): + args = [] return args + + def get_cuda_flags(self): + spec = self.spec + args = [] + if spec.satisfies("^cuda+allow-unsupported-compilers"): + args.append("-allow-unsupported-compiler") + + if spec.satisfies("%clang"): + for flag in spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + args.append("-Xcompiler={0}".format(flag)) + return args + + def std_initconfig_entries(self): + entries = super(Aluminum, self).std_initconfig_entries() + + # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/ + entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x] + cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";") + entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path)) + return entries + + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_compiler_entries() + + # FIXME: Enforce this better in the actual CMake. + entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True)) + entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_hardware_entries() + + entries.append(cmake_cache_option("ALUMINUM_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the + # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. 
+ cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCM", "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) + + return entries + + def initconfig_package_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_package_entries() + + # Library capabilities + entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA", "+cuda_rma" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA_RMA", "+cuda_rma" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_HOST_TRANSFER", "+ht" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_NCCL", "+nccl" in spec)) + + # Debugging features + entries.append(cmake_cache_option("ALUMINUM_DEBUG_HANG_CHECK", "+hang_check" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_TRACE", "+trace" in spec)) + + # Profiler support + entries.append(cmake_cache_option("ALUMINUM_ENABLE_NVPROF", "+nvtx" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCTRACER", "+roctracer" in spec)) + + # Advanced options + entries.append(cmake_cache_option("ALUMINUM_MPI_SERIALIZE", "+mpi_serialize" in spec)) + entries.append( + cmake_cache_option("ALUMINUM_ENABLE_STREAM_MEM_OPS", "+stream_mem_ops" in spec) + ) + entries.append( + cmake_cache_option("ALUMINUM_ENABLE_THREAD_MULTIPLE", "+thread_multiple" in spec) + ) + + # Benchmark/testing support + entries.append(cmake_cache_option("ALUMINUM_ENABLE_BENCHMARKS", "+benchmarks" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_TESTS", "+tests" in spec)) + + return entries diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index 8f14d508ad0085..da5c2ee353f584 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -21,7 +21,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): tags = ["ecp", "e4s"] - maintainers("WeiqunZhang", "asalmgren", "etpalmer63") + maintainers("WeiqunZhang", "asalmgren", "atmyers") version("develop", branch="development") version("23.11", sha256="49b9fea10cd2a2b6cb0fedf7eac8f7889eacc68a05ae5ac7c5702bc0eb1b3848") diff --git a/var/spack/repos/builtin/packages/ams/package.py b/var/spack/repos/builtin/packages/ams/package.py index ba75a25e63f6c3..b95e69abc3d81d 100644 --- a/var/spack/repos/builtin/packages/ams/package.py +++ b/var/spack/repos/builtin/packages/ams/package.py @@ -15,6 +15,12 @@ class Ams(CMakePackage, CudaPackage): maintainers("koparasy", "lpottier") version("develop", branch="develop", submodules=False) + version( + "11.08.23.alpha", + tag="11.08.23.alpha", + commit="1a42b29268bb916dae301654ca0b92fdfe288732", + submodules=False, + ) version( "07.25.23-alpha", tag="07.25.23-alpha", diff --git a/var/spack/repos/builtin/packages/arrayfire/package.py b/var/spack/repos/builtin/packages/arrayfire/package.py index 70cdfc67b200ab..9befc618417835 100644 --- 
a/var/spack/repos/builtin/packages/arrayfire/package.py +++ b/var/spack/repos/builtin/packages/arrayfire/package.py @@ -79,7 +79,7 @@ def cmake_args(self): ] args.append(self.define("CUDA_architecture_build_targets", arch_list)) - if "^mkl" in self.spec: + if self.spec["blas"].name in INTEL_MATH_LIBRARIES: if self.version >= Version("3.8.0"): args.append(self.define("AF_COMPUTE_LIBRARY", "Intel-MKL")) else: diff --git a/var/spack/repos/builtin/packages/asio/package.py b/var/spack/repos/builtin/packages/asio/package.py index 3c66d7df9e3ba1..05f391caa31ecd 100644 --- a/var/spack/repos/builtin/packages/asio/package.py +++ b/var/spack/repos/builtin/packages/asio/package.py @@ -16,6 +16,8 @@ class Asio(AutotoolsPackage): git = "https://github.com/chriskohlhoff/asio.git" maintainers("msimberg", "pauleonix") + license("BSL-1.0") + # As uneven minor versions of asio are not considered stable, they wont be added anymore version("1.28.0", sha256="226438b0798099ad2a202563a83571ce06dd13b570d8fded4840dbc1f97fa328") version("1.26.0", sha256="935583f86825b7b212479277d03543e0f419a55677fa8cb73a79a927b858a72d") diff --git a/var/spack/repos/builtin/packages/bart/package.py b/var/spack/repos/builtin/packages/bart/package.py index cc371f4f5c31da..9fa0baa01833d4 100644 --- a/var/spack/repos/builtin/packages/bart/package.py +++ b/var/spack/repos/builtin/packages/bart/package.py @@ -48,7 +48,7 @@ def edit(self, spec, prefix): if spec["blas"].name == "openblas": env["OPENBLAS"] = "1" - if "^mkl" in spec: + elif spec["blas"].name in INTEL_MATH_LIBRARIES: env["MKL"] = "1" env["MKL_BASE"] = spec["mkl"].prefix.mkl else: diff --git a/var/spack/repos/builtin/packages/batchedblas/package.py b/var/spack/repos/builtin/packages/batchedblas/package.py index c44b50bc81e349..712f270e8cf8fc 100644 --- a/var/spack/repos/builtin/packages/batchedblas/package.py +++ b/var/spack/repos/builtin/packages/batchedblas/package.py @@ -23,7 +23,7 @@ class Batchedblas(MakefilePackage): def edit(self, spec, prefix): CCFLAGS = [self.compiler.openmp_flag, "-I./", "-O3"] BLAS = ["-lm", spec["blas"].libs.ld_flags] - if not spec.satisfies("^mkl"): + if spec["blas"].name not in INTEL_MATH_LIBRARIES: CCFLAGS.append("-D_CBLAS_") if spec.satisfies("%intel"): CCFLAGS.extend(["-Os"]) diff --git a/var/spack/repos/builtin/packages/bfs/package.py b/var/spack/repos/builtin/packages/bfs/package.py index f90c882648c1a7..3e932f142ac060 100644 --- a/var/spack/repos/builtin/packages/bfs/package.py +++ b/var/spack/repos/builtin/packages/bfs/package.py @@ -14,6 +14,7 @@ class Bfs(MakefilePackage): maintainers("alecbcs") + version("3.0.4", sha256="7196f5a624871c91ad051752ea21043c198a875189e08c70ab3167567a72889d") version("3.0.2", sha256="d3456a9aeecc031064db0dbe012e55a11eb97be88d0ab33a90e570fe66457f92") version("3.0.1", sha256="a38bb704201ed29f4e0b989fb2ab3791ca51c3eff90acfc31fff424579bbf962") diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py index 1ad363f747a667..e42e823bbb64bd 100644 --- a/var/spack/repos/builtin/packages/bison/package.py +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -65,6 +65,13 @@ class Bison(AutotoolsPackage, GNUMirrorPackage): patch("nvhpc-3.7.patch", when="@3.7.0:3.7 %nvhpc") conflicts("%intel@:14", when="@3.4.2:", msg="Intel 14 has immature C11 support") + conflicts( + "%oneapi", + msg=( + "bison is likely miscompiled by oneapi compilers, " + "see https://github.com/spack/spack/issues/37172" + ), + ) if sys.platform == "darwin" and macos_version() >= 
Version("10.13"): patch("secure_snprintf.patch", level=0, when="@3.0.4") diff --git a/var/spack/repos/builtin/packages/brahma/package.py b/var/spack/repos/builtin/packages/brahma/package.py index 3932de204f7c92..00f20c1e1c2c5f 100644 --- a/var/spack/repos/builtin/packages/brahma/package.py +++ b/var/spack/repos/builtin/packages/brahma/package.py @@ -15,11 +15,14 @@ class Brahma(CMakePackage): version("develop", branch="dev") version("master", branch="master") + version("0.0.2", tag="v0.0.2", commit="bac58d5aa8962a5c902d401fbf8021aff9104d3c") version("0.0.1", tag="v0.0.1", commit="15156036f14e36511dfc3f3751dc953540526a2b") variant("mpi", default=False, description="Enable MPI support") - depends_on("cpp-logger@0.0.1") - depends_on("gotcha@develop") + depends_on("cpp-logger@0.0.1", when="@:0.0.1") + depends_on("cpp-logger@0.0.2", when="@0.0.2:") + depends_on("gotcha@1.0.4", when="@:0.0.1") + depends_on("gotcha@1.0.5", when="@0.0.2:") depends_on("catch2@3.0.1") depends_on("mpi", when="+mpi") diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py index 58f598ac15286d..e3c618bb303835 100644 --- a/var/spack/repos/builtin/packages/bzip2/package.py +++ b/var/spack/repos/builtin/packages/bzip2/package.py @@ -44,6 +44,10 @@ class Bzip2(Package, SourcewarePackage): if sys.platform != "win32": depends_on("diffutils", type="build") + depends_on("gmake", type="build", when="platform=linux") + depends_on("gmake", type="build", when="platform=cray") + depends_on("gmake", type="build", when="platform=darwin") + @classmethod def determine_version(cls, exe): output = Executable(exe)("--help", output=str, error=str) diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index 82eb629bb82661..001d5151a175aa 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -110,9 +110,13 @@ class Catch2(CMakePackage): ) variant("shared", when="@3:", default=False, description="Build shared library") + variant( + "cxxstd", default="14", values=("14", "17"), multi=False, description="Define C++ standard" + ) + def cmake_args(self): spec = self.spec - args = [] + args = [self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")] # 1.7.0-1.9.3: no control over test builds if spec.satisfies("@1.9.4:2.1.0"): args.append("-DNO_SELFTEST={0}".format("OFF" if self.run_tests else "ON")) diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py index 43165ef8b7292d..3241df9fb6d3a5 100644 --- a/var/spack/repos/builtin/packages/clhep/package.py +++ b/var/spack/repos/builtin/packages/clhep/package.py @@ -19,6 +19,8 @@ class Clhep(CMakePackage): maintainers("drbenmorgan") + version("2.4.7.1", sha256="1c8304a7772ac6b99195f1300378c6e3ddf4ad07c85d64a04505652abb8a55f9") + version("2.4.7.0", sha256="7fa460030bc1a804ea7da8cce7611b93261493bbb66c3cfd3ceec935d7e1b8d3") version("2.4.6.4", sha256="49c89330f1903ef707d3c5d79c16a7c5a6f2c90fc290e2034ee3834809489e57") version("2.4.6.3", sha256="fcd007f11b10ba4af28d027222b63148d0eb44ff7a082eee353bdf921f9c684a") version("2.4.6.2", sha256="aded73e49bac85a5b4e86f64a0ee3d6f3cfe5551b0f7731c78b6d8f9dac6e8dc") diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 44f6b596aa1dd5..15eccd6d3e2646 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ 
b/var/spack/repos/builtin/packages/cmake/package.py @@ -20,7 +20,7 @@ class Cmake(Package): url = "https://github.com/Kitware/CMake/releases/download/v3.19.0/cmake-3.19.0.tar.gz" git = "https://gitlab.kitware.com/cmake/cmake.git" - maintainers("alalazo") + maintainers("alalazo", "johnwparent") tags = ["build-tools", "windows"] @@ -234,13 +234,15 @@ class Cmake(Package): with when("~ownlibs"): depends_on("expat") # expat/zlib are used in CMake/CTest, so why not require them in libarchive. - depends_on("libarchive@3.1.0: xar=expat compression=zlib") - depends_on("libarchive@3.3.3:", when="@3.15.0:") - depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3") - depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") - depends_on("libuv@1.10.0:", when="@3.12.0:") - depends_on("rhash", when="@3.8.0:") - depends_on("jsoncpp build_system=meson", when="@3.2:") + for plat in ["darwin", "cray", "linux"]: + with when("platform=%s" % plat): + depends_on("libarchive@3.1.0: xar=expat compression=zlib") + depends_on("libarchive@3.3.3:", when="@3.15.0:") + depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3") + depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") + depends_on("libuv@1.10.0:", when="@3.12.0:") + depends_on("rhash", when="@3.8.0:") + depends_on("jsoncpp build_system=meson", when="@3.2:") depends_on("ncurses", when="+ncurses") @@ -248,9 +250,6 @@ class Cmake(Package): depends_on("python@2.7.11:", type="build") depends_on("py-sphinx", type="build") - # TODO: update curl package to build with Windows SSL implementation - # at which point we can build with +ownlibs on Windows - conflicts("~ownlibs", when="platform=windows") # Cannot build with Intel, should be fixed in 3.6.2 # https://gitlab.kitware.com/cmake/cmake/issues/16226 patch("intel-c-gnu11.patch", when="@3.6.0:3.6.1") diff --git a/var/spack/repos/builtin/packages/conquest/package.py b/var/spack/repos/builtin/packages/conquest/package.py index 29e9fa5777bc3f..4478881b91e98c 100644 --- a/var/spack/repos/builtin/packages/conquest/package.py +++ b/var/spack/repos/builtin/packages/conquest/package.py @@ -49,6 +49,15 @@ class Conquest(MakefilePackage): build_directory = "src" + # The SYSTEM variable is required above version 1.2. + # Versions 1.2 and older should ignore it. + @property + def build_targets(self): + if self.version > Version("1.2"): + return ["SYSTEM = example", "Conquest"] + else: + return ["Conquest"] + def edit(self, spec, prefix): fflags = "-O3 -fallow-argument-mismatch" ldflags = "" @@ -63,12 +72,23 @@ def edit(self, spec, prefix): lapack_ld = self.spec["lapack"].libs.ld_flags blas_ld = self.spec["blas"].libs.ld_flags - - defs_file = FileFilter("./src/system.make") - - defs_file.filter("COMPFLAGS=.*", f"COMPFLAGS= {fflags}") - defs_file.filter("LINKFLAGS=.*", f"LINKFLAGS= {ldflags}") - defs_file.filter("# BLAS=.*", f"BLAS= {lapack_ld} -llapack {blas_ld} -lblas") + fftw_ld = self.spec["fftw"].libs.ld_flags + libxc_ld = self.spec["libxc"].libs.ld_flags + + # Starting from 1.3 there's automated logic in the Makefile that picks + # from a list of possible files for system/compiler-specific definitions. + # This is useful for manual builds, but since the spack will do its own + # automation of compiler-specific flags, we will override it. 
+ if self.version > Version("1.2"): + defs_file = FileFilter("./src/system/system.example.make") + else: + defs_file = FileFilter("./src/system.make") + + defs_file.filter(".*COMPFLAGS=.*", f"COMPFLAGS= {fflags}") + defs_file.filter(".*LINKFLAGS=.*", f"LINKFLAGS= {ldflags}") + defs_file.filter(".*BLAS=.*", f"BLAS= {lapack_ld} {blas_ld}") + defs_file.filter(".*FFT_LIB=.*", f"FFT_LIB={fftw_ld}") + defs_file.filter(".*XC_LIB=.*", f"XC_LIB={libxc_ld} -lxcf90 -lxc") if "+openmp" in self.spec: defs_file.filter("OMP_DUMMY = DUMMY", "OMP_DUMMY = ") @@ -81,3 +101,5 @@ def edit(self, spec, prefix): def install(self, spec, prefix): mkdirp(prefix.bin) install("./bin/Conquest", prefix.bin) + if self.version > Version("1.2"): + install_tree("./benchmarks/", join_path(prefix, "benchmarks")) diff --git a/var/spack/repos/builtin/packages/cool/package.py b/var/spack/repos/builtin/packages/cool/package.py index 9418a16cbeb5d4..f12f474c167950 100644 --- a/var/spack/repos/builtin/packages/cool/package.py +++ b/var/spack/repos/builtin/packages/cool/package.py @@ -14,7 +14,6 @@ class Cool(CMakePackage): git = "https://gitlab.cern.ch/lcgcool/cool.git" tags = ["hep"] - maintainers("iarspider") version("3.3.10", tag="COOL_3_3_10", commit="110b51c2b50af07cbe1f64a1c67ce9f737c4421d") version("3.3.7", tag="COOL_3_3_7", commit="6f9a29d903e51ecbb26bdc8a694a67db9f28e234") diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 2e765c7539fffb..0d90d0d123c4ac 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -297,6 +297,10 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("dbcsr+cuda", when="+cuda") depends_on("dbcsr+rocm", when="+rocm") + with when("@2022: +rocm"): + depends_on("hipblas") + depends_on("hipfft") + # CP2K needs compiler specific compilation flags, e.g. 
optflags conflicts("%apple-clang") conflicts("%clang") diff --git a/var/spack/repos/builtin/packages/cpp-logger/package.py b/var/spack/repos/builtin/packages/cpp-logger/package.py index f325fa09dfc2ba..14f3fa2d60ab02 100644 --- a/var/spack/repos/builtin/packages/cpp-logger/package.py +++ b/var/spack/repos/builtin/packages/cpp-logger/package.py @@ -16,3 +16,4 @@ class CppLogger(CMakePackage): version("develop", branch="develop") version("master", branch="master") version("0.0.1", tag="v0.0.1", commit="d48b38ab14477bb7c53f8189b8b4be2ea214c28a") + version("0.0.2", tag="v0.0.2", commit="329a48401033d2d2a1f1196141763cab029220ae") diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py index 71e32d9960d536..0d18a6a9199645 100644 --- a/var/spack/repos/builtin/packages/cpr/package.py +++ b/var/spack/repos/builtin/packages/cpr/package.py @@ -18,7 +18,7 @@ class Cpr(CMakePackage): version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31") depends_on("curl") - depends_on("git", when="build") + depends_on("git", type="build") def cmake_args(self): _force = "_FORCE" if self.spec.satisfies("@:1.9") else "" diff --git a/var/spack/repos/builtin/packages/ctffind/package.py b/var/spack/repos/builtin/packages/ctffind/package.py index d1be5c6ea6adbe..ac7bc960c358fb 100644 --- a/var/spack/repos/builtin/packages/ctffind/package.py +++ b/var/spack/repos/builtin/packages/ctffind/package.py @@ -40,7 +40,7 @@ def url_for_version(self, version): def configure_args(self): config_args = [] - if "^mkl" in self.spec: + if self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES: config_args.extend( [ "--enable-mkl", diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py index f3d91693230842..9990ea8a0f65c6 100644 --- a/var/spack/repos/builtin/packages/cube/package.py +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -16,7 +16,10 @@ class Cube(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubegui-4.4.2.tar.gz" + maintainers("swat-jsc") + version("4.8.2", sha256="bf2e02002bb2e5c4f61832ce37b62a440675c6453463014b33b2474aac78f86d") + version("4.8.1", sha256="a8a2a62b4e587c012d3d32385bed7c500db14232419795e0f4272d1dcefc55bc") version("4.8", sha256="1df8fcaea95323e7eaf0cc010784a41243532c2123a27ce93cb7e3241557ff76") version("4.7.1", sha256="7c96bf9ffb8cc132945f706657756fe6f88b7f7a5243ecd3741f599c2006d428") version("4.7", sha256="103fe00fa9846685746ce56231f64d850764a87737dc0407c9d0a24037590f68") diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 919a001fedaa4f..aa142328607a6e 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -14,6 +14,7 @@ class Cubelib(AutotoolsPackage): maintainers = ("swat-jsc", "wrwilliams") version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4") + version("4.8.1", sha256="e4d974248963edab48c5d0fc5831146d391b0ae4632cccafe840bf5f12cd80a9") version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68") version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f") version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938") diff --git a/var/spack/repos/builtin/packages/cubew/package.py 
b/var/spack/repos/builtin/packages/cubew/package.py index bcab0920fd1833..22a56ddda7b22e 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -14,6 +14,7 @@ class Cubew(AutotoolsPackage): maintainers = ("swat-jsc", "wrwilliams") version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2") + version("4.8.1", sha256="42cbd743d87c16e805c8e28e79292ab33de259f2cfba46f2682cb35c1bc032d6") version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3") version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d") version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5") diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index 1b4e06b5919a04..64d95b2ec87de2 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -115,9 +115,9 @@ def configure_args(self): if "+apmpi" in spec: extra_args.append("--enable-apmpi-mod") if "+apmpi_sync" in spec: - extra_args.append(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"]) + extra_args.extend(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"]) if "+apxc" in spec: - extra_args.append(["--enable-apxc-mod"]) + extra_args.append("--enable-apxc-mod") extra_args.append("--with-mem-align=8") extra_args.append("--with-log-path-by-env=DARSHAN_LOG_DIR_PATH") diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 77c3934bdff7fd..a9ca9fe12dcc09 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -254,7 +254,8 @@ def setup_run_environment(self, env): env.set("DD4HEP", self.prefix.examples) env.set("DD4hep_DIR", self.prefix) env.set("DD4hep_ROOT", self.prefix) - env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0]) + if len(self.libs.directories) > 0: + env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0]) def url_for_version(self, version): # dd4hep releases are dashes and padded with a leading zero diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index df6f514134a799..f6df5eb5672881 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -23,9 +23,13 @@ class Dealii(CMakePackage, CudaPackage): # only add for immediate deps. transitive_rpaths = False - generator("ninja") + # FIXME nvcc_wrapper (used for +clang) doesn't handle response files + # correctly when ninja is used. Those are used automatically if paths get too long. 
+ generator("make") version("master", branch="master") + version("9.5.1", sha256="a818b535e6488d3aef7853311657c7b4fadc29a9abe91b7b202b131aad630f5e") + version("9.5.0", sha256="a81f41565f0d3a22d491ee687957dd48053225da72e8d6d628d210358f4a0464") version("9.4.2", sha256="45a76cb400bfcff25cc2d9093d9a5c91545c8367985e6798811c5e9d2a6a6fd4") version("9.4.1", sha256="bfe5e4bf069159f93feb0f78529498bfee3da35baf5a9c6852aa59d7ea7c7a48") version("9.4.0", sha256="238677006cd9173658e5b69cdd1861f800556982db6005a3cc5eb8329cc1e36c") @@ -70,10 +74,11 @@ class Dealii(CMakePackage, CudaPackage): values=("default", "11", "14", "17"), ) variant("doc", default=False, description="Compile with documentation") - variant("examples", default=True, description="Compile tutorial programs") + variant("examples", default=True, description="Compile and install tutorial programs") variant("int64", default=False, description="Compile with 64 bit indices support") variant("mpi", default=True, description="Compile with MPI") variant("optflags", default=False, description="Compile using additional optimization flags") + variant("platform-introspection", default=True, description="Enable platform introspection") variant("python", default=False, description="Compile with Python bindings") # Package variants @@ -81,11 +86,12 @@ class Dealii(CMakePackage, CudaPackage): variant("arborx", default=True, description="Compile with Arborx support") variant("arpack", default=True, description="Compile with Arpack and PArpack (only with MPI)") variant("adol-c", default=True, description="Compile with ADOL-C") - variant("cgal", default=True, when="@9.4:", description="Compile with CGAL") + variant("cgal", default=True, when="@9.4:~cuda", description="Compile with CGAL") variant("ginkgo", default=True, description="Compile with Ginkgo") variant("gmsh", default=True, description="Compile with GMSH") variant("gsl", default=True, description="Compile with GSL") variant("hdf5", default=True, description="Compile with HDF5 (only with MPI)") + variant("kokkos", default=True, when="@9.5:", description="Compile with Kokkos") variant("metis", default=True, description="Compile with Metis") variant("muparser", default=True, description="Compile with muParser") variant("nanoflann", default=False, description="Compile with Nanoflann") @@ -98,14 +104,15 @@ class Dealii(CMakePackage, CudaPackage): variant("slepc", default=True, description="Compile with Slepc (only with Petsc and MPI)") variant("symengine", default=True, description="Compile with SymEngine") variant("simplex", default=True, description="Compile with Simplex support") - # TODO @9.3: enable by default, when we know what to do - # variant('taskflow', default=False, - # description='Compile with multi-threading via Taskflow') - # TODO @9.3: disable by default - # (NB: only if tbb is removed in 9.3, as planned!!!) 
+ variant( + "taskflow", + default=True, + when="@9.6:", + description="Compile with multi-threading via Taskflow", + ) variant("threads", default=True, description="Compile with multi-threading via TBB") variant("trilinos", default=True, description="Compile with Trilinos (only with MPI)") - variant("platform-introspection", default=True, description="Enable platform introspection") + variant("vtk", default=True, when="@9.6:", description="Compile with VTK") # Required dependencies: Light version depends_on("blas") @@ -179,6 +186,8 @@ class Dealii(CMakePackage, CudaPackage): # TODO: next line fixes concretization with petsc depends_on("hdf5+mpi+hl+fortran", when="+hdf5+mpi+petsc") depends_on("hdf5+mpi+hl", when="+hdf5+mpi~petsc") + depends_on("kokkos@3.7:", when="@9.5:+kokkos~trilinos") + depends_on("kokkos@3.7:+cuda+cuda_lambda+wrapper", when="@9.5:+kokkos~trilinos+cuda") # TODO: concretizer bug. The two lines mimic what comes from PETSc # but we should not need it depends_on("metis@5:+int64", when="+metis+int64") @@ -198,7 +207,7 @@ class Dealii(CMakePackage, CudaPackage): depends_on("sundials@:3~pthread", when="@9.0:9.2+sundials") depends_on("sundials@5:5.8", when="@9.3:9.3.3+sundials") depends_on("sundials@5:", when="@9.3.4:+sundials") - # depends_on('taskflow', when='@9.3:+taskflow') + depends_on("taskflow@3.4:", when="@9.6:+taskflow") depends_on("trilinos gotype=int", when="+trilinos@12.18.1:") # TODO: next line fixes concretization with trilinos and adol-c depends_on("trilinos~exodus", when="@9.0:+adol-c+trilinos") @@ -222,12 +231,11 @@ class Dealii(CMakePackage, CudaPackage): # do not require +rol to make concretization of xsdk possible depends_on("trilinos+amesos+aztec+epetra+ifpack+ml+muelu+sacado", when="+trilinos") depends_on("trilinos~hypre", when="+trilinos+int64") - # TODO: temporary disable Tpetra when using CUDA due to - # namespace "Kokkos::Impl" has no member "cuda_abort" - depends_on( - "trilinos@master+rol~amesos2~ifpack2~intrepid2~kokkos~tpetra~zoltan2", - when="+trilinos+cuda", - ) + for _arch in CudaPackage.cuda_arch_values: + arch_str = f"+cuda cuda_arch={_arch}" + trilinos_spec = f"trilinos +wrapper {arch_str}" + depends_on(trilinos_spec, when=f"@9.5:+trilinos {arch_str}") + depends_on("vtk", when="@9.6:+vtk") # Explicitly provide a destructor in BlockVector, # otherwise deal.II may fail to build with Intel compilers. @@ -296,44 +304,60 @@ class Dealii(CMakePackage, CudaPackage): msg="CGAL requires the C++ standard to be set explicitly to 17 or later.", ) + conflicts( + "cxxstd=14", + when="@9.6:", + msg="Deal.II 9.6 onwards requires the C++ standard to be set to 17 or later.", + ) + # Interfaces added in 8.5.0: - for p in ["gsl", "python"]: + for _package in ["gsl", "python"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:8.4.2", msg="The interface to {0} is supported from version 8.5.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # Interfaces added in 9.0.0: - for p in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]: + for _package in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:8.5.1", msg="The interface to {0} is supported from version 9.0.0 " "onwards. 
Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # interfaces added in 9.1.0: - for p in ["ginkgo", "symengine"]: + for _package in ["ginkgo", "symengine"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:9.0", msg="The interface to {0} is supported from version 9.1.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # interfaces added in 9.3.0: - for p in ["simplex", "arborx"]: # , 'taskflow']: + for _package in ["simplex", "arborx"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:9.2", msg="The interface to {0} is supported from version 9.3.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), + ) + + # interfaces added after 9.5.0: + for _package in ["vtk", "taskflow"]: + conflicts( + "+{0}".format(_package), + when="@:9.5", + msg="The interface to {0} is supported from version 9.6.0 " + "onwards. Please explicitly disable this variant " + "via ~{0}".format(_package), ) # Interfaces removed in 9.3.0: @@ -346,18 +370,29 @@ class Dealii(CMakePackage, CudaPackage): # Check that the combination of variants makes sense # 64-bit BLAS: - for p in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]: + for _package in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]: conflicts( - "^{0}+ilp64".format(p), when="@:8.5.1", msg="64bit BLAS is only supported from 9.0.0" + "^{0}+ilp64".format(_package), + when="@:8.5.1", + msg="64bit BLAS is only supported from 9.0.0", ) # MPI requirements: - for p in ["arpack", "hdf5", "netcdf", "p4est", "petsc", "scalapack", "slepc", "trilinos"]: + for _package in [ + "arpack", + "hdf5", + "netcdf", + "p4est", + "petsc", + "scalapack", + "slepc", + "trilinos", + ]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="~mpi", msg="To enable {0} it is necessary to build deal.II with " - "MPI support enabled.".format(p), + "MPI support enabled.".format(_package), ) # Optional dependencies: @@ -432,6 +467,7 @@ def cmake_args(self): # Examples / tutorial programs options.append(self.define_from_variant("DEAL_II_COMPONENT_EXAMPLES", "examples")) + options.append(self.define_from_variant("DEAL_II_COMPILE_EXAMPLES", "examples")) # Enforce the specified C++ standard if spec.variants["cxxstd"].value != "default": @@ -478,9 +514,6 @@ def cmake_args(self): if "+mpi" in spec: options.extend( [ - self.define("CMAKE_C_COMPILER", spec["mpi"].mpicc), - self.define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx), - self.define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc), self.define("MPI_C_COMPILER", spec["mpi"].mpicc), self.define("MPI_CXX_COMPILER", spec["mpi"].mpicxx), self.define("MPI_Fortran_COMPILER", spec["mpi"].mpifc), @@ -499,6 +532,9 @@ def cmake_args(self): self.define("CUDA_HOST_COMPILER", spec["mpi"].mpicxx), ] ) + # Make sure we use the same compiler that Trilinos uses + if "+trilinos" in spec: + options.extend([self.define("CMAKE_CXX_COMPILER", spec["trilinos"].kokkos_cxx)]) # Python bindings if spec.satisfies("@8.5.0:"): @@ -542,23 +578,25 @@ def cmake_args(self): # Optional dependencies for which library names are the same as CMake # variables: for library in ( + "arborx", + "assimp", + "cgal", + "ginkgo", + "gmsh", "gsl", "hdf5", + "metis", + "muparser", + "nanoflann", "p4est", "petsc", "slepc", - "trilinos", - "metis", "sundials", - "nanoflann", - "assimp", - "gmsh", - "muparser", "symengine", - "ginkgo", - "arborx", - "cgal", - ): # 'taskflow'): + 
"taskflow", + "trilinos", + "vtk", + ): options.append( self.define_from_variant("DEAL_II_WITH_{0}".format(library.upper()), library) ) diff --git a/var/spack/repos/builtin/packages/dihydrogen/package.py b/var/spack/repos/builtin/packages/dihydrogen/package.py index ca53a897654b33..a6e030d0e8efb1 100644 --- a/var/spack/repos/builtin/packages/dihydrogen/package.py +++ b/var/spack/repos/builtin/packages/dihydrogen/package.py @@ -8,7 +8,39 @@ from spack.package import * -class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): +# This is a hack to get around some deficiencies in Hydrogen. +def get_blas_entries(inspec): + entries = [] + spec = inspec["hydrogen"] + if "blas=openblas" in spec: + entries.append(cmake_cache_option("DiHydrogen_USE_OpenBLAS", True)) + elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"): + entries.append(cmake_cache_option("DiHydrogen_USE_MKL", True)) + elif "blas=essl" in spec or spec.satisfies("^essl"): + entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL")) + # IF IBM ESSL is used it needs help finding the proper LAPACK libraries + entries.append( + cmake_cache_string( + "LAPACK_LIBRARIES", + "%s;-llapack;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) + ) + entries.append( + cmake_cache_string( + "BLAS_LIBRARIES", + "%s;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) + ) + elif "blas=accelerate" in spec: + entries.append(cmake_cache_option("DiHydrogen_USE_ACCELERATE", True)) + elif spec.satisfies("^netlib-lapack"): + entries.append(cmake_cache_string("BLA_VENDOR", "Generic")) + return entries + + +class Dihydrogen(CachedCMakePackage, CudaPackage, ROCmPackage): """DiHydrogen is the second version of the Hydrogen fork of the well-known distributed linear algebra library, Elemental. 
DiHydrogen aims to be a basic distributed @@ -20,117 +52,179 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/LLNL/DiHydrogen.git" tags = ["ecp", "radiuss"] - maintainers("bvanessen") + maintainers("benson31", "bvanessen") version("develop", branch="develop") version("master", branch="master") - version("0.2.1", sha256="11e2c0f8a94ffa22e816deff0357dde6f82cc8eac21b587c800a346afb5c49ac") - version("0.2.0", sha256="e1f597e80f93cf49a0cb2dbc079a1f348641178c49558b28438963bd4a0bdaa4") - version("0.1", sha256="171d4b8adda1e501c38177ec966e6f11f8980bf71345e5f6d87d0a988fef4c4e") + version("0.3.0", sha256="8dd143441a28e0c7662cd92694e9a4894b61fd48508ac1d77435f342bc226dcf") + + # Primary features + + variant("dace", default=False, sticky=True, description="Enable DaCe backend.") + + variant( + "distconv", + default=False, + sticky=True, + description="Enable (legacy) Distributed Convolution support.", + ) + + variant( + "nvshmem", + default=False, + sticky=True, + description="Enable support for NVSHMEM-based halo exchanges.", + when="+distconv", + ) + + variant( + "shared", default=True, sticky=True, description="Enables the build of shared libraries" + ) + + # Some features of developer interest - variant("al", default=True, description="Builds with Aluminum communication library") variant( "developer", default=False, description="Enable extra warnings and force tests to be enabled.", ) - variant("half", default=False, description="Enable FP16 support on the CPU.") + + variant("ci", default=False, description="Use default options for CI builds") + variant( - "distconv", + "coverage", default=False, - description="Support distributed convolutions: spatial, channel, " "filter.", + description="Decorate build with code coverage instrumentation options", + when="%gcc", ) - variant("nvshmem", default=False, description="Builds with support for NVSHMEM") - variant("openmp", default=False, description="Enable CPU acceleration with OpenMP threads.") - variant("rocm", default=False, description="Enable ROCm/HIP language features.") - variant("shared", default=True, description="Enables the build of shared libraries") - - # Variants related to BLAS variant( - "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library" + "coverage", + default=False, + description="Decorate build with code coverage instrumentation options", + when="%clang", ) - variant("int64_blas", default=False, description="Use 64bit integers for BLAS.") variant( - "blas", - default="openblas", - values=("openblas", "mkl", "accelerate", "essl", "libsci"), - description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci", + "coverage", + default=False, + description="Decorate build with code coverage instrumentation options", + when="%rocmcc", ) - conflicts("~cuda", when="+nvshmem") + # Package conflicts and requirements - depends_on("mpi") - depends_on("catch2", type="test") + conflicts("+nvshmem", when="~cuda", msg="NVSHMEM requires CUDA support.") - # Specify the correct version of Aluminum - depends_on("aluminum@0.4.0:0.4", when="@0.1 +al") - depends_on("aluminum@0.5.0:0.5", when="@0.2.0 +al") - depends_on("aluminum@0.7.0:0.7", when="@0.2.1 +al") - depends_on("aluminum@0.7.0:", when="@:0.0,0.2.1: +al") + conflicts("+cuda", when="+rocm", msg="CUDA and ROCm are mutually exclusive.") - # Add Aluminum variants - depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") - depends_on("aluminum +ht", when="+al 
+distconv") + requires( + "+cuda", + "+rocm", + when="+distconv", + policy="any_of", + msg="DistConv support requires CUDA or ROCm.", + ) - for arch in CudaPackage.cuda_arch_values: - depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) - depends_on("nvshmem cuda_arch=%s" % arch, when="+nvshmem +cuda cuda_arch=%s" % arch) + # Dependencies - # variants +rocm and amdgpu_targets are not automatically passed to - # dependencies, so do it manually. - for val in ROCmPackage.amdgpu_targets: - depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) + depends_on("catch2@3.0.1:", type=("build", "test"), when="+developer") + depends_on("cmake@3.21.0:", type="build") + depends_on("cuda@11.0:", when="+cuda") + depends_on("spdlog@1.11.0", when="@:0.1,0.2:") - depends_on("roctracer-dev", when="+rocm +distconv") + with when("@0.3.0:"): + depends_on("hydrogen +al") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "hydrogen +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) - depends_on("cudnn", when="+cuda") - depends_on("cub", when="^cuda@:10") + for val in ROCmPackage.amdgpu_targets: + depends_on( + "hydrogen amdgpu_target={0}".format(val), + when="+rocm amdgpu_target={0}".format(val), + ) - # Note that #1712 forces us to enumerate the different blas variants - depends_on("openblas", when="blas=openblas") - depends_on("openblas +ilp64", when="blas=openblas +int64_blas") - depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas") + with when("+distconv"): + depends_on("mpi") - depends_on("intel-mkl", when="blas=mkl") - depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas") - depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas") + # All this nonsense for one silly little package. + depends_on("aluminum@1.4.1:") - depends_on("veclibfort", when="blas=accelerate") - conflicts("blas=accelerate +openmp_blas") + # Add Aluminum variants + depends_on("aluminum +cuda +nccl", when="+distconv +cuda") + depends_on("aluminum +rocm +nccl", when="+distconv +rocm") - depends_on("essl", when="blas=essl") - depends_on("essl +ilp64", when="blas=essl +int64_blas") - depends_on("essl threads=openmp", when="blas=essl +openmp_blas") - depends_on("netlib-lapack +external-blas", when="blas=essl") + # TODO: Debug linker errors when NVSHMEM is built with UCX + depends_on("nvshmem +nccl~ucx", when="+nvshmem") - depends_on("cray-libsci", when="blas=libsci") - depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas") + # OMP support is only used in DistConv, and only Apple needs + # hand-holding with it. + depends_on("llvm-openmp", when="%apple-clang") + # FIXME: when="platform=darwin"?? - # Distconv builds require cuda or rocm - conflicts("+distconv", when="~cuda ~rocm") + # CUDA/ROCm arch forwarding - conflicts("+distconv", when="+half") - conflicts("+rocm", when="+half") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "aluminum +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) + + # This is a workaround for a bug in the Aluminum package, + # as it should be responsible for its own NCCL dependency. + # Rather than failing to concretize, we help it along. 
+            depends_on(
+                "nccl cuda_arch={0}".format(arch),
+                when="+distconv +cuda cuda_arch={0}".format(arch),
+            )
-    depends_on("half", when="+half")
+            # NVSHMEM also needs arch forwarding
+            depends_on(
+                "nvshmem +cuda cuda_arch={0}".format(arch),
+                when="+nvshmem +cuda cuda_arch={0}".format(arch),
+            )
-    generator("ninja")
-    depends_on("cmake@3.17.0:", type="build")
+        # Identify versions of cuda_arch that are too old from
+        # lib/spack/spack/build_systems/cuda.py. We require >=60.
+        illegal_cuda_arch_values = [
+            "10",
+            "11",
+            "12",
+            "13",
+            "20",
+            "21",
+            "30",
+            "32",
+            "35",
+            "37",
+            "50",
+            "52",
+            "53",
+        ]
+        for value in illegal_cuda_arch_values:
+            conflicts("cuda_arch=" + value)
-    depends_on("spdlog", when="@:0.1,0.2:")
+        for val in ROCmPackage.amdgpu_targets:
+            depends_on(
+                "aluminum amdgpu_target={0}".format(val),
+                when="+rocm amdgpu_target={0}".format(val),
+            )
-    depends_on("llvm-openmp", when="%apple-clang +openmp")
+        # CUDA-specific distconv dependencies
+        depends_on("cudnn", when="+cuda")
-    # TODO: Debug linker errors when NVSHMEM is built with UCX
-    depends_on("nvshmem +nccl~ucx", when="+nvshmem")
+        # ROCm-specific distconv dependencies
+        depends_on("hipcub", when="+rocm")
+        depends_on("miopen-hip", when="+rocm")
+        depends_on("roctracer-dev", when="+rocm")
-    # Idenfity versions of cuda_arch that are too old
-    # from lib/spack/spack/build_systems/cuda.py
-    illegal_cuda_arch_values = ["10", "11", "12", "13", "20", "21"]
-    for value in illegal_cuda_arch_values:
-        conflicts("cuda_arch=" + value)
+    with when("+ci+coverage"):
+        depends_on("lcov", type=("build", "run"))
+        depends_on("py-gcovr", type=("build", "run"))
+        # Technically it's not used in the build, but CMake sets up a
+        # target, so it needs to be found.

     @property
     def libs(self):
@@ -138,104 +232,127 @@ def libs(self):
         return find_libraries("libH2Core", root=self.prefix, shared=shared, recursive=True)

     def cmake_args(self):
+        args = []
+        return args
+
+    def get_cuda_flags(self):
         spec = self.spec
+        args = []
+        if spec.satisfies("^cuda+allow-unsupported-compilers"):
+            args.append("-allow-unsupported-compiler")
+
+        if spec.satisfies("%clang"):
+            for flag in spec.compiler_flags["cxxflags"]:
+                if "gcc-toolchain" in flag:
+                    args.append("-Xcompiler={0}".format(flag))
+        return args

-        args = [
-            "-DCMAKE_CXX_STANDARD=17",
-            "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY",
-            "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec),
-            "-DH2_ENABLE_ALUMINUM=%s" % ("+al" in spec),
-            "-DH2_ENABLE_CUDA=%s" % ("+cuda" in spec),
-            "-DH2_ENABLE_DISTCONV_LEGACY=%s" % ("+distconv" in spec),
-            "-DH2_ENABLE_OPENMP=%s" % ("+openmp" in spec),
-            "-DH2_ENABLE_FP16=%s" % ("+half" in spec),
-            "-DH2_DEVELOPER_BUILD=%s" % ("+developer" in spec),
-        ]
+    def initconfig_compiler_entries(self):
+        spec = self.spec
+        entries = super(Dihydrogen, self).initconfig_compiler_entries()
+
+        # FIXME: Enforce this better in the actual CMake.
+        entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17"))
+        entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec))
+        entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+
+        # It's possible this should have a `if "platform=cray" in
+        # spec:` in front of it, but it's not clear to me when this is
+        # set. In particular, I don't actually see this blurb showing
+        # up on Tioga builds, which is causing the obvious problem
+        # (namely, the one this was added to supposedly solve in the
+        # first place).
+ entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) + + if spec.satisfies("%clang +distconv platform=darwin"): + clang = self.compiler.cc + clang_bin = os.path.dirname(clang) + clang_root = os.path.dirname(clang_bin) + entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp")) + entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp")) + entries.append( + cmake_cache_string( + "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root) + ) + ) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Dihydrogen, self).initconfig_hardware_entries() - if spec.version < Version("0.3"): - args.append("-DH2_ENABLE_HIP_ROCM=%s" % ("+rocm" in spec)) - else: - args.append("-DH2_ENABLE_ROCM=%s" % ("+rocm" in spec)) - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - if spec.satisfies("^cuda@11.0:"): - args.append("-DCMAKE_CUDA_STANDARD=17") - else: - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": + entries.append(cmake_cache_option("H2_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the + # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. 
+ cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + enable_rocm_var = ( + "H2_ENABLE_ROCM" if spec.version < Version("0.3") else "H2_ENABLE_HIP_ROCM" + ) + entries.append(cmake_cache_option(enable_rocm_var, "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") + return entries - if "+cuda" in spec: - args.append("-DcuDNN_DIR={0}".format(spec["cudnn"].prefix)) + def initconfig_package_entries(self): + spec = self.spec + entries = super(Dihydrogen, self).initconfig_package_entries() - if spec.satisfies("^cuda@:10"): - if "+cuda" in spec or "+distconv" in spec: - args.append("-DCUB_DIR={0}".format(spec["cub"].prefix)) + # Basic H2 options + entries.append(cmake_cache_option("H2_DEVELOPER_BUILD", "+developer" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_TESTS", "+developer" in spec)) - # Add support for OpenMP with external (Brew) clang - if spec.satisfies("%clang +openmp platform=darwin"): - clang = self.compiler.cc - clang_bin = os.path.dirname(clang) - clang_root = os.path.dirname(clang_bin) - args.extend( - [ - "-DOpenMP_CXX_FLAGS=-fopenmp=libomp", - "-DOpenMP_CXX_LIB_NAMES=libomp", - "-DOpenMP_libomp_LIBRARY={0}/lib/libomp.dylib".format(clang_root), - ] - ) + entries.append(cmake_cache_option("H2_ENABLE_CODE_COVERAGE", "+coverage" in spec)) + entries.append(cmake_cache_option("H2_CI_BUILD", "+ci" in spec)) - if "+rocm" in spec: - args.extend( - [ - "-DCMAKE_CXX_FLAGS=-std=c++17", - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - ] - ) - if "platform=cray" in spec: - args.extend(["-DMPI_ASSUME_NO_BUILTIN_MPI=ON"]) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + entries.append(cmake_cache_option("H2_ENABLE_DACE", "+dace" in spec)) - if self.spec.satisfies("^essl"): - # IF IBM ESSL is used it needs help finding the proper LAPACK libraries - args.extend( - [ - "-DLAPACK_LIBRARIES=%s;-llapack;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - "-DBLAS_LIBRARIES=%s;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - ] - ) + # DistConv options + entries.append(cmake_cache_option("H2_ENABLE_ALUMINUM", "+distconv" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_DISTCONV_LEGACY", "+distconv" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_OPENMP", "+distconv" in spec)) - return args + # Paths to stuff, just in case. CMAKE_PREFIX_PATH should catch + # all this, but this shouldn't hurt to have. 
+ entries.append(cmake_cache_path("spdlog_ROOT", spec["spdlog"].prefix)) + + if "+developer" in spec: + entries.append(cmake_cache_path("Catch2_ROOT", spec["catch2"].prefix)) + + if "+coverage" in spec: + entries.append(cmake_cache_path("lcov_ROOT", spec["lcov"].prefix)) + entries.append(cmake_cache_path("genhtml_ROOT", spec["lcov"].prefix)) + if "+ci" in spec: + entries.append(cmake_cache_path("gcovr_ROOT", spec["py-gcovr"].prefix)) + + if "+distconv" in spec: + entries.append(cmake_cache_path("Aluminum_ROOT", spec["aluminum"].prefix)) + if "+cuda" in spec: + entries.append(cmake_cache_path("cuDNN_ROOT", spec["cudnn"].prefix)) + + # Currently this is a hack for all Hydrogen versions. WIP to + # fix this at develop. + entries.extend(get_blas_entries(spec)) + return entries def setup_build_environment(self, env): if self.spec.satisfies("%apple-clang +openmp"): diff --git a/var/spack/repos/builtin/packages/discotec/package.py b/var/spack/repos/builtin/packages/discotec/package.py index 7693f97c83c805..31b03bb7661283 100644 --- a/var/spack/repos/builtin/packages/discotec/package.py +++ b/var/spack/repos/builtin/packages/discotec/package.py @@ -18,6 +18,7 @@ class Discotec(CMakePackage): version("main", branch="main") + variant("compression", default=False, description="Write sparse grid files compressed") variant("ft", default=False, description="DisCoTec with algorithm-based fault tolerance") variant("gene", default=False, description="Build for GENE (as task library)") variant("hdf5", default=True, description="Interpolation output with HDF5") @@ -31,6 +32,7 @@ class Discotec(CMakePackage): depends_on("cmake@3.24.2:", type="build") depends_on("glpk") depends_on("highfive+mpi+boost+ipo", when="+hdf5") + depends_on("lz4", when="+compression") depends_on("mpi") depends_on("selalib", when="+selalib") depends_on("vtk", when="+vtk") @@ -38,6 +40,7 @@ class Discotec(CMakePackage): def cmake_args(self): args = [ self.define("DISCOTEC_BUILD_MISSING_DEPS", False), + self.define_from_variant("DISCOTEC_WITH_COMPRESSION", "compression"), self.define_from_variant("DISCOTEC_ENABLEFT", "ft"), self.define_from_variant("DISCOTEC_GENE", "gene"), self.define_from_variant("DISCOTEC_OPENMP", "openmp"), diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py index 5b3a3215f39c0a..29e60f73398084 100644 --- a/var/spack/repos/builtin/packages/dla-future/package.py +++ b/var/spack/repos/builtin/packages/dla-future/package.py @@ -14,6 +14,9 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/eth-cscs/DLA-Future.git" maintainers = ["rasolca", "albestro", "msimberg", "aurianer"] + license("BSD-3-Clause") + + version("0.3.0", sha256="9887ac0b466ca03d704a8738bc89e68550ed33509578c576390e98e76b64911b") version("0.2.1", sha256="4c2669d58f041304bd618a9d69d9879a42e6366612c2fc932df3894d0326b7fe") version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10") version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1") @@ -42,9 +45,12 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.22:", type="build") depends_on("doxygen", type="build", when="+doc") depends_on("mpi") + + depends_on("blas") + depends_on("lapack") + depends_on("scalapack", when="+scalapack") depends_on("blaspp@2022.05.00:") depends_on("lapackpp@2022.05.00:") - depends_on("scalapack", when="+scalapack") depends_on("umpire~examples") depends_on("umpire~cuda", when="~cuda") 
@@ -55,8 +61,9 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): depends_on("pika@0.15.1:", when="@0.1") depends_on("pika@0.16:", when="@0.2.0") - depends_on("pika@0.17:", when="@0.2.1:") - depends_on("pika-algorithms@0.1:") + depends_on("pika@0.17:", when="@0.2.1") + depends_on("pika@0.18:", when="@0.3.0:") + depends_on("pika-algorithms@0.1:", when="@:0.2") depends_on("pika +mpi") depends_on("pika +cuda", when="+cuda") depends_on("pika +rocm", when="+rocm") @@ -107,7 +114,7 @@ def cmake_args(self): args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) # BLAS/LAPACK - if "^mkl" in spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: vmap = { "none": "seq", "openmp": "omp", diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index f0f10b5ecfff7a..cf40fb4e457eb3 100644 --- a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -18,6 +18,8 @@ class Doxygen(CMakePackage): homepage = "https://www.doxygen.org" url = "https://github.com/doxygen/doxygen/archive/refs/tags/Release_1_9_5.tar.gz" + version("1.9.8", sha256="77371e8a58d22d5e03c52729844d1043e9cbf8d0005ec5112ffa4c8f509ddde8") + version("1.9.7", sha256="691777992a7240ed1f822a5c2ff2c4273b57c1cf9fc143553d87f91a0c5970ee") version("1.9.6", sha256="2a3ee47f7276b759f74bac7614c05a1296a5b028d3f6a79a88e4c213db78e7dc") version("1.9.5", sha256="1c5c9cd4445f694e43f089c17529caae6fe889b732fb0b145211025a1fcda1bb") version("1.9.4", sha256="1b083d15b29817463129ae1ae73b930d883030eeec090ea7a99b3a04fdb51c76") diff --git a/var/spack/repos/builtin/packages/dwz/package.py b/var/spack/repos/builtin/packages/dwz/package.py index cb1b1f5e2accc3..feb805ee6917ee 100644 --- a/var/spack/repos/builtin/packages/dwz/package.py +++ b/var/spack/repos/builtin/packages/dwz/package.py @@ -13,8 +13,6 @@ class Dwz(MakefilePackage, SourcewarePackage): sourceware_mirror_path = "dwz/releases/dwz-0.14.tar.gz" git = "git://sourceware.org/git/dwz.git" - maintainers("iarspider") - depends_on("elf") version("0.14-patches", branch="dwz-0.14-branch") diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py index 05519fc04d4963..7fa77e386a0e2d 100644 --- a/var/spack/repos/builtin/packages/ecflow/package.py +++ b/var/spack/repos/builtin/packages/ecflow/package.py @@ -23,7 +23,8 @@ class Ecflow(CMakePackage): maintainers("climbfuji", "AlexanderRichert-NOAA") - # https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.3-Source.tar.gz?api=v2 + version("5.11.4", sha256="4836a876277c9a65a47a3dc87cae116c3009699f8a25bab4e3afabf160bcf212") + version("5.8.4", sha256="bc628556f8458c269a309e4c3b8d5a807fae7dfd415e27416fe9a3f544f88951") version("5.8.3", sha256="1d890008414017da578dbd5a95cb1b4d599f01d5a3bb3e0297fe94a87fbd81a6") version("4.13.0", sha256="c743896e0ec1d705edd2abf2ee5a47f4b6f7b1818d8c159b521bdff50a403e39") version("4.12.0", sha256="566b797e8d78e3eb93946b923ef540ac61f50d4a17c9203d263c4fd5c39ab1d1") @@ -99,6 +100,9 @@ def cmake_args(self): ssllibs = ";".join(spec["openssl"].libs + spec["zlib"].libs) args.append(self.define("OPENSSL_CRYPTO_LIBRARY", ssllibs)) + if self.spec.satisfies("@5.8.3:"): + args.append("-DCMAKE_CXX_FLAGS=-DBOOST_NO_CXX98_FUNCTION_BASE") + return args # A recursive link in the ecflow source code causes the binary cache diff --git a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py 
index 00fce1ec788247..d764f8e1bda484 100644 --- a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py +++ b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py @@ -76,8 +76,8 @@ def cmake_args(self): @when("+fismahigh") def patch(self): - filter_file("http://www\.ecmwf\.int", "", "cmake/atlas-import.cmake.in") # noqa: W605 - filter_file("int\.ecmwf", "", "cmake/atlas-import.cmake.in") # noqa: W605 + filter_file("http://www.ecmwf.int", "", "cmake/atlas-import.cmake.in", string=True) + filter_file("int.ecmwf", "", "cmake/atlas-import.cmake.in", string=True) filter_file('http[^"]+', "", "cmake/atlas_export.cmake") patterns = [".travis.yml", "tools/install*.sh", "tools/github-sha.sh"] for pattern in patterns: diff --git a/var/spack/repos/builtin/packages/elbencho/package.py b/var/spack/repos/builtin/packages/elbencho/package.py index a9417a4dfc135a..12562f97f30dbb 100644 --- a/var/spack/repos/builtin/packages/elbencho/package.py +++ b/var/spack/repos/builtin/packages/elbencho/package.py @@ -16,9 +16,13 @@ class Elbencho(MakefilePackage): homepage = "https://github.com/breuner/elbencho" url = "https://github.com/breuner/elbencho/archive/refs/tags/v3.0-1.tar.gz" + git = "https://github.com/breuner/elbencho.git" maintainers("ethanjjjjjjj") + version("master", branch="master") + + version("3.0-3", sha256="5769abcdaebefe2984ac3053fb6e91a54e1863d5ea8f72daea830e10b27c0eaf") version("3.0-1", sha256="19dad85e1fc74419dcdf740f11a47d3f6d566770a06e40976755a3404566c11d") version("2.2-5", sha256="4b598639452665a8b79c4c9d8a22ae63fb9b04057635a45e686aa3939ee255b4") version("2.2-3", sha256="0ae2d495d2863b84f21f55b7c526674fab1be723d0697087017946647f79d0e6") diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index 50c2d11a015a0d..38bcd9b3bac08b 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -59,13 +59,15 @@ class EnvironmentModules(Package): variant("X", default=True, description="Build with X functionality") - depends_on("autoconf", type="build", when="@main") - depends_on("automake", type="build", when="@main") - depends_on("libtool", type="build", when="@main") - depends_on("m4", type="build", when="@main") - depends_on("python", type="build", when="@main") - depends_on("py-sphinx@1.0:", type="build", when="@main") - depends_on("gzip", type="build", when="@main") + depends_on("less", type=("build", "run"), when="@4.1:") + with when("@main"): + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("python", type="build") + depends_on("py-sphinx@1.0:", type="build") + depends_on("gzip", type="build") # Dependencies: depends_on("tcl", type=("build", "link", "run")) @@ -135,6 +137,9 @@ def install(self, spec, prefix): ] ) + if self.spec.satisfies("@4.1:"): + config_args.append(f"--with-pager={str(self.spec['less'].prefix.bin.less)}") + configure(*config_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/epics-base/package.py b/var/spack/repos/builtin/packages/epics-base/package.py index 9aae46a46bea54..e3ef52ffe3b3c9 100644 --- a/var/spack/repos/builtin/packages/epics-base/package.py +++ b/var/spack/repos/builtin/packages/epics-base/package.py @@ -26,6 +26,7 @@ class EpicsBase(MakefilePackage): def patch(self): filter_file(r"^\s*CC\s*=.*", "CC = " + spack_cc, "configure/CONFIG.gnuCommon") 
filter_file(r"^\s*CCC\s*=.*", "CCC = " + spack_cxx, "configure/CONFIG.gnuCommon") + filter_file(r"\$\(PERL\)\s+\$\(XSUBPP\)", "$(XSUBPP)", "modules/ca/src/perl/Makefile") @property def install_targets(self): diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index 2d9cde52f7ffa1..7cf4fb67675d24 100644 --- a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -28,7 +28,8 @@ class Esmf(MakefilePackage): # Develop is a special name for spack and is always considered the newest version version("develop", branch="develop") - # generate chksum with spack checksum esmf@x.y.z + # generate chksum with 'spack checksum esmf@x.y.z' + version("8.6.0", sha256="ed057eaddb158a3cce2afc0712b49353b7038b45b29aee86180f381457c0ebe7") version("8.5.0", sha256="acd0b2641587007cc3ca318427f47b9cae5bfd2da8d2a16ea778f637107c29c4") version("8.4.2", sha256="969304efa518c7859567fa6e65efd960df2b4f6d72dbf2c3f29e39e4ab5ae594") version("8.4.1", sha256="1b54cee91aacaa9df400bd284614cbb0257e175f6f3ec9977a2d991ed8aa1af6") diff --git a/var/spack/repos/builtin/packages/fairmq/package.py b/var/spack/repos/builtin/packages/fairmq/package.py index 2af3ce52c1a8b6..4639e25e791ecf 100644 --- a/var/spack/repos/builtin/packages/fairmq/package.py +++ b/var/spack/repos/builtin/packages/fairmq/package.py @@ -14,31 +14,15 @@ class Fairmq(CMakePackage): maintainers("dennisklein", "ChristianTackeGSI") version("dev", branch="dev", submodules=True, get_full_repo=True) - version( - "1.7.0", - tag="v1.7.0", - commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e", - submodules=True, - no_cache=True, - ) - version( - "1.6.0", - tag="v1.6.0", - commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87", - submodules=True, - no_cache=True, - ) - version( - "1.5.0", - tag="v1.5.0", - commit="c8fde17b6a10a467035590fd800bb693f50c4826", - submodules=True, - no_cache=True, - ) - # no_cache=True is currently needed, because FairMQ's build system - # depends on the git metadata, see also - # https://github.com/spack/spack/issues/19972 - # https://github.com/spack/spack/issues/14344 + with default_args(submodules=True, no_cache=True): + # no_cache=True is currently needed, because FairMQ's build system + # depends on the git metadata, see also + # https://github.com/spack/spack/issues/19972 + # https://github.com/spack/spack/issues/14344 + version("1.8.1", tag="v1.8.1", commit="961eca52761a31a0200c567b44e2b2d6d6e50df3") + version("1.7.0", tag="v1.7.0", commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e") + version("1.6.0", tag="v1.6.0", commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87") + version("1.5.0", tag="v1.5.0", commit="c8fde17b6a10a467035590fd800bb693f50c4826") variant( "autobind", default=True, when="@1.7:", description="Override the channel autoBind default" @@ -61,9 +45,10 @@ class Fairmq(CMakePackage): generator("make", "ninja", default="ninja") - depends_on("cmake@3.15:", type="build") - depends_on("faircmakemodules", type="build") - depends_on("git", type="build") + with default_args(type="build"): + depends_on("cmake@3.15:") + depends_on("faircmakemodules") + depends_on("git") depends_on("boost@1.66: +container+program_options+filesystem+date_time+regex") depends_on("fairlogger@1.6: +pretty") @@ -72,6 +57,7 @@ class Fairmq(CMakePackage): def cmake_args(self): args = [ self.define("DISABLE_COLOR", True), + self.define("BUILD_TESTING", self.run_tests), self.define_from_variant("BUILD_EXAMPLES", "examples"), 
self.define_from_variant("FAIRMQ_CHANNEL_DEFAULT_AUTOBIND", "autobind"), ] diff --git a/var/spack/repos/builtin/packages/fdb/package.py b/var/spack/repos/builtin/packages/fdb/package.py index 7dc2f75e76b031..3beb0efbfb19da 100644 --- a/var/spack/repos/builtin/packages/fdb/package.py +++ b/var/spack/repos/builtin/packages/fdb/package.py @@ -16,7 +16,6 @@ class Fdb(CMakePackage): maintainers("skosukhin") - # master version of fdb is subject to frequent changes and is to be used experimentally. version("master", branch="master") version("5.11.23", sha256="09b1d93f2b71d70c7b69472dfbd45a7da0257211f5505b5fcaf55bfc28ca6c65") version("5.11.17", sha256="375c6893c7c60f6fdd666d2abaccb2558667bd450100817c0e1072708ad5591e") @@ -44,6 +43,7 @@ class Fdb(CMakePackage): depends_on("ecbuild@3.7:", type="build", when="@5.11.6:") depends_on("eckit@1.16:") + depends_on("eckit@1.24.4:", when="@5.11.22:") depends_on("eckit+admin", when="+tools") depends_on("eccodes@2.10:") diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index bb150b154dc9b8..7e698119a4c07a 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.56.0", sha256="dfce5aa21bcb1f990397343cdff8a60542b2d18cbd929e46bdb444d21a961efb") version("0.55.0", sha256="2925b8a084e9d1069a96de7689b515ad6f2051ecfb9fbbe4d2643507de7ccd30") version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42") version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7") diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py index df38f938296043..c0a2e0e6085135 100644 --- a/var/spack/repos/builtin/packages/flux-sched/package.py +++ b/var/spack/repos/builtin/packages/flux-sched/package.py @@ -22,6 +22,7 @@ class FluxSched(CMakePackage, AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.30.0", sha256="1ccb2e53f4caede0233f19b2707e868f0cee9d2c957a06f97c22936ba9a43552") version("0.29.0", sha256="b93b18788e677535aa8ef945cdbeeced6d1408a4d16cb4a816ead53f31dd78d2") version("0.28.0", sha256="9431c671bed5d76fd95b4a4a7f36224d4bf76f416a2a1a5c4908f3ca790d434d") version("0.27.0", sha256="1e131924440c904fa0c925b7aa14c47b97f4e67b43af7efd2ebc0ef7ce90eb7c") diff --git a/var/spack/repos/builtin/packages/flux-security/package.py b/var/spack/repos/builtin/packages/flux-security/package.py index 71f0081f7125be..dd87223e663132 100644 --- a/var/spack/repos/builtin/packages/flux-security/package.py +++ b/var/spack/repos/builtin/packages/flux-security/package.py @@ -20,6 +20,7 @@ class FluxSecurity(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.11.0", sha256="d1ef78a871155a252f07e4f0a636eb272d6c2048d5e0e943860dd687c6cf808a") version("0.10.0", sha256="b0f39c5e32322f901454469ffd6154019b6dffafc064b55b3e593f70db6a6f68") version("0.9.0", sha256="2258120c6f32ca0b5b13b166bae56d9bd82a44c6eeaa6bc6187e4a4419bdbcc0") version("0.8.0", sha256="9963628063b4abdff6bece03208444c8f23fbfda33c20544c48b21e9f4819ce2") diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index ea7abc909284ec..d88823705d2efa 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ 
b/var/spack/repos/builtin/packages/fmt/package.py @@ -15,6 +15,8 @@ class Fmt(CMakePackage): url = "https://github.com/fmtlib/fmt/releases/download/7.1.3/fmt-7.1.3.zip" maintainers("msimberg") + license("MIT") + version("10.1.1", sha256="b84e58a310c9b50196cda48d5678d5fa0849bca19e5fdba6b684f0ee93ed9d1b") version("10.1.0", sha256="d725fa83a8b57a3cedf238828fa6b167f963041e8f9f7327649bddc68ae316f4") version("10.0.0", sha256="4943cb165f3f587f26da834d3056ee8733c397e024145ca7d2a8a96bb71ac281") diff --git a/var/spack/repos/builtin/packages/form/package.py b/var/spack/repos/builtin/packages/form/package.py index 13c303ca508802..2d1bed59c035e8 100644 --- a/var/spack/repos/builtin/packages/form/package.py +++ b/var/spack/repos/builtin/packages/form/package.py @@ -11,7 +11,7 @@ class Form(AutotoolsPackage): homepage = "https://www.nikhef.nl/~form/" url = "https://github.com/vermaseren/form/releases/download/v4.2.1/form-4.2.1.tar.gz" - maintainers("iarspider", "tueda") + maintainers("tueda") version("4.3.1", sha256="f1f512dc34fe9bbd6b19f2dfef05fcb9912dfb43c8368a75b796ec472ee8bbce") version("4.3.0", sha256="b234e0d095f73ecb0904cdc3b0d8d8323a9fa7f46770a52fb22267c624aafbf6") diff --git a/var/spack/repos/builtin/packages/fplo/package.py b/var/spack/repos/builtin/packages/fplo/package.py index 2c025218ac8ac2..a9e8cbc2c2fb11 100644 --- a/var/spack/repos/builtin/packages/fplo/package.py +++ b/var/spack/repos/builtin/packages/fplo/package.py @@ -83,7 +83,7 @@ def edit(self, spec, prefix): filter_file(r"^\s*F90\s*=.*", "F90=" + spack_fc, *files) # patch for 64 bit integers - if "^mkl+ilp64" in spec: + if spec["mkl"].satisfies("+ilp64"): setuphelper = FileFilter(join_path(self.build_directory, "PYTHON", "setuphelper.py")) setuphelper.filter("mkl 64bit integer 32bit", "mkl 64bit integer 64bit") diff --git a/var/spack/repos/builtin/packages/gbl/package.py b/var/spack/repos/builtin/packages/gbl/package.py index 98aeb67fab3bc3..62f4bed7be196e 100644 --- a/var/spack/repos/builtin/packages/gbl/package.py +++ b/var/spack/repos/builtin/packages/gbl/package.py @@ -12,7 +12,6 @@ class Gbl(CMakePackage): homepage = "https://www.desy.de/~kleinwrt/GBL/doc/cpp/html/" git = "https://gitlab.desy.de/claus.kleinwort/general-broken-lines.git" - maintainers("iarspider") tags = ["hep"] version("V02-04-01", commit="1061b643c6656fbf7ceba579997eb43f0a9e9d3c") diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 6528d366d747cc..39b9afcb08596d 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -30,6 +30,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): maintainers("adamjstewart") + version("3.8.0", sha256="ec0f78d9dc32352aeac6edc9c3b27a991b91f9dc6f92c452207d84431c58757d") version("3.7.3", sha256="e0a6f0c453ea7eb7c09967f50ac49426808fcd8f259dbc9888140eb69d7ffee6") version("3.7.2", sha256="40c0068591d2c711c699bbb734319398485ab169116ac28005d8302f80b923ad") version("3.7.1", sha256="9297948f0a8ba9e6369cd50e87c7e2442eda95336b94d2b92ef1829d260b9a06") @@ -90,6 +91,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): version("2.0.0", sha256="91704fafeea2349c5e268dc1e2d03921b3aae64b05ee01d59fdfc1a6b0ffc061") # Optional dependencies + variant("archive", default=False, when="@3.7:", description="Optional for vsi7z VFS driver") variant( "armadillo", default=False, @@ -137,9 +139,11 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): variant("kdu", default=False, 
description="Required for JP2KAK and JPIPKAK drivers") variant("kea", default=False, description="Required for KEA driver") variant("lerc", default=False, when="@2.4:", description="Required for LERC compression") + variant("libaec", default=False, when="@3.8:", description="Optional for GRIB driver") variant("libcsf", default=False, description="Required for PCRaster driver") variant("libkml", default=False, description="Required for LIBKML driver") variant("liblzma", default=False, description="Required for Zarr driver") + variant("libqb3", default=False, when="@3.6:", description="Required for MRF driver") variant( "libxml2", default=False, description="Required for XML validation in many OGR drivers" ) @@ -190,7 +194,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): ) variant("pcidsk", default=False, description="Required for PCIDSK driver") variant( - "pcre", default=False, description="Required for REGEXP operator in drivers using SQLite3" + "pcre2", default=False, description="Required for REGEXP operator in drivers using SQLite3" ) variant("pdfium", default=False, when="@2.1:", description="Possible backend for PDF driver") variant("png", default=True, description="Required for PNG driver") @@ -201,7 +205,6 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): default=False, description="Required for PostgreSQL and PostGISRaster drivers", ) - variant("qb3", default=False, when="@3.6:", description="Required for MRF driver") variant( "qhull", default=False, @@ -262,6 +265,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("json-c@0.12.1", when="@:2.2") # Optional dependencies + depends_on("libarchive", when="+archive") depends_on("armadillo", when="+armadillo") depends_on("blas", when="+armadillo") depends_on("lapack", when="+armadillo") @@ -303,6 +307,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): # depends_on('kakadu', when='+kdu') depends_on("kealib", when="+kea") depends_on("lerc", when="+lerc") + depends_on("libaec", when="+libaec") # depends_on('libcsf', when='+libcsf') depends_on("libkml@1.3:", when="+libkml") depends_on("xz", when="+liblzma") @@ -330,8 +335,8 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("oracle-instant-client", when="+oracle") depends_on("parquet-cpp", when="+parquet") # depends_on('pcidsk', when='+pcidsk') - depends_on("pcre2", when="@3.5:+pcre") - depends_on("pcre", when="@:3.4+pcre") + depends_on("pcre2", when="@3.5:+pcre2") + depends_on("pcre", when="@:3.4+pcre2") # depends_on('pdfium', when='+pdfium') depends_on("libpng", when="+png") # depends_on('podofo', when='+podofo') @@ -341,7 +346,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("poppler@:0.71", when="@:2.4 +poppler") depends_on("poppler@:21", when="@:3.4.1 +poppler") depends_on("postgresql", when="+postgresql") - depends_on("qb3", when="+qb3") + depends_on("qb3", when="+libqb3") depends_on("qhull", when="+qhull") depends_on("qhull@2015:", when="@3.5:+qhull") depends_on("qhull@:2020.1", when="@:3.3+qhull") @@ -490,6 +495,7 @@ def cmake_args(self): # be necessary. 
self.define("ENABLE_DEFLATE64", "zlib-ng" not in self.spec), # Optional dependencies + self.define_from_variant("GDAL_USE_ARCHIVE", "archive"), self.define_from_variant("GDAL_USE_ARMADILLO", "armadillo"), self.define_from_variant("GDAL_USE_ARROW", "arrow"), self.define_from_variant("GDAL_USE_BASISU", "basisu"), @@ -519,9 +525,11 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_KDU", "kdu"), self.define_from_variant("GDAL_USE_KEA", "kea"), self.define_from_variant("GDAL_USE_LERC", "lerc"), + self.define_from_variant("GDAL_USE_LIBAEC", "libaec"), self.define_from_variant("GDAL_USE_LIBCSF", "libcsf"), self.define_from_variant("GDAL_USE_LIBKML", "libkml"), self.define_from_variant("GDAL_USE_LIBLZMA", "liblzma"), + self.define_from_variant("GDAL_USE_LIBQB3", "libqb3"), self.define_from_variant("GDAL_USE_LIBXML2", "libxml2"), self.define_from_variant("GDAL_USE_LURATECH", "luratech"), self.define_from_variant("GDAL_USE_LZ4", "lz4"), @@ -541,13 +549,12 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_OPENSSL", "openssl"), self.define_from_variant("GDAL_USE_ORACLE", "oracle"), self.define_from_variant("GDAL_USE_PARQUET", "parquet"), - self.define_from_variant("GDAL_USE_PCRE2", "pcre"), + self.define_from_variant("GDAL_USE_PCRE2", "pcre2"), self.define_from_variant("GDAL_USE_PDFIUM", "pdfium"), self.define_from_variant("GDAL_USE_PNG", "png"), self.define_from_variant("GDAL_USE_PODOFO", "podofo"), self.define_from_variant("GDAL_USE_POPPLER", "poppler"), self.define_from_variant("GDAL_USE_POSTGRESQL", "postgresql"), - self.define_from_variant("GDAL_USE_LIBQB3", "qb3"), self.define_from_variant("GDAL_USE_QHULL", "qhull"), self.define_from_variant("GDAL_USE_RASDAMAN", "rasdaman"), self.define_from_variant("GDAL_USE_RASTERLITE2", "rasterlite2"), @@ -669,7 +676,7 @@ def configure_args(self): self.with_or_without("crypto", variant="openssl", package="openssl"), self.with_or_without("oci", variant="oracle", package="oracle-instant-client"), self.with_or_without("pcidsk", package="pcidsk"), - self.with_or_without("pcre"), + self.with_or_without("pcre", variant="pcre2"), self.with_or_without("pdfium", package="pdfium"), self.with_or_without("png", package="libpng"), self.with_or_without("podofo", package="podofo"), diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index afc4464b098bd8..6f7605fccaf9d0 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -22,6 +22,7 @@ class Geant4(CMakePackage): maintainers("drbenmorgan") + version("11.1.3", sha256="5d9a05d4ccf8b975649eab1d615fc1b8dce5937e01ab9e795bffd04149240db6") version("11.1.2", sha256="e9df8ad18c445d9213f028fd9537e174d6badb59d94bab4eeae32f665beb89af") version("11.1.1", sha256="c5878634da9ba6765ce35a469b2893044f4a6598aa948733da8436cdbfeef7d2") version("11.1.0", sha256="c4a23f2f502efeab56de43a4412b21f65c7ca1b0877b9bc1d7e845ee12edf70a") @@ -152,7 +153,7 @@ def std_when(values): patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17") # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556 - patch("package-cache.patch", level=1, when="@10.7.0:11.2.0^cmake@3.17:") + patch("package-cache.patch", level=1, when="@10.7.0:11.1.2^cmake@3.17:") # NVHPC: "thread-local declaration follows non-thread-local declaration" conflicts("%nvhpc", when="+threads") diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index 
dc23024cdc2116..8675db20f0b25f 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -21,20 +21,30 @@ class Geos(CMakePackage): maintainers("adamjstewart") + version("3.12.1", sha256="d6ea7e492224b51193e8244fe3ec17c4d44d0777f3c32ca4fb171140549a0d03") version("3.12.0", sha256="d96db96011259178a35555a0f6d6e75a739e52a495a6b2aa5efb3d75390fbc39") + version("3.11.3", sha256="80d60a2bbc0cde7745a3366b9eb8c0d65a142b03e063ea0a52c364758cd5ee89") version("3.11.2", sha256="b1f077669481c5a3e62affc49e96eb06f281987a5d36fdab225217e5b825e4cc") version("3.11.1", sha256="6d0eb3cfa9f92d947731cc75f1750356b3bdfc07ea020553daf6af1c768e0be2") version("3.11.0", sha256="79ab8cabf4aa8604d161557b52e3e4d84575acdc0d08cb09ab3f7aaefa4d858a") + version("3.10.6", sha256="078403158da66cad8be39ad1ede5e2fe4b70dcf7bb292fb06a65bdfe8afa6daf") + version("3.10.5", sha256="cc47d95e846e2745c493d8f9f3a9913b1c61f26717a1165898da64352aec4dde") version("3.10.4", sha256="d6fc11bcfd265cbf2714199174e4c3392d657551e5fd84c74c07c863b29357e3") version("3.10.3", sha256="3c141b07d61958a758345d5f54e3c735834b2f4303edb9f67fb26914f0d44770") version("3.10.2", sha256="50bbc599ac386b4c2b3962dcc411f0040a61f204aaef4eba7225ecdd0cf45715") version("3.10.1", sha256="a8148eec9636814c8ab0f8f5266ce6f9b914ed65b0d083fc43bb0bbb01f83648") version("3.10.0", sha256="097d70e3c8f688e59633ceb8d38ad5c9b0d7ead5729adeb925dbc489437abe13") + version("3.9.5", sha256="c6c9aedfa8864fb44ba78911408442382bfd0690cf2d4091ae3805c863789036") version("3.9.4", sha256="70dff2530d8cd2dfaeeb91a5014bd17afb1baee8f0e3eb18e44d5b4dbea47b14") version("3.9.3", sha256="f8b2314e311456f7a449144efb5e3188c2a28774752bc50fc882a3cd5c89ee35") version("3.9.2", sha256="44a5a9be21d7d473436bf621c2ddcc3cf5a8bbe3c786e13229618a3b9d861297") version("3.9.1", sha256="7e630507dcac9dc07565d249a26f06a15c9f5b0c52dd29129a0e3d381d7e382a") + version("3.9.0", sha256="bd8082cf12f45f27630193c78bdb5a3cba847b81e72b20268356c2a4fc065269") + version("3.8.4", sha256="6de8c98c1ae7cb0cd2d726a8dc9b7467308c4b4e05f9df94742244e64e441499") + version("3.8.3", sha256="f98315d1ba35c8d1a94a2947235f9e9dfb7057fdec343683f64ff9ad1061255c") + version("3.8.2", sha256="5a102f4614b0c9291504bbefd847ebac18ea717843506bd251d015c7cf9726b4") version("3.8.1", sha256="4258af4308deb9dbb5047379026b4cd9838513627cb943a44e16c40e42ae17f7") + version("3.8.0", sha256="99114c3dc95df31757f44d2afde73e61b9f742f0b683fd1894cbbee05dda62d5") version("3.7.2", sha256="2166e65be6d612317115bfec07827c11b403c3f303e0a7420a2106bc999d7707") version("3.6.2", sha256="045a13df84d605a866602f6020fc6cbf8bf4c42fb50de237a08926e1d7d7652a") version("3.6.1", sha256="4a2e4e3a7a09a7cfda3211d0f4a235d9fd3176ddf64bd8db14b4ead266189fc5") diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py index 7bdfdb390370d5..37d974f9b70eb0 100644 --- a/var/spack/repos/builtin/packages/ginkgo/package.py +++ b/var/spack/repos/builtin/packages/ginkgo/package.py @@ -24,7 +24,8 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") - version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae", preferred=True) # v1.6.0 + version("1.7.0", commit="49242ff89af1e695d7794f6d50ed9933024b66fe") # v1.7.0 + version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae") # v1.6.0 version("1.5.0", commit="234594c92b58e2384dfb43c2d08e7f43e2b58e7a") # v1.5.0 version("1.5.0.glu_experimental", branch="glu_experimental") 
version("1.4.0", commit="f811917c1def4d0fcd8db3fe5c948ce13409e28e") # v1.4.0 @@ -37,13 +38,18 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): variant("shared", default=True, description="Build shared libraries") variant("full_optimizations", default=False, description="Compile with all optimizations") variant("openmp", default=sys.platform != "darwin", description="Build with OpenMP") - variant("oneapi", default=False, description="Build with oneAPI support") + variant("sycl", default=False, description="Enable SYCL backend") variant("develtools", default=False, description="Compile with develtools enabled") variant("hwloc", default=False, description="Enable HWLOC support") variant("mpi", default=False, description="Enable MPI support") - depends_on("cmake@3.9:", type="build") - depends_on("cuda@9:", when="+cuda") + depends_on("cmake@3.9:", type="build", when="@:1.3.0") + depends_on("cmake@3.13:", type="build", when="@1.4.0:1.6.0") + depends_on("cmake@3.16:", type="build", when="@1.7.0:") + depends_on("cmake@3.18:", type="build", when="+cuda@1.7.0:") + depends_on("cuda@9:", when="+cuda @:1.4.0") + depends_on("cuda@9.2:", when="+cuda @1.5.0:") + depends_on("cuda@10.1:", when="+cuda @1.7.0:") depends_on("mpi", when="+mpi") depends_on("rocthrust", when="+rocm") @@ -60,14 +66,13 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): depends_on("googletest", type="test") depends_on("numactl", type="test", when="+hwloc") - depends_on("intel-oneapi-mkl", when="+oneapi") - depends_on("intel-oneapi-dpl", when="+oneapi") + depends_on("intel-oneapi-mkl", when="+sycl") + depends_on("intel-oneapi-dpl", when="+sycl") + depends_on("intel-oneapi-tbb", when="+sycl") conflicts("%gcc@:5.2.9") conflicts("+rocm", when="@:1.1.1") conflicts("+mpi", when="@:1.4.0") - conflicts("+cuda", when="+rocm") - conflicts("+openmp", when="+oneapi") # ROCm 4.1.0 breaks platform settings which breaks Ginkgo's HIP support. conflicts("^hip@4.1.0:", when="@:1.3.0") @@ -76,22 +81,35 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): conflicts("^rocthrust@4.1.0:", when="@:1.3.0") conflicts("^rocprim@4.1.0:", when="@:1.3.0") + # Ginkgo 1.6.0 start relying on ROCm 4.5.0 + conflicts("^hip@:4.3.1", when="@1.6.0:") + conflicts("^hipblas@:4.3.1", when="@1.6.0:") + conflicts("^hipsparse@:4.3.1", when="@1.6.0:") + conflicts("^rocthrust@:4.3.1", when="@1.6.0:") + conflicts("^rocprim@:4.3.1", when="@1.6.0:") + + conflicts( + "+sycl", when="@:1.4.0", msg="For SYCL support, please use Ginkgo version 1.4.0 and newer." + ) + # Skip smoke tests if compatible hardware isn't found patch("1.4.0_skip_invalid_smoke_tests.patch", when="@1.4.0") - # Newer DPC++ compilers use the updated SYCL 2020 standard which change - # kernel attribute propagation rules. This doesn't work well with the - # initial Ginkgo oneAPI support. - patch("1.4.0_dpcpp_use_old_standard.patch", when="+oneapi @1.4.0") - # Add missing include statement patch("thrust-count-header.patch", when="+rocm @1.5.0") def setup_build_environment(self, env): spec = self.spec - if "+oneapi" in spec: + if "+sycl" in spec: env.set("MKLROOT", join_path(spec["intel-oneapi-mkl"].prefix, "mkl", "latest")) env.set("DPL_ROOT", join_path(spec["intel-oneapi-dpl"].prefix, "dpl", "latest")) + # The `IntelSYCLConfig.cmake` is broken with spack. By default, it + # relies on the CMAKE_CXX_COMPILER being the real ipcx/dpcpp + # compiler. If not, the variable SYCL_COMPILER of that script is + # broken, and all the SYCL detection mechanism is wrong. 
We fix it + # by giving hint environment variables. + env.set("SYCL_LIBRARY_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx))) + env.set("SYCL_INCLUDE_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx))) def cmake_args(self): # Check that the have the correct C++ standard is available @@ -106,18 +124,19 @@ def cmake_args(self): except UnsupportedCompilerFlag: raise InstallError("Ginkgo requires a C++14-compliant C++ compiler") - cxx_is_dpcpp = os.path.basename(self.compiler.cxx) == "dpcpp" - if self.spec.satisfies("+oneapi") and not cxx_is_dpcpp: - raise InstallError( - "Ginkgo's oneAPI backend requires the" + "DPC++ compiler as main CXX compiler." - ) + if self.spec.satisfies("@1.4.0:1.6.0 +sycl") and not self.spec.satisfies( + "%oneapi@2021.3.0:" + ): + raise InstallError("ginkgo +sycl requires %oneapi@2021.3.0:") + elif self.spec.satisfies("@1.7.0: +sycl") and not self.spec.satisfies("%oneapi@2022.1.0:"): + raise InstallError("ginkgo +sycl requires %oneapi@2022.1.0:") spec = self.spec from_variant = self.define_from_variant args = [ from_variant("GINKGO_BUILD_CUDA", "cuda"), from_variant("GINKGO_BUILD_HIP", "rocm"), - from_variant("GINKGO_BUILD_DPCPP", "oneapi"), + from_variant("GINKGO_BUILD_SYCL", "sycl"), from_variant("GINKGO_BUILD_OMP", "openmp"), from_variant("GINKGO_BUILD_MPI", "mpi"), from_variant("BUILD_SHARED_LIBS", "shared"), @@ -161,6 +180,11 @@ def cmake_args(self): args.append( self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip) ) + + if "+sycl" in self.spec: + sycl_compatible_compilers = ["dpcpp", "icpx"] + if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers): + raise InstallError("ginkgo +sycl requires DPC++ (dpcpp) or icpx compiler.") return args @property diff --git a/var/spack/repos/builtin/packages/glab/package.py b/var/spack/repos/builtin/packages/glab/package.py index 1f5d5fc20d6bad..a363d6acf5a2b9 100644 --- a/var/spack/repos/builtin/packages/glab/package.py +++ b/var/spack/repos/builtin/packages/glab/package.py @@ -14,6 +14,7 @@ class Glab(Package): maintainers("alecbcs") + version("1.35.0", sha256="7ed31c7a9b425fc15922f83c5dd8634a2758262a4f25f92583378655fcad6303") version("1.33.0", sha256="447a9b76acb5377642a4975908f610a3082026c176329c7c8cfed1461d2e1570") version("1.31.0", sha256="5648e88e7d6cc993227f5a4e80238af189bed09c7aed1eb12be7408e9a042747") version("1.30.0", sha256="d3c1a9ba723d94a0be10fc343717cf7b61732644f5c42922f1c8d81047164b99") diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 0cfbccb80e6739..29469758c5a2a7 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ b/var/spack/repos/builtin/packages/gmake/package.py @@ -67,6 +67,8 @@ def configure_args(self): return [ "--with-guile" if self.spec.satisfies("+guile") else "--without-guile", "--disable-nls", + # configure needs make to enable dependency tracking, disable explicitly + "--disable-dependency-tracking", ] def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/gotcha/package.py b/var/spack/repos/builtin/packages/gotcha/package.py index 0efc4d986914e8..82bc308fa8f353 100644 --- a/var/spack/repos/builtin/packages/gotcha/package.py +++ b/var/spack/repos/builtin/packages/gotcha/package.py @@ -17,6 +17,7 @@ class Gotcha(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("1.0.5", tag="1.0.5", commit="e28f10c45a0cda0e1ec225eaea6abfe72c8353aa") version("1.0.4", tag="1.0.4", 
commit="46f2aaedc885f140a3f31a17b9b9a9d171f3d6f0") version("1.0.3", tag="1.0.3", commit="1aafd1e30d46ce4e6555c8a4ea5f5edf6a5eade5") version("1.0.2", tag="1.0.2", commit="bed1b7c716ebb0604b3e063121649b5611640f25") diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py index 6dd96c36678682..38ff5a25d4b535 100644 --- a/var/spack/repos/builtin/packages/gperftools/package.py +++ b/var/spack/repos/builtin/packages/gperftools/package.py @@ -17,6 +17,8 @@ class Gperftools(AutotoolsPackage): url = "https://github.com/gperftools/gperftools/releases/download/gperftools-2.7/gperftools-2.7.tar.gz" maintainers("albestro", "eschnett", "msimberg", "teonnik") + license("BSD-3-Clause") + version("2.13", sha256="4882c5ece69f8691e51ffd6486df7d79dbf43b0c909d84d3c0883e30d27323e7") version("2.12", sha256="fb611b56871a3d9c92ab0cc41f9c807e8dfa81a54a4a9de7f30e838756b5c7c6") version("2.11", sha256="8ffda10e7c500fea23df182d7adddbf378a203c681515ad913c28a64b87e24dc") diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 7a4147a6eecab8..d373bb4d5d425e 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -90,9 +90,26 @@ class Gromacs(CMakePackage, CudaPackage): default=False, description="Produces a double precision version of the executables", ) - variant("cufftmp", default=False, when="+cuda+mpi", description="Enable Multi GPU FFT support") + variant( + "cufftmp", + default=False, + when="@2022: +cuda+mpi", + description="Enable multi-GPU FFT support with cuFFTMp", + ) + variant( + "heffte", + default=False, + when="@2021: +sycl+mpi", + description="Enable multi-GPU FFT support with HeFFTe", + ) variant("opencl", default=False, description="Enable OpenCL support") - variant("sycl", default=False, description="Enable SYCL support") + variant("sycl", default=False, when="@2021:", description="Enable SYCL support") + variant( + "intel-data-center-gpu-max", + default=False, + when="@2022:", + description="Enable support for Intel Data Center GPU Max", + ) variant("nosuffix", default=False, description="Disable default suffixes") variant( "build_type", @@ -108,6 +125,18 @@ class Gromacs(CMakePackage, CudaPackage): "Profile", ), ) + variant( + "nblib", + default=True, + when="@2021:", + description="Build and install the NB-LIB C++ API for GROMACS", + ) + variant( + "gmxapi", + default=True, + when="@2019:", + description="Build and install the gmxlib python API for GROMACS", + ) variant( "mdrun_only", default=False, @@ -254,6 +283,7 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("cp2k@8.1:", when="+cp2k") depends_on("nvhpc", when="+cufftmp") + depends_on("heffte", when="+heffte") requires( "%intel", @@ -263,6 +293,11 @@ class Gromacs(CMakePackage, CudaPackage): msg="Only attempt to find gcc libs for Intel compiler if Intel compiler is used.", ) + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + patch("gmxDetectCpu-cmake-3.14.patch", when="@2018:2019.3^cmake@3.14.0:") patch("gmxDetectSimd-cmake-3.14.patch", when="@5.0:2017^cmake@3.14.0:") # 2021.2 will always try to build tests (see 
https://gromacs.bioexcel.eu/t/compilation-failure-for-gromacs-2021-1-and-2021-2-with-cmake-3-20-2/2129) @@ -511,6 +546,19 @@ def cmake_args(self): + f'/{self.spec["nvhpc"].version}/math_libs' ) + if "+heffte" in self.spec: + options.append("-DGMX_USE_HEFFTE=on") + options.append(f'-DHeffte_ROOT={self.spec["heffte"].prefix}') + + if "+intel-data-center-gpu-max" in self.spec: + options.append("-DGMX_GPU_NB_CLUSTER_SIZE=8") + options.append("-DGMX_GPU_NB_NUM_CLUSTER_PER_CELL_X=1") + + if "~nblib" in self.spec: + options.append("-DGMX_INSTALL_NBLIB_API=OFF") + if "~gmxapi" in self.spec: + options.append("-DGMXAPI=OFF") + # Activate SIMD based on properties of the target target = self.spec.target if target >= "zen4": @@ -594,7 +642,7 @@ def cmake_args(self): "-DGMX_OPENMP_MAX_THREADS=%s" % self.spec.variants["openmp_max_threads"].value ) - if "^mkl" in self.spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: # fftw-api@3 is provided by intel-mkl or intel-parllel-studio # we use the mkl interface of gromacs options.append("-DGMX_FFT_LIBRARY=mkl") diff --git a/var/spack/repos/builtin/packages/gzip/package.py b/var/spack/repos/builtin/packages/gzip/package.py index 76a06818251e37..6645969dd0e4fd 100644 --- a/var/spack/repos/builtin/packages/gzip/package.py +++ b/var/spack/repos/builtin/packages/gzip/package.py @@ -12,9 +12,22 @@ class Gzip(AutotoolsPackage): homepage = "https://www.gnu.org/software/gzip/" url = "https://ftp.gnu.org/gnu/gzip/gzip-1.10.tar.gz" - version("1.12", sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085") - version("1.11", sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9") - version("1.10", sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68") + version("1.13", sha256="20fc818aeebae87cdbf209d35141ad9d3cf312b35a5e6be61bfcfbf9eddd212a") + version( + "1.12", + sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085", + deprecated=True, + ) + version( + "1.11", + sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9", + deprecated=True, + ) + version( + "1.10", + sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68", + deprecated=True, + ) # Gzip makes a recursive symlink if built in-source build_directory = "spack-build" diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 0a3903e8015478..ffd91d30288202 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -207,6 +207,7 @@ class Hdf5(CMakePackage): variant("hl", default=False, description="Enable the high-level library") variant("cxx", default=False, description="Enable C++ support") variant("map", when="@1.14:", default=False, description="Enable MAP API support") + variant("subfiling", when="@1.14:", default=False, description="Enable Subfiling VFD support") variant("fortran", default=False, description="Enable Fortran support") variant("java", when="@1.10:", default=False, description="Enable Java support") variant("threadsafe", default=False, description="Enable thread-safe capabilities") @@ -329,7 +330,7 @@ class Hdf5(CMakePackage): patch("fortran-kinds.patch", when="@1.10.7") - # This patch may only be needed with GCC11.2 on macOS, but it's valid for + # This patch may only be needed with GCC 11.2 on macOS, but it's valid for # any of the head HDF5 versions as of 12/2021. 
Since it's impossible to # tell what Fortran version is part of a mixed apple-clang toolchain on # macOS (which is the norm), and this might be an issue for other compilers @@ -607,6 +608,7 @@ def cmake_args(self): # are enabled but the tests are disabled. spec.satisfies("@1.8.22+shared+tools"), ), + self.define_from_variant("HDF5_ENABLE_SUBFILING_VFD", "subfiling"), self.define_from_variant("HDF5_ENABLE_MAP_API", "map"), self.define("HDF5_ENABLE_Z_LIB_SUPPORT", True), self.define_from_variant("HDF5_ENABLE_SZIP_SUPPORT", "szip"), @@ -711,6 +713,17 @@ def fix_package_config(self): if not os.path.exists(tgt_filename): symlink(src_filename, tgt_filename) + @run_after("install") + def link_debug_libs(self): + # When build_type is Debug, the hdf5 build appends _debug to all library names. + # Dependents of hdf5 (netcdf-c etc.) can't handle those, thus make symlinks. + if "build_type=Debug" in self.spec: + libs = find(self.prefix.lib, "libhdf5*_debug.*", recursive=False) + with working_dir(self.prefix.lib): + for lib in libs: + libname = os.path.split(lib)[1] + os.symlink(libname, libname.replace("_debug", "")) + @property @llnl.util.lang.memoized def _output_version(self): diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index 01f1235771a744..df6eb2f948aae2 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -38,6 +38,12 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): variant("shared", default=True, description="Builds with shared libraries") variant("fftw", default=False, description="Builds with support for FFTW backend") + variant( + "sycl", + default=False, + when="%oneapi", + description="Builds with support for oneAPI SYCL+oneMKL backend", + ) variant("mkl", default=False, description="Builds with support for MKL backend") variant("magma", default=False, description="Use helper methods from the UTK MAGMA library") variant("python", default=False, description="Install the Python bindings") @@ -68,6 +74,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocsparse@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipsparse@3.8:", when="+magma+rocm", type=("build", "run")) + depends_on("intel-oneapi-mkl@2023.2.0:", when="+sycl", type=("build", "run")) + depends_on("intel-oneapi-mpi@2021.10.0:", when="+sycl", type=("build", "run")) examples_src_dir = "examples" @@ -78,6 +86,7 @@ def cmake_args(self): self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("Heffte_ENABLE_CUDA", "cuda"), self.define_from_variant("Heffte_ENABLE_ROCM", "rocm"), + self.define_from_variant("Heffte_ENABLE_ONEAPI", "sycl"), self.define_from_variant("Heffte_ENABLE_FFTW", "fftw"), self.define_from_variant("Heffte_ENABLE_MKL", "mkl"), self.define_from_variant("Heffte_ENABLE_MAGMA", "magma"), diff --git a/var/spack/repos/builtin/packages/hpcc/package.py b/var/spack/repos/builtin/packages/hpcc/package.py index 4b281cf42426eb..05d08014482541 100644 --- a/var/spack/repos/builtin/packages/hpcc/package.py +++ b/var/spack/repos/builtin/packages/hpcc/package.py @@ -118,7 +118,10 @@ def edit(self, spec, prefix): lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw_mpi.so")) lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw.so")) - elif self.spec.variants["fft"].value == "mkl" and "^mkl" in spec: + elif ( + 
self.spec.variants["fft"].value == "mkl" + and spec["fftw-api"].name in INTEL_MATH_LIBRARIES + ): mklroot = env["MKLROOT"] self.config["@LAINC@"] += " -I{0}".format(join_path(mklroot, "include/fftw")) libfftw2x_cdft = join_path( diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 8d58956508a1d9..bb1e28f13cd0ee 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -109,6 +109,11 @@ class Hpctoolkit(AutotoolsPackage): "python", default=False, description="Support unwinding Python source.", when="@2023.03:" ) + with when("@develop build_system=autotools"): + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + boost_libs = ( "+atomic +chrono +date_time +filesystem +system +thread +timer" " +graph +regex +shared +multithreaded visibility=global" diff --git a/var/spack/repos/builtin/packages/hpx-kokkos/package.py b/var/spack/repos/builtin/packages/hpx-kokkos/package.py index 27e88238294c6d..e98c0bb17ccd78 100644 --- a/var/spack/repos/builtin/packages/hpx-kokkos/package.py +++ b/var/spack/repos/builtin/packages/hpx-kokkos/package.py @@ -16,6 +16,8 @@ class HpxKokkos(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/STEllAR-GROUP/hpx-kokkos.git" maintainers("G-071", "msimberg") + license("BSL-1.0") + version("master", branch="master") version("0.4.0", sha256="dafef55521cf4bf7ab28ebad546ea1d3fb83fac3a9932e292db4ab3666cd833f") version("0.3.0", sha256="83c1d11dab95552ad0abdae767c71f757811d7b51d82bd231653dc942e89a45d") diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py index 5c0d390e590441..628358b38caf8f 100644 --- a/var/spack/repos/builtin/packages/hpx/package.py +++ b/var/spack/repos/builtin/packages/hpx/package.py @@ -18,6 +18,8 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/STEllAR-GROUP/hpx.git" maintainers("msimberg", "albestro", "teonnik", "hkaiser") + license("BSL-1.0") + tags = ["e4s"] version("master", branch="master") diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py index df00235725b9f1..6bf6114d4175ee 100644 --- a/var/spack/repos/builtin/packages/hydrogen/package.py +++ b/var/spack/repos/builtin/packages/hydrogen/package.py @@ -7,254 +7,269 @@ from spack.package import * +# This limits the versions of lots of things pretty severely. +# +# - Only v1.5.2 and newer are buildable. +# - CMake must be v3.22 or newer. +# - CUDA must be v11.0.0 or newer. + -class Hydrogen(CMakePackage, CudaPackage, ROCmPackage): +class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage): """Hydrogen: Distributed-memory dense and sparse-direct linear algebra and optimization library. 
Based on the Elemental library.""" homepage = "https://libelemental.org" - url = "https://github.com/LLNL/Elemental/archive/v1.0.1.tar.gz" + url = "https://github.com/LLNL/Elemental/archive/v1.5.1.tar.gz" git = "https://github.com/LLNL/Elemental.git" tags = ["ecp", "radiuss"] maintainers("bvanessen") version("develop", branch="hydrogen") + version("1.5.3", sha256="faefbe738bd364d0e26ce9ad079a11c93a18c6f075719a365fd4fa5f1f7a989a") + version("1.5.2", sha256="a902cad3962471216cfa278ba0561c18751d415cd4d6b2417c02a43b0ab2ea33") version("1.5.1", sha256="447da564278f98366906d561d9c8bc4d31678c56d761679c2ff3e59ee7a2895c") - version("1.5.0", sha256="03dd487fb23b9fdbc715554a8ea48c3196a1021502e61b0172ef3fdfbee75180") - version("1.4.0", sha256="c13374ff4a6c4d1076e47ba8c8d91a7082588b9958d1ed89cffb12f1d2e1452e") - version("1.3.4", sha256="7979f6656f698f0bbad6798b39d4b569835b3013ff548d98089fce7c283c6741") - version("1.3.3", sha256="a51a1cfd40ac74d10923dfce35c2c04a3082477683f6b35e7b558ea9f4bb6d51") - version("1.3.2", sha256="50bc5e87955f8130003d04dfd9dcad63107e92b82704f8107baf95b0ccf98ed6") - version("1.3.1", sha256="a8b8521458e9e747f2b24af87c4c2749a06e500019c383e0cefb33e5df6aaa1d") - version("1.3.0", sha256="0f3006aa1d8235ecdd621e7344c99f56651c6836c2e1bc0cf006331b70126b36") - version("1.2.0", sha256="8545975139582ee7bfe5d00f8d83a8697afc285bf7026b0761e9943355974806") - version("1.1.0-1", sha256="73ce05e4166853a186469269cb00a454de71e126b2019f95bbae703b65606808") - version("1.1.0", sha256="b4c12913acd01c72d31f4522266bfeb8df1d4d3b4aef02e07ccbc9a477894e71") - version("1.0.1", sha256="27cf76e1ef1d58bd8f9b1e34081a14a682b7ff082fb5d1da56713e5e0040e528") - version("1.0", sha256="d8a97de3133f2c6b6bb4b80d32b4a4cc25eb25e0df4f0cec0f8cb19bf34ece98") - - variant("shared", default=True, description="Enables the build of shared libraries") - variant("openmp", default=True, description="Make use of OpenMP within CPU-kernels") - variant( - "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library" - ) - variant("quad", default=False, description="Enable quad precision") - variant("int64", default=False, description="Use 64bit integers") - variant("int64_blas", default=False, description="Use 64bit integers for BLAS.") - variant("scalapack", default=False, description="Build with ScaLAPACK library") + # Older versions are no longer supported. + + variant("shared", default=True, description="Enables the build of shared libraries.") variant( "build_type", default="Release", description="The build type to build", values=("Debug", "Release"), ) + variant("int64", default=False, description="Use 64-bit integers") + variant("al", default=True, sticky=True, description="Use Aluminum communication library") variant( - "blas", - default="openblas", - values=("openblas", "mkl", "accelerate", "essl", "libsci"), - description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci", + "cub", default=True, when="+cuda", description="Use CUB/hipCUB for GPU memory management" ) variant( - "mpfr", - default=False, - description="Support GNU MPFR's" "arbitrary-precision floating-point arithmetic", + "cub", default=True, when="+rocm", description="Use CUB/hipCUB for GPU memory management" ) - variant("test", default=False, description="Builds test suite") - variant("al", default=False, description="Builds with Aluminum communication library") + variant("half", default=False, description="Support for FP16 precision data types") + + # TODO: Add netlib-lapack. 
For GPU-enabled builds, typical + # workflows don't touch host BLAS/LAPACK all that often, and even + # less frequently in performance-critical regions. + variant( + "blas", + default="any", + values=("any", "openblas", "mkl", "accelerate", "essl", "libsci"), + description="Specify a host BLAS library preference", + ) + variant("int64_blas", default=False, description="Use 64-bit integers for (host) BLAS.") + + variant("openmp", default=True, description="Make use of OpenMP within CPU kernels") variant( "omp_taskloops", + when="+openmp", default=False, - description="Use OpenMP taskloops instead of parallel for loops.", + description="Use OpenMP taskloops instead of parallel for loops", ) - variant("half", default=False, description="Builds with support for FP16 precision data types") - conflicts("~openmp", when="+omp_taskloops") + # Users should spec this on their own on the command line, no? + # This doesn't affect Hydrogen itself at all. Not one bit. + # variant( + # "openmp_blas", + # default=False, + # description="Use OpenMP for threading in the BLAS library") + + variant("test", default=False, description="Builds test suite") + conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") + conflicts("+half", when="+rocm", msg="FP16 support not implemented for ROCm.") - depends_on("cmake@3.21.0:", type="build", when="@1.5.2:") - depends_on("cmake@3.17.0:", type="build", when="@:1.5.1") - depends_on("cmake@3.22.0:", type="build", when="%cce") + depends_on("cmake@3.22.0:", type="build", when="@1.5.2:") + depends_on("cmake@3.17.0:", type="build", when="@1.5.1") depends_on("mpi") - depends_on("hwloc@1.11:") - depends_on("hwloc +cuda +nvml", when="+cuda") - depends_on("hwloc@2.3.0:", when="+rocm") + depends_on("blas") + depends_on("lapack") # Note that #1712 forces us to enumerate the different blas variants + # Note that this forces us to use OpenBLAS until #1712 is fixed depends_on("openblas", when="blas=openblas") depends_on("openblas +ilp64", when="blas=openblas +int64_blas") - depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas") + depends_on("openblas@0.3.21:0.3.23", when="blas=openblas arch=ppc64le:") depends_on("intel-mkl", when="blas=mkl") depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas") - depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas") + # I don't think this is true... 
depends_on("veclibfort", when="blas=accelerate") - conflicts("blas=accelerate +openmp_blas") depends_on("essl", when="blas=essl") depends_on("essl +ilp64", when="blas=essl +int64_blas") - depends_on("essl threads=openmp", when="blas=essl +openmp_blas") + depends_on("netlib-lapack +external-blas", when="blas=essl") depends_on("cray-libsci", when="blas=libsci") - depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas") # Specify the correct version of Aluminum - depends_on("aluminum@:0.3", when="@:1.3 +al") - depends_on("aluminum@0.4.0:0.4", when="@1.4.0:1.4 +al") - depends_on("aluminum@0.6.0:0.6", when="@1.5.0:1.5.1 +al") - depends_on("aluminum@0.7.0:", when="@:1.0,1.5.2: +al") + depends_on("aluminum@0.7.0:", when="@1.5.2: +al") # Add Aluminum variants - depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") + depends_on("aluminum +cuda +ht", when="+al +cuda") + depends_on("aluminum +rocm +ht", when="+al +rocm") for arch in CudaPackage.cuda_arch_values: - depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) + depends_on("aluminum +cuda cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. for val in ROCmPackage.amdgpu_targets: - depends_on("aluminum amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val) + depends_on( + "aluminum +rocm amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val + ) - # Note that this forces us to use OpenBLAS until #1712 is fixed - depends_on("lapack", when="blas=openblas ~openmp_blas") - - depends_on("scalapack", when="+scalapack") - depends_on("gmp", when="+mpfr") - depends_on("mpc", when="+mpfr") - depends_on("mpfr", when="+mpfr") - - depends_on("cuda", when="+cuda") - depends_on("cub", when="^cuda@:10") - depends_on("hipcub", when="+rocm") + depends_on("cuda@11.0.0:", when="+cuda") + depends_on("hipcub +rocm", when="+rocm +cub") depends_on("half", when="+half") depends_on("llvm-openmp", when="%apple-clang +openmp") - conflicts( - "@0:0.98", - msg="Hydrogen did not exist before v0.99. 
" + "Did you mean to use Elemental instead?", - ) - - generator("ninja") - @property def libs(self): shared = True if "+shared" in self.spec else False - return find_libraries("libEl", root=self.prefix, shared=shared, recursive=True) + return find_libraries("libHydrogen", root=self.prefix, shared=shared, recursive=True) def cmake_args(self): + args = [] + return args + + def get_cuda_flags(self): spec = self.spec + args = [] + if spec.satisfies("^cuda+allow-unsupported-compilers"): + args.append("-allow-unsupported-compiler") + + if spec.satisfies("%clang"): + for flag in spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + args.append("-Xcompiler={0}".format(flag)) + return args - enable_gpu_fp16 = "+cuda" in spec and "+half" in spec - - args = [ - "-DCMAKE_CXX_STANDARD=17", - "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY", - "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec), - "-DHydrogen_ENABLE_OPENMP:BOOL=%s" % ("+openmp" in spec), - "-DHydrogen_ENABLE_QUADMATH:BOOL=%s" % ("+quad" in spec), - "-DHydrogen_USE_64BIT_INTS:BOOL=%s" % ("+int64" in spec), - "-DHydrogen_USE_64BIT_BLAS_INTS:BOOL=%s" % ("+int64_blas" in spec), - "-DHydrogen_ENABLE_MPC:BOOL=%s" % ("+mpfr" in spec), - "-DHydrogen_GENERAL_LAPACK_FALLBACK=ON", - "-DHydrogen_ENABLE_ALUMINUM=%s" % ("+al" in spec), - "-DHydrogen_ENABLE_CUB=%s" % ("+cuda" in spec or "+rocm" in spec), - "-DHydrogen_ENABLE_CUDA=%s" % ("+cuda" in spec), - "-DHydrogen_ENABLE_ROCM=%s" % ("+rocm" in spec), - "-DHydrogen_ENABLE_TESTING=%s" % ("+test" in spec), - "-DHydrogen_ENABLE_HALF=%s" % ("+half" in spec), - "-DHydrogen_ENABLE_GPU_FP16=%s" % enable_gpu_fp16, - ] - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": - arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) - - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") - - if "+rocm" in spec: - args.extend( - [ - "-DCMAKE_CXX_FLAGS=-std=c++17", - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - ] - ) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - cxxflags_str = " ".join(self.spec.compiler_flags["cxxflags"]) - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC {1}" - " -std=c++17".format(arch_str, cxxflags_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + def std_initconfig_entries(self): + entries = super(Hydrogen, self).std_initconfig_entries() + + # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/ + entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x] + cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";") + entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path)) + # IDK why this is here, but it was in the original recipe. So, yeah. 
+ entries.append(cmake_cache_string("CMAKE_INSTALL_MESSAGE", "LAZY")) + return entries + + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_compiler_entries() + + # FIXME: Enforce this better in the actual CMake. + entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True)) + + entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) - # Add support for OS X to find OpenMP (LLVM installed via brew) - if self.spec.satisfies("%clang +openmp platform=darwin"): + if spec.satisfies("%clang +openmp platform=darwin") or spec.satisfies( + "%clang +omp_taskloops platform=darwin" + ): clang = self.compiler.cc clang_bin = os.path.dirname(clang) clang_root = os.path.dirname(clang_bin) - args.extend(["-DOpenMP_DIR={0}".format(clang_root)]) + entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp")) + entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp")) + entries.append( + cmake_cache_string( + "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root) + ) + ) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_hardware_entries() + + entries.append(cmake_cache_option("Hydrogen_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the + # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. 
+ cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + entries.append(cmake_cache_option("Hydrogen_ENABLE_ROCM", "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) + + return entries + + def initconfig_package_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_package_entries() + + # Basic Hydrogen options + entries.append(cmake_cache_option("Hydrogen_ENABLE_TESTING", "+test" in spec)) + entries.append(cmake_cache_option("Hydrogen_GENERAL_LAPACK_FALLBACK", True)) + entries.append(cmake_cache_option("Hydrogen_USE_64BIT_INTS", "+int64" in spec)) + entries.append(cmake_cache_option("Hydrogen_USE_64BIT_BLAS_INTS", "+int64_blas" in spec)) + + # Advanced dependency options + entries.append(cmake_cache_option("Hydrogen_ENABLE_ALUMINUM", "+al" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_CUB", "+cub" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_GPU_FP16", "+cuda +half" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_HALF", "+half" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_OPENMP", "+openmp" in spec)) + entries.append( + cmake_cache_option("Hydrogen_ENABLE_OMP_TASKLOOP", "+omp_taskloops" in spec) + ) + + # Note that CUDA/ROCm are handled above. 
if "blas=openblas" in spec: - args.extend( - [ - "-DHydrogen_USE_OpenBLAS:BOOL=%s" % ("blas=openblas" in spec), - "-DOpenBLAS_DIR:STRING={0}".format(spec["openblas"].prefix), - ] - ) - elif "blas=mkl" in spec: - args.extend(["-DHydrogen_USE_MKL:BOOL=%s" % ("blas=mkl" in spec)]) - elif "blas=accelerate" in spec: - args.extend(["-DHydrogen_USE_ACCELERATE:BOOL=TRUE"]) - elif "blas=essl" in spec: + entries.append(cmake_cache_option("Hydrogen_USE_OpenBLAS", "blas=openblas" in spec)) + # CMAKE_PREFIX_PATH should handle this + entries.append(cmake_cache_string("OpenBLAS_DIR", spec["openblas"].prefix)) + elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"): + entries.append(cmake_cache_option("Hydrogen_USE_MKL", True)) + elif "blas=essl" in spec or spec.satisfies("^essl"): + entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL")) # IF IBM ESSL is used it needs help finding the proper LAPACK libraries - args.extend( - [ - "-DLAPACK_LIBRARIES=%s;-llapack;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - "-DBLAS_LIBRARIES=%s;-lblas" + entries.append( + cmake_cache_string( + "LAPACK_LIBRARIES", + "%s;-llapack;-lblas" % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - ] + ) ) - - if "+omp_taskloops" in spec: - args.extend(["-DHydrogen_ENABLE_OMP_TASKLOOP:BOOL=%s" % ("+omp_taskloops" in spec)]) - - if "+al" in spec: - args.extend( - [ - "-DHydrogen_ENABLE_ALUMINUM:BOOL=%s" % ("+al" in spec), - "-DALUMINUM_DIR={0}".format(spec["aluminum"].prefix), - ] + entries.append( + cmake_cache_string( + "BLAS_LIBRARIES", + "%s;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) ) + elif "blas=accelerate" in spec: + entries.append(cmake_cache_option("Hydrogen_USE_ACCELERATE", True)) + elif spec.satisfies("^netlib-lapack"): + entries.append(cmake_cache_string("BLA_VENDOR", "Generic")) - return args + return entries def setup_build_environment(self, env): if self.spec.satisfies("%apple-clang +openmp"): diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 433d60a2ce7765..6968ec8fd6d08b 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -24,6 +24,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.30.0", sha256="8e2af97d9a25bf44801c6427779f823ebc6f306438066bba7fcbc2a5f9b78421") version("2.29.0", sha256="98b72115407a0e24dbaac70eccae0da3465f8f999318b2c9241631133f42d511") version("2.28.0", sha256="2eea68740cdbc0b49a5e428f06ad7af861d1e169ce6a12d2cf0aa2fc28c4a2ae") version("2.27.0", sha256="507a3d036bb1ac21a55685ae417d769dd02009bde7e09785d0ae7446b4ae1f98") @@ -107,6 +108,7 @@ def patch(self): # fix sequential compilation in 'src/seq_mv' depends_on("rocthrust", when="+rocm") depends_on("rocrand", when="+rocm") depends_on("rocprim", when="+rocm") + depends_on("hipblas", when="+rocm +superlu-dist") depends_on("umpire", when="+umpire") depends_on("caliper", when="+caliper") @@ -258,6 +260,8 @@ def configure_args(self): if "+rocm" in spec: rocm_pkgs = ["rocsparse", "rocthrust", "rocprim", "rocrand"] + if "+superlu-dist" in spec: + rocm_pkgs.append("hipblas") rocm_inc = "" for pkg in rocm_pkgs: if "^" + pkg in spec: diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index 7dd8ab41227aaa..c66235f382dae8 100644 --- 
a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -153,8 +153,7 @@ class IntelMkl(IntelPackage): multi=False, ) - provides("blas") - provides("lapack") + provides("blas", "lapack") provides("lapack@3.9.0", when="@2020.4") provides("lapack@3.7.0", when="@11.3") provides("scalapack") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index fe2b7f34387c26..01410cd18d6fb8 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiAdvisor(IntelOneApiPackage): +class IntelOneapiAdvisor(IntelOneApiLibraryPackageWithSdk): """Intel Advisor is a design and analysis tool for developing performant code. The tool supports C, C++, Fortran, SYCL, OpenMP, OpenCL code, and Python. It helps with the following: Performant diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index 0c1e0f79abaf31..9f376cffc7809b 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiInspector(IntelOneApiPackage): +class IntelOneapiInspector(IntelOneApiLibraryPackageWithSdk): """Intel Inspector is a dynamic memory and threading error debugger for C, C++, and Fortran applications that run on Windows and Linux operating systems. Save money: locate the root cause of memory, diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 2eef32fa14e0a2..db3fdd6d7ea8c0 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -126,8 +126,7 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): provides("fftw-api@3") provides("scalapack", when="+cluster") provides("mkl") - provides("lapack") - provides("blas") + provides("lapack", "blas") @property def component_dir(self): @@ -149,21 +148,6 @@ def libs(self): else: return IntelOneApiStaticLibraryList(libs, system_libs) - def setup_run_environment(self, env): - super().setup_run_environment(env) - - # Support RPATH injection to the library directories when the '-mkl' or '-qmkl' - # flag of the Intel compilers are used outside the Spack build environment. We - # should not try to take care of other compilers because the users have to - # provide the linker flags anyway and are expected to take care of the RPATHs - # flags too. We prefer the __INTEL_POST_CFLAGS/__INTEL_POST_FFLAGS flags over - # the PRE ones so that any other RPATHs provided by the users on the command - # line come before and take precedence over the ones we inject here. 
- for d in self._find_mkl_libs(self.spec.satisfies("+shared")).directories: - flag = "-Wl,-rpath,{0}".format(d) - env.append_path("__INTEL_POST_CFLAGS", flag, separator=" ") - env.append_path("__INTEL_POST_FFLAGS", flag, separator=" ") - def setup_dependent_build_environment(self, env, dependent_spec): # Only if environment modifications are desired (default is +envmods) if self.spec.satisfies("+envmods"): diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index c2ed3f164e42af..f9ab11eef76c91 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiVtune(IntelOneApiPackage): +class IntelOneapiVtune(IntelOneApiLibraryPackageWithSdk): """Intel VTune Profiler is a profiler to optimize application performance, system performance, and system configuration for HPC, cloud, IoT, media, storage, and more. CPU, GPU, and FPGA: Tune diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 84810bacfa3370..50e7021de85d41 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -536,8 +536,7 @@ class IntelParallelStudio(IntelPackage): provides("ipp", when="+ipp") provides("mkl", when="+mkl") - provides("blas", when="+mkl") - provides("lapack", when="+mkl") + provides("blas", "lapack", when="+mkl") provides("scalapack", when="+mkl") provides("fftw-api@3", when="+mkl@professional.2017:") diff --git a/var/spack/repos/builtin/packages/intel-xed/package.py b/var/spack/repos/builtin/packages/intel-xed/package.py index 555d4154a220fd..7cda2358e20d37 100644 --- a/var/spack/repos/builtin/packages/intel-xed/package.py +++ b/var/spack/repos/builtin/packages/intel-xed/package.py @@ -21,6 +21,8 @@ class IntelXed(Package): # Current versions now have actual releases and tags. version("main", branch="main") + version("2023.10.11", tag="v2023.10.11", commit="d7d46c73fb04a1742e99c9382a4acb4ed07ae272") + version("2023.08.21", tag="v2023.08.21", commit="01a6da8090af84cd52f6c1070377ae6e885b078f") version("2023.07.09", tag="v2023.07.09", commit="539a6a349cf7538a182ed3ee1f48bb9317eb185f") version("2023.06.07", tag="v2023.06.07", commit="4dc77137f651def2ece4ac0416607b215c18e6e4") version("2023.04.16", tag="v2023.04.16", commit="a3055cd0209f5c63c88e280bbff9579b1e2942e2") @@ -40,7 +42,12 @@ class IntelXed(Package): # Match xed more closely with the version of mbuild at the time. 
resource( - name="mbuild", placement=mdir, git=mbuild_git, tag="v2022.07.28", when="@2022.07:9999" + name="mbuild", + placement=mdir, + git=mbuild_git, + tag="v2022.07.28", + commit="75cb46e6536758f1a3cdb3d6bd83a4a9fd0338bb", + when="@2022.07:9999", ) resource( @@ -48,7 +55,7 @@ class IntelXed(Package): placement=mdir, git=mbuild_git, tag="v2022.04.17", - commit="ef19f00de14a9c2c253c1c9b1119e1617280e3f2", + commit="b41485956bf65d51b8c2379768de7eaaa7a4245b", when="@:2022.06", ) diff --git a/var/spack/repos/builtin/packages/interproscan/package.py b/var/spack/repos/builtin/packages/interproscan/package.py index 82380135a76feb..4143dc6ff2899b 100644 --- a/var/spack/repos/builtin/packages/interproscan/package.py +++ b/var/spack/repos/builtin/packages/interproscan/package.py @@ -45,21 +45,21 @@ class Interproscan(Package): ) resource( - when="5.56-89.0 +databases", + when="@5.56-89.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.56-89.0/alt/interproscan-data-5.56-89.0.tar.gz", sha256="49cd0c69711f9469f3b68857f4581b23ff12765ca2b12893d18e5a9a5cd8032d", ) resource( - when="5.38-76.0 +databases", + when="@5.38-76.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.38-76.0/alt/interproscan-data-5.38-76.0.tar.gz", sha256="e05e15d701037504f92ecf849c20317e70df28e78ff1945826b3c1e16d9b9cce", ) resource( - when="5.36-75.0 +databases", + when="@5.36-75.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.36-75.0/alt/interproscan-data-5.36-75.0.tar.gz", sha256="e9b1e6f2d1c20d06661a31a08c973bc8ddf039a4cf1e45ec4443200375e5d6a4", diff --git a/var/spack/repos/builtin/packages/ispc/package.py b/var/spack/repos/builtin/packages/ispc/package.py index 8bef2ce2f584c1..0a2d60a9108b11 100644 --- a/var/spack/repos/builtin/packages/ispc/package.py +++ b/var/spack/repos/builtin/packages/ispc/package.py @@ -25,6 +25,8 @@ class Ispc(CMakePackage): executables = ["^ispc$"] version("main", branch="main") + version("1.21.1", sha256="99bbb1d1f15bc4433d6a63b5bb35b321af3e3af753c3b28a61850d1748e8a89f") + version("1.21.0", sha256="023782f721bfb5893bac24bc2153a8214c916be82c290bf63a3ec6678949b5ef") version("1.20.0", sha256="8bd30ded7f96859451ead1cecf6f58ac8e937288fe0e5b98c56f6eba4be370b4") version("1.19.0", sha256="c1aeae4bdfb28004a6949394ea1b3daa3fdf12f646e17fcc0614861077dc8b6a") version("1.18.1", sha256="fee76d42fc0129f81489b7c2b9143e22a44c281940693c1c13cf1e3dd2ab207f") @@ -45,15 +47,17 @@ class Ispc(CMakePackage): depends_on("tbb", type="link", when="platform=linux @1.20:") depends_on("llvm+clang") depends_on("llvm libcxx=none", when="platform=darwin") - depends_on("llvm@13:15", when="@1.19:") - depends_on("llvm@11.0:14.0", when="@1.18") - depends_on("llvm@11:14", when="@1.17") - depends_on("llvm@:12", when="@:1.16") - depends_on("llvm@11:", when="@1.16") - depends_on("llvm@10:11", when="@1.15.0:1.15") - depends_on("llvm@10.0:10", when="@1.13:1.14") depends_on("llvm targets=arm,aarch64", when="target=arm:") depends_on("llvm targets=arm,aarch64", when="target=aarch64:") + depends_on("llvm@:17", when="@:1.21") + depends_on("llvm@:15", when="@:1.20") + depends_on("llvm@:14", when="@:1.18") + depends_on("llvm@:12", when="@:1.16") + depends_on("llvm@:11", when="@:1.15") + depends_on("llvm@:10", when="@:1.14") + depends_on("llvm@13:", when="@1.19:") + depends_on("llvm@11:", when="@1.16:") + depends_on("llvm@10:", when="@1.13:") patch( "don-t-assume-that-ncurses-zlib-are-system-libraries.patch", diff --git 
a/var/spack/repos/builtin/packages/itk/package.py b/var/spack/repos/builtin/packages/itk/package.py index d0123b60f6a4ab..0a956f3dfdb30a 100644 --- a/var/spack/repos/builtin/packages/itk/package.py +++ b/var/spack/repos/builtin/packages/itk/package.py @@ -71,7 +71,7 @@ class Itk(CMakePackage): ) def cmake_args(self): - use_mkl = "^mkl" in self.spec + use_mkl = self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES args = [ self.define("BUILD_SHARED_LIBS", True), self.define("ITK_USE_SYSTEM_LIBRARIES", True), diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py index 646120ebb02913..516cdc1e4d23b2 100644 --- a/var/spack/repos/builtin/packages/jemalloc/package.py +++ b/var/spack/repos/builtin/packages/jemalloc/package.py @@ -13,8 +13,6 @@ class Jemalloc(AutotoolsPackage): homepage = "http://jemalloc.net/" url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2" - maintainers("iarspider") - version("5.3.0", sha256="2db82d1e7119df3e71b7640219b6dfe84789bc0537983c3b7ac4f7189aecfeaa") version("5.2.1", sha256="34330e5ce276099e2e8950d9335db5a875689a4c6a56751ef3b1d8c537f887f6") version("5.2.0", sha256="74be9f44a60d2a99398e706baa921e4efde82bf8fd16e5c0643c375c5851e3b4") diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 4115f148726d13..55716116d577f2 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -26,6 +26,7 @@ class Julia(MakefilePackage): maintainers("vchuravy", "haampie", "giordano") version("master", branch="master") + version("1.9.3", sha256="8d7dbd8c90e71179e53838cdbe24ff40779a90d7360e29766609ed90d982081d") version("1.9.2", sha256="015438875d591372b80b09d01ba899657a6517b7c72ed41222298fef9d4ad86b") version("1.9.0", sha256="48f4c8a7d5f33d0bc6ce24226df20ab49e385c2d0c3767ec8dfdb449602095b2") version("1.8.5", sha256="d31026cc6b275d14abce26fd9fd5b4552ac9d2ce8bde4291e494468af5743031") @@ -163,7 +164,8 @@ class Julia(MakefilePackage): ) # patchelf 0.13 is required because the rpath patch uses --add-rpath - depends_on("patchelf@0.13:", type="build") + # patchelf 0.18 breaks (at least) libjulia-internal.so + depends_on("patchelf@0.13:0.17", type="build") depends_on("perl", type="build") depends_on("libwhich", type="build") depends_on("python", type="build") diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 06a350821fbcba..2b9a7ad4d5b6e1 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -22,6 +22,7 @@ class Justbuild(Package): maintainers("asartori86") version("master", branch="master") + version("1.2.3", tag="v1.2.3", commit="45e9c1c85399f00372ad8b72894979a0002d8f95") version("1.2.2", tag="v1.2.2", commit="e1ee04684c34ae30ac3c91b6753e99a81a9dc51c") version("1.2.1", tag="v1.2.1", commit="959cd90083d0c783389cd09e187c98322c16469f") version("1.1.4", tag="v1.1.4", commit="32e96afd159f2158ca129fd00bf02c273d8e1e48") diff --git a/var/spack/repos/builtin/packages/lammps/package.py b/var/spack/repos/builtin/packages/lammps/package.py index a44c7bd603cc6c..b2d3d111334b8e 100644 --- a/var/spack/repos/builtin/packages/lammps/package.py +++ b/var/spack/repos/builtin/packages/lammps/package.py @@ -791,7 +791,7 @@ def cmake_args(self): # FFTW libraries are available and enable them by default. 
if "^fftw" in spec or "^cray-fftw" in spec or "^amdfftw" in spec: args.append(self.define("FFT", "FFTW3")) - elif "^mkl" in spec: + elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES: args.append(self.define("FFT", "MKL")) elif "^armpl-gcc" in spec or "^acfl" in spec: args.append(self.define("FFT", "FFTW3")) diff --git a/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch new file mode 100644 index 00000000000000..3020af37b07d9d --- /dev/null +++ b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch @@ -0,0 +1,39 @@ +diff --git a/src/callbacks/memory_profiler.cpp b/src/callbacks/memory_profiler.cpp +index 0d5cec5d2..6f40705af 100644 +--- a/src/callbacks/memory_profiler.cpp ++++ b/src/callbacks/memory_profiler.cpp +@@ -158,7 +158,10 @@ struct MemUsage + size_t total_mem; + + MemUsage(const std::string& r, size_t m) : report(r), total_mem(m) {} +- bool operator<(const MemUsage& other) { return total_mem < other.total_mem; } ++ bool operator<(const MemUsage& other) const ++ { ++ return total_mem < other.total_mem; ++ } + }; + } // namespace + +diff --git a/src/optimizers/adam.cpp b/src/optimizers/adam.cpp +index d00dfbe7c..1d9ad3949 100644 +--- a/src/optimizers/adam.cpp ++++ b/src/optimizers/adam.cpp +@@ -34,14 +34,12 @@ + + namespace lbann { + +-#if defined (LBANN_HAS_ROCM) && defined (LBANN_HAS_GPU_FP16) ++#if defined(LBANN_HAS_ROCM) && defined(LBANN_HAS_GPU_FP16) + namespace { +-bool isfinite(fp16 const& x) +-{ +- return std::isfinite(float(x)); +-} +-} ++bool isfinite(fp16 const& x) { return std::isfinite(float(x)); } ++} // namespace + #endif ++using std::isfinite; + + template + adam::adam(TensorDataType learning_rate, diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index faae710921e563..14f257a3415ffa 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -5,7 +5,6 @@ import os import socket -import sys from spack.package import * @@ -24,109 +23,42 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): maintainers("bvanessen") version("develop", branch="develop") - version("0.102", sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a") + version("benchmarking", branch="benchmarking") + version("0.104", sha256="a847c7789082ab623ed5922ab1248dd95f5f89d93eed44ac3d6a474703bbc0bf") + version("0.103", sha256="9da1bf308f38323e30cb07f8ecf8efa05c7f50560e8683b9cd961102b1b3e25a") version( - "0.101", - sha256="69d3fe000a88a448dc4f7e263bcb342c34a177bd9744153654528cd86335a1f7", - deprecated=True, - ) - version( - "0.100", - sha256="d1bab4fb6f1b80ae83a7286cc536a32830890f6e5b0c3107a17c2600d0796912", - deprecated=True, - ) - version( - "0.99", - sha256="3358d44f1bc894321ce07d733afdf6cb7de39c33e3852d73c9f31f530175b7cd", - deprecated=True, - ) - version( - "0.98.1", - sha256="9a2da8f41cd8bf17d1845edf9de6d60f781204ebd37bffba96d8872036c10c66", - deprecated=True, - ) - version( - "0.98", - sha256="8d64b9ac0f1d60db553efa4e657f5ea87e790afe65336117267e9c7ae6f68239", - deprecated=True, - ) - version( - "0.97.1", - sha256="2f2756126ac8bb993202cf532d72c4d4044e877f4d52de9fdf70d0babd500ce4", - deprecated=True, - ) - version( - "0.97", - sha256="9794a706fc7ac151926231efdf74564c39fbaa99edca4acb745ee7d20c32dae7", - deprecated=True, - ) - version( - "0.96", - sha256="97af78e9d3c405e963361d0db96ee5425ee0766fa52b43c75b8a5670d48e4b4a", - 
deprecated=True, - ) - version( - "0.95", - sha256="d310b986948b5ee2bedec36383a7fe79403721c8dc2663a280676b4e431f83c2", - deprecated=True, - ) - version( - "0.94", - sha256="567e99b488ebe6294933c98a212281bffd5220fc13a0a5cd8441f9a3761ceccf", - deprecated=True, - ) - version( - "0.93", - sha256="77bfd7fe52ee7495050f49bcdd0e353ba1730e3ad15042c678faa5eeed55fb8c", - deprecated=True, - ) - version( - "0.92", - sha256="9187c5bcbc562c2828fe619d53884ab80afb1bcd627a817edb935b80affe7b84", - deprecated=True, - ) - version( - "0.91", - sha256="b69f470829f434f266119a33695592f74802cff4b76b37022db00ab32de322f5", + "0.102", + sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a", deprecated=True, ) - variant("al", default=True, description="Builds with support for Aluminum Library") variant( "build_type", default="Release", description="The build type to build", values=("Debug", "Release"), ) - variant( - "conduit", - default=True, - description="Builds with support for Conduit Library " - "(note that for v0.99 conduit is required)", - ) variant( "deterministic", default=False, description="Builds with support for deterministic execution", ) - variant( - "dihydrogen", default=True, description="Builds with support for DiHydrogen Tensor Library" - ) variant( "distconv", default=False, + sticky=True, description="Builds with support for spatial, filter, or channel " "distributed convolutions", ) variant( "dtype", default="float", + sticky=True, description="Type for floating point representation of weights", values=("float", "double"), ) variant("fft", default=False, description="Support for FFT operations") variant("half", default=False, description="Builds with support for FP16 precision data types") - variant("hwloc", default=True, description="Add support for topology aware algorithms") variant("nvprof", default=False, description="Build with region annotations for NVPROF") variant( "numpy", default=False, description="Builds with support for processing NumPy data files" @@ -139,7 +71,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): variant("vtune", default=False, description="Builds with support for Intel VTune") variant("onednn", default=False, description="Support for OneDNN") variant("onnx", default=False, description="Support for exporting models into ONNX format") - variant("nvshmem", default=False, description="Support for NVSHMEM") + variant("nvshmem", default=False, description="Support for NVSHMEM", when="+distconv") variant( "python", default=True, @@ -168,20 +100,13 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): # Don't expose this a dependency until Spack can find the external properly # depends_on('binutils+gold', type='build', when='+gold') + patch("lbann_v0.104_build_cleanup.patch", when="@0.104:") + # Variant Conflicts - conflicts("@:0.90,0.99:", when="~conduit") - conflicts("@0.90:0.101", when="+fft") - conflicts("@:0.90,0.102:", when="~dihydrogen") conflicts("~cuda", when="+nvprof") - conflicts("~hwloc", when="+al") conflicts("~cuda", when="+nvshmem") conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") - conflicts("~vision", when="@0.91:0.101") - conflicts("~numpy", when="@0.91:0.101") - conflicts("~python", when="@0.91:0.101") - conflicts("~pfe", when="@0.91:0.101") - requires("%clang", when="+lld") conflicts("+lld", when="+gold") @@ -191,84 +116,56 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.17.0:", type="build") depends_on("cmake@3.21.0:", type="build", 
when="@0.103:") - # Specify the correct versions of Hydrogen - depends_on("hydrogen@:1.3.4", when="@0.95:0.100") - depends_on("hydrogen@1.4.0:1.4", when="@0.101:0.101.99") - depends_on("hydrogen@1.5.0:", when="@:0.90,0.102:") + # Specify the core libraries: Hydrogen, DiHydrogen, Aluminum + depends_on("hydrogen@1.5.3:") + depends_on("aluminum@1.4.1:") + depends_on("dihydrogen@0.2.0:") + + # Align the following variants across Hydrogen and DiHydrogen + forwarded_variants = ["cuda", "rocm", "half", "nvshmem"] + for v in forwarded_variants: + if v != "nvshmem": + depends_on("hydrogen +{0}".format(v), when="+{0}".format(v)) + depends_on("hydrogen ~{0}".format(v), when="~{0}".format(v)) + if v != "al" and v != "half": + depends_on("dihydrogen +{0}".format(v), when="+{0}".format(v)) + depends_on("dihydrogen ~{0}".format(v), when="~{0}".format(v)) + if v == "cuda" or v == "rocm": + depends_on("aluminum +{0} +nccl".format(v), when="+{0}".format(v)) # Add Hydrogen variants depends_on("hydrogen +openmp +shared +int64") - depends_on("hydrogen +openmp_blas", when=sys.platform != "darwin") - depends_on("hydrogen ~al", when="~al") - depends_on("hydrogen +al", when="+al") - depends_on("hydrogen ~cuda", when="~cuda") - depends_on("hydrogen +cuda", when="+cuda") - depends_on("hydrogen ~half", when="~half") - depends_on("hydrogen +half", when="+half") - depends_on("hydrogen ~rocm", when="~rocm") - depends_on("hydrogen +rocm", when="+rocm") depends_on("hydrogen build_type=Debug", when="build_type=Debug") - # Older versions depended on Elemental not Hydrogen - depends_on("elemental +openmp_blas +shared +int64", when="@0.91:0.94") - depends_on( - "elemental +openmp_blas +shared +int64 build_type=Debug", - when="build_type=Debug @0.91:0.94", - ) - - # Specify the correct version of Aluminum - depends_on("aluminum@:0.3", when="@0.95:0.100 +al") - depends_on("aluminum@0.4.0:0.4", when="@0.101:0.101.99 +al") - depends_on("aluminum@0.5.0:", when="@:0.90,0.102: +al") + # Add DiHydrogen variants + depends_on("dihydrogen +distconv", when="+distconv") + depends_on("dihydrogen@develop", when="@develop") # Add Aluminum variants - depends_on("aluminum +cuda +nccl", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") - - depends_on("dihydrogen@0.2.0:", when="@:0.90,0.102:") - depends_on("dihydrogen +openmp", when="+dihydrogen") - depends_on("dihydrogen +openmp_blas", when=sys.platform != "darwin") - depends_on("dihydrogen ~cuda", when="+dihydrogen ~cuda") - depends_on("dihydrogen +cuda", when="+dihydrogen +cuda") - depends_on("dihydrogen ~al", when="+dihydrogen ~al") - depends_on("dihydrogen +al", when="+dihydrogen +al") - depends_on("dihydrogen +distconv +cuda", when="+distconv +cuda") - depends_on("dihydrogen +distconv +rocm", when="+distconv +rocm") - depends_on("dihydrogen ~half", when="+dihydrogen ~half") - depends_on("dihydrogen +half", when="+dihydrogen +half") - depends_on("dihydrogen ~nvshmem", when="+dihydrogen ~nvshmem") - depends_on("dihydrogen +nvshmem", when="+dihydrogen +nvshmem") - depends_on("dihydrogen ~rocm", when="+dihydrogen ~rocm") - depends_on("dihydrogen +rocm", when="+dihydrogen +rocm") - depends_on("dihydrogen@0.1", when="@0.101:0.101.99 +dihydrogen") - depends_on("dihydrogen@:0.0,0.2:", when="@:0.90,0.102: +dihydrogen") - conflicts("~dihydrogen", when="+distconv") + depends_on("aluminum@master", when="@develop") depends_on("hdf5+mpi", when="+distconv") for arch in CudaPackage.cuda_arch_values: depends_on("hydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) - 
depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) - depends_on("dihydrogen cuda_arch=%s" % arch, when="+dihydrogen +cuda cuda_arch=%s" % arch) + depends_on("aluminum cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) + depends_on("dihydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) depends_on("nccl cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. for val in ROCmPackage.amdgpu_targets: depends_on("hydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) - depends_on("aluminum amdgpu_target=%s" % val, when="+al amdgpu_target=%s" % val) - depends_on("dihydrogen amdgpu_target=%s" % val, when="+dihydrogen amdgpu_target=%s" % val) + depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) + depends_on("dihydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) depends_on("roctracer-dev", when="+rocm +distconv") - depends_on("cudnn", when="@0.90:0.100 +cuda") - depends_on("cudnn@8.0.2:", when="@:0.90,0.101: +cuda") - depends_on("cub", when="@0.94:0.98.2 +cuda ^cuda@:10") - depends_on("cutensor", when="@:0.90,0.102: +cuda") + depends_on("cudnn@8.0.2:", when="+cuda") + depends_on("cutensor", when="+cuda") depends_on("hipcub", when="+rocm") depends_on("mpi") - depends_on("hwloc@1.11:", when="@:0.90,0.102: +hwloc") - depends_on("hwloc@1.11.0:1.11", when="@0.95:0.101 +hwloc") + depends_on("hwloc@1.11:") depends_on("hwloc +cuda +nvml", when="+cuda") depends_on("hwloc@2.3.0:", when="+rocm") depends_on("hiptt", when="+rocm") @@ -296,9 +193,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): # Note that conduit defaults to +fortran +parmetis +python, none of which are # necessary by LBANN: you may want to disable those options in your # packages.yaml - depends_on("conduit@0.4.0: +hdf5", when="@0.94:0 +conduit") - depends_on("conduit@0.5.0:0.6 +hdf5", when="@0.100:0.101 +conduit") - depends_on("conduit@0.6.0: +hdf5", when="@:0.90,0.99:") + depends_on("conduit@0.6.0: +hdf5") # LBANN can use Python in two modes 1) as part of an extensible framework # and 2) to drive the front end model creation and launch @@ -308,13 +203,13 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): extends("python", when="+python") # Python front end and possible extra packages - depends_on("python@3: +shared", type=("build", "run"), when="@:0.90,0.99: +pfe") + depends_on("python@3: +shared", type=("build", "run"), when="+pfe") extends("python", when="+pfe") depends_on("py-setuptools", type="build", when="+pfe") - depends_on("py-protobuf+cpp@3.10.0:", type=("build", "run"), when="@:0.90,0.99: +pfe") + depends_on("py-protobuf+cpp@3.10.0:4.21.12", type=("build", "run"), when="+pfe") - depends_on("protobuf+shared@3.10.0:", when="@:0.90,0.99:") - depends_on("zlib-api", when="protobuf@3.11.0:") + depends_on("protobuf+shared@3.10.0:3.21.12") + depends_on("zlib-api", when="^protobuf@3.11.0:") # using cereal@1.3.1 and above requires changing the # find_package call to lowercase, so stick with :1.3.0 @@ -328,7 +223,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("onnx", when="+onnx") depends_on("nvshmem", when="+nvshmem") - depends_on("spdlog", when="@:0.90,0.102:") + depends_on("spdlog@1.11.0") depends_on("zstr") depends_on("caliper+adiak+mpi", when="+caliper") @@ -336,6 +231,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): generator("ninja") def setup_build_environment(self, env): + 
env.append_flags("CXXFLAGS", "-fno-omit-frame-pointer") if self.spec.satisfies("%apple-clang"): env.append_flags("CPPFLAGS", self.compiler.openmp_flag) env.append_flags("CFLAGS", self.spec["llvm-openmp"].headers.include_flags) @@ -357,7 +253,7 @@ def libs(self): def cache_name(self): hostname = socket.gethostname() # Get a hostname that has no node identifier - hostname = hostname.rstrip("1234567890") + hostname = hostname.rstrip("1234567890-") return "LBANN_{0}_{1}-{2}-{3}@{4}.cmake".format( hostname, self.spec.version, @@ -440,12 +336,9 @@ def initconfig_package_entries(self): cmake_variant_fields = [ ("LBANN_WITH_CNPY", "numpy"), ("LBANN_DETERMINISTIC", "deterministic"), - ("LBANN_WITH_HWLOC", "hwloc"), - ("LBANN_WITH_ALUMINUM", "al"), ("LBANN_WITH_ADDRESS_SANITIZER", "asan"), ("LBANN_WITH_BOOST", "boost"), ("LBANN_WITH_CALIPER", "caliper"), - ("LBANN_WITH_CONDUIT", "conduit"), ("LBANN_WITH_NVSHMEM", "nvshmem"), ("LBANN_WITH_FFT", "fft"), ("LBANN_WITH_ONEDNN", "onednn"), @@ -460,6 +353,9 @@ def initconfig_package_entries(self): for opt, val in cmake_variant_fields: entries.append(self.define_cmake_cache_from_variant(opt, val)) + entries.append(cmake_cache_option("LBANN_WITH_ALUMINUM", True)) + entries.append(cmake_cache_option("LBANN_WITH_CONDUIT", True)) + entries.append(cmake_cache_option("LBANN_WITH_HWLOC", True)) entries.append(cmake_cache_option("LBANN_WITH_ROCTRACER", "+rocm +distconv" in spec)) entries.append(cmake_cache_option("LBANN_WITH_TBINF", False)) entries.append( @@ -492,7 +388,7 @@ def initconfig_package_entries(self): ) ) - entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DIHYDROGEN", "dihydrogen")) + entries.append(cmake_cache_option("LBANN_WITH_DIHYDROGEN", True)) entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DISTCONV", "distconv")) # IF IBM ESSL is used it needs help finding the proper LAPACK libraries diff --git a/var/spack/repos/builtin/packages/lcio/package.py b/var/spack/repos/builtin/packages/lcio/package.py index 8f02063af4c760..6b42bb41751fbe 100644 --- a/var/spack/repos/builtin/packages/lcio/package.py +++ b/var/spack/repos/builtin/packages/lcio/package.py @@ -19,6 +19,7 @@ class Lcio(CMakePackage): maintainers("gaede", "vvolkl", "jmcarcell") version("master", branch="master") + version("2.20.1", sha256="125f657297de12b40694cb0dddec1d1ce3379058492f2a6a2a6f992ee51604d6") version("2.20", sha256="5ef92c9ef04ce468ffb48be0ec6010377a400b064e352cb50f9f4c9599e7e990") version("2.19", sha256="2d6b37094d8d556ab0ba0efa632f10d8b851f533ca5c767e436397df18cb57c7") version("2.18", sha256="e722df7f4a6adcc2459ea1c6488a2a6e40bb04f7ee99536fdc60b51e6c80f565") diff --git a/var/spack/repos/builtin/packages/lcov/package.py b/var/spack/repos/builtin/packages/lcov/package.py index dc7d3aa1b637c3..0c71a9b7aeda63 100644 --- a/var/spack/repos/builtin/packages/lcov/package.py +++ b/var/spack/repos/builtin/packages/lcov/package.py @@ -13,14 +13,42 @@ class Lcov(MakefilePackage): supports statement, function and branch coverage measurement.""" homepage = "http://ltp.sourceforge.net/coverage/lcov.php" - url = "https://github.com/linux-test-project/lcov/releases/download/v1.14/lcov-1.14.tar.gz" + url = "https://github.com/linux-test-project/lcov/releases/download/v2.0/lcov-2.0.tar.gz" maintainers("KineticTheory") + version("2.0", sha256="1857bb18e27abe8bcec701a907d5c47e01db4d4c512fc098d1a6acd29267bf46") version("1.16", sha256="987031ad5528c8a746d4b52b380bc1bffe412de1f2b9c2ba5224995668e3240b") version("1.15", 
sha256="c1cda2fa33bec9aa2c2c73c87226cfe97de0831887176b45ee523c5e30f8053a") version("1.14", sha256="14995699187440e0ae4da57fe3a64adc0a3c5cf14feab971f8db38fb7d8f071a") - depends_on("perl") + # dependencies from + # https://github.com/linux-test-project/lcov/blob/02ece21d54ccd16255d74f8b00f8875b6c15653a/README#L91-L111 + depends_on("perl", type=("build", "run")) + depends_on("perl-b-hooks-endofscope", type=("run")) + depends_on("perl-capture-tiny", type=("run")) + depends_on("perl-class-inspector", type=("run")) + depends_on("perl-class-singleton", type=("run")) + depends_on("perl-datetime", type=("run")) + depends_on("perl-datetime-locale", type=("run")) + depends_on("perl-datetime-timezone", type=("run")) + depends_on("perl-devel-cover", type=("run")) + depends_on("perl-devel-stacktrace", type=("run")) + depends_on("perl-digest-md5", type=("run")) + depends_on("perl-eval-closure", type=("run")) + depends_on("perl-exception-class", type=("run")) + depends_on("perl-file-sharedir", type=("run")) + depends_on("perl-file-spec", type=("run")) + depends_on("perl-json", type=("run")) + depends_on("perl-memory-process", type=("run")) + depends_on("perl-module-implementation", type=("run")) + depends_on("perl-mro-compat", type=("run")) + depends_on("perl-namespace-clean", type=("run")) + depends_on("perl-package-stash", type=("run")) + depends_on("perl-params-validationcompiler", type=("run")) + depends_on("perl-role-tiny", type=("run")) + depends_on("perl-specio", type=("run")) + depends_on("perl-sub-identify", type=("run")) + depends_on("perl-time-hires", type=("run")) def install(self, spec, prefix): make( diff --git a/var/spack/repos/builtin/packages/ldak/package.py b/var/spack/repos/builtin/packages/ldak/package.py index 1fbb7de0900b16..d074d90ea6830b 100644 --- a/var/spack/repos/builtin/packages/ldak/package.py +++ b/var/spack/repos/builtin/packages/ldak/package.py @@ -33,8 +33,8 @@ class Ldak(Package): requires("target=x86_64:", when="~glpk", msg="bundled qsopt is only for x86_64") requires( - "^mkl", "^openblas", + *[f"^{intel_pkg}" for intel_pkg in INTEL_MATH_LIBRARIES], policy="one_of", msg="Only mkl or openblas are supported for blas/lapack with ldak", ) diff --git a/var/spack/repos/builtin/packages/lemon/package.py b/var/spack/repos/builtin/packages/lemon/package.py index bcf759ea198ec7..61a7fc46e3dce9 100644 --- a/var/spack/repos/builtin/packages/lemon/package.py +++ b/var/spack/repos/builtin/packages/lemon/package.py @@ -16,3 +16,30 @@ class Lemon(CMakePackage): url = "https://lemon.cs.elte.hu/pub/sources/lemon-1.3.1.tar.gz" version("1.3.1", sha256="71b7c725f4c0b4a8ccb92eb87b208701586cf7a96156ebd821ca3ed855bad3c8") + + # variant("coin", default=False, description="Enable Coin solver backend") #TODO build fails + variant("ilog", default=False, description="Enable ILOG (CPLEX) solver backend") + variant("glpk", default=True, description="Enable GLPK solver backend") + # soplex not mentioned in docs but shown in cmakecache + # variant("soplex", default=False, description="Enable SOPLEX solver backend") #TODO + + depends_on("glpk", when="+glpk") + depends_on("cplex", when="+ilog") + # depends_on("coinutils", when="+coin") # just a guess + # depends_on("cbc", when="+coin") + # depends_on("clp", when="+coin") + # depends_on("bzip2", when="+coin") + # depends_on("soplex", when="+soplex") # no such package in Spack yet. 
TODO + + def cmake_args(self): + spec = self.spec + args = [] + args.extend( + [ + # f"-DLEMON_ENABLE_COIN={spec.variants['coin'].value}", #TODO + f"-DLEMON_ENABLE_ILOG={spec.variants['ilog'].value}", + f"-DLEMON_ENABLE_GLPK={spec.variants['glpk'].value}", + # f"-DLEMON_ENABLE_SOPLEX={spec.variants['soplex'].value}", #TODO + ] + ) + return args diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index c0ece0386c5a5a..dc0c34191c8387 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -41,6 +41,10 @@ class Libevent(AutotoolsPackage): depends_on("openssl@:1.0", when="@:2.0+openssl") depends_on("openssl", when="+openssl") + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + def url_for_version(self, version): if version >= Version("2.0.22"): url = "https://github.com/libevent/libevent/releases/download/release-{0}-stable/libevent-{0}-stable.tar.gz" @@ -54,6 +58,9 @@ def libs(self): libs = find_libraries("libevent", root=self.prefix, shared=True, recursive=True) return LibraryList(libs) + def autoreconf(self, spec, prefix): + autoreconf("--force", "--install", "--symlink") + def configure_args(self): spec = self.spec configure_args = [] diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py index c67bd82ddd2f52..d32400c3cfac5f 100644 --- a/var/spack/repos/builtin/packages/libffi/package.py +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -32,6 +32,11 @@ class Libffi(AutotoolsPackage): patch("clang-powerpc-3.2.1.patch", when="@3.2.1%clang platform=linux") # ref.: https://github.com/libffi/libffi/pull/561 patch("powerpc-3.3.patch", when="@3.3") + patch( + "https://github.com/libffi/libffi/commit/ce077e5565366171aa1b4438749b0922fce887a4.patch?full_index=1", + sha256="070b1f3aa87f2b56f83aff38afc42157e1692bfaa580276ecdbad2048b818ed7", + when="@3.4.3:3.4.4", + ) @property def headers(self): diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py index cd207db083c0c2..aae41faa590111 100644 --- a/var/spack/repos/builtin/packages/libgcrypt/package.py +++ b/var/spack/repos/builtin/packages/libgcrypt/package.py @@ -14,6 +14,7 @@ class Libgcrypt(AutotoolsPackage): maintainers("alalazo") + version("1.10.3", sha256="8b0870897ac5ac67ded568dcfadf45969cfa8a6beb0fd60af2a9eadc2a3272aa") version("1.10.2", sha256="3b9c02a004b68c256add99701de00b383accccf37177e0d6c58289664cce0c03") version("1.10.1", sha256="ef14ae546b0084cd84259f61a55e07a38c3b53afc0f546bffcef2f01baffe9de") version("1.10.0", sha256="6a00f5c05caa4c4acc120c46b63857da0d4ff61dc4b4b03933fa8d46013fae81") diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py index dd09fd8e1e843c..20410cf1c6d814 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -83,6 +83,7 @@ class Libgit2(CMakePackage): depends_on("cmake@2.8:", type="build", when="@:0.28") depends_on("cmake@3.5:", type="build", when="@0.99:") depends_on("pkgconfig", type="build") + depends_on("python", type="test") # Runtime Dependencies depends_on("libssh2", when="+ssh") @@ -123,5 +124,6 @@ def cmake_args(self): # Control tests args.append(self.define("BUILD_CLAR", self.run_tests)) + args.append(self.define("BUILD_TESTS", 
self.run_tests)) return args diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py index a30e29b138d574..25425557854651 100644 --- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py +++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py @@ -59,6 +59,22 @@ class LibjpegTurbo(CMakePackage, AutotoolsPackage): variant("shared", default=True, description="Build shared libs") variant("static", default=True, description="Build static libs") variant("jpeg8", default=False, description="Emulate libjpeg v8 API/ABI") + variant( + "partial_decoder", + default=False, + description="add partial_decode_scale functionality required for rocAL", + ) + + patch( + "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/09c71da06a6346dca132db66f26f959f7e4dd5ad.patch?full_index=1", + sha256="4d5bdfb5de5b04399144254ea383f5357ab7beb830b398aeb35b65f21dd6b4b0", + when="@2.0.6 +partial_decoder", + ) + patch( + "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/640d7ee1917fcd3b6a5271aa6cf4576bccc7c5fb.patch?full_index=1", + sha256="dc1ec567c2356b652100ecdc28713bbf25f544e46f7d2947f31a2395c362cc48", + when="@2.0.6 +partial_decoder", + ) # Can use either of these. But in the current version of the package # only nasm is used. In order to use yasm an environmental variable diff --git a/var/spack/repos/builtin/packages/libksba/package.py b/var/spack/repos/builtin/packages/libksba/package.py index 5230bcb6a5bf42..cd2183f841f014 100644 --- a/var/spack/repos/builtin/packages/libksba/package.py +++ b/var/spack/repos/builtin/packages/libksba/package.py @@ -17,6 +17,7 @@ class Libksba(AutotoolsPackage): maintainers("alalazo") + version("1.6.5", sha256="a564628c574c99287998753f98d750babd91a4e9db451f46ad140466ef2a6d16") version("1.6.4", sha256="bbb43f032b9164d86c781ffe42213a83bf4f2fee91455edfa4654521b8b03b6b") version("1.6.3", sha256="3f72c68db30971ebbf14367527719423f0a4d5f8103fc9f4a1c01a9fa440de5c") diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py index f7ebb21048c4c4..6dd5b420302dc5 100644 --- a/var/spack/repos/builtin/packages/likwid/package.py +++ b/var/spack/repos/builtin/packages/likwid/package.py @@ -24,6 +24,7 @@ class Likwid(Package): git = "https://github.com/RRZE-HPC/likwid.git" maintainers("TomTheBear") + version("5.3.0", sha256="c290e554c4253124ac2ab8b056e14ee4d23966b8c9fbfa10ba81f75ae543ce4e") version("5.2.2", sha256="7dda6af722e04a6c40536fc9f89766ce10f595a8569b29e80563767a6a8f940e") version("5.2.1", sha256="1b8e668da117f24302a344596336eca2c69d2bc2f49fa228ca41ea0688f6cbc2") version("5.2.0", sha256="aa6dccacfca59e52d8f3be187ffcf292b2a2fa1f51a81bf8912b9d48e5a257e0") @@ -65,6 +66,7 @@ class Likwid(Package): ) variant("fortran", default=True, description="with fortran interface") variant("cuda", default=False, description="with Nvidia GPU profiling support") + variant("rocm", default=False, description="with AMD GPU profiling support") variant( "accessmode", @@ -83,6 +85,10 @@ class Likwid(Package): depends_on("lua", when="@5.0.2:") depends_on("cuda", when="@5: +cuda") depends_on("hwloc", when="@5.2.0:") + depends_on("rocprofiler-dev", when="@5.3: +rocm") + depends_on("rocm-core", when="@5.3: +rocm") + depends_on("rocm-smi", when="@5.3: +rocm") + depends_on("rocm-smi-lib", when="@5.3: +rocm") # TODO: check # depends_on('gnuplot', type='run') @@ -103,6 +109,31 @@ def setup_run_environment(self, env): ) for lib in libs.directories: 
env.append_path("LD_LIBRARY_PATH", lib) + if "+rocm" in self.spec: + libs = find_libraries( + "librocprofiler64.so.1", + root=self.spec["rocprofiler-dev"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) + libs = find_libraries( + "libhsa-runtime64.so", + root=self.spec["rocm-core"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) + libs = find_libraries( + "librocm_smi64.so", + root=self.spec["rocm-smi-lib"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) @run_before("install") def filter_sbang(self): @@ -170,6 +201,13 @@ def install(self, spec, prefix): else: filter_file("^NVIDIA_INTERFACE.*", "NVIDIA_INTERFACE = false", "config.mk") + if "+rocm" in self.spec: + env["ROCM_HOME"] = spec["rocm-core"].prefix + filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = true", "config.mk") + filter_file("^BUILDAPPDAEMON.*", "BUILDAPPDAEMON = true", "config.mk") + else: + filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = false", "config.mk") + if spec.satisfies("^lua"): filter_file( "^#LUA_INCLUDE_DIR.*", diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 383871353fe9fa..abf3dee6b49a36 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -428,6 +428,12 @@ class Llvm(CMakePackage, CudaPackage): when="@14:15", ) + # missing include + patch( + "https://github.com/llvm/llvm-project/commit/ff1681ddb303223973653f7f5f3f3435b48a1983.patch?full_index=1", + sha256="c6ca6b925f150e8644ce756023797b7f94c9619c62507231f979edab1c09af78", + when="@6:13", + ) # fix building of older versions of llvm with newer versions of glibc for compiler_rt_as in ["project", "runtime"]: with when("compiler-rt={0}".format(compiler_rt_as)): @@ -977,7 +983,10 @@ def post_install(self): ninja() ninja("install") if "+python" in self.spec: - install_tree("llvm/bindings/python", python_platlib) + if spec.version < Version("17.0.0"): + # llvm bindings were removed in v17: + # https://releases.llvm.org/17.0.1/docs/ReleaseNotes.html#changes-to-the-python-bindings + install_tree("llvm/bindings/python", python_platlib) if "+clang" in self.spec: install_tree("clang/bindings/python", python_platlib) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index f4ca15a3a3eaee..af3b8a7b6cbf3b 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -23,6 +23,7 @@ class Lmod(AutotoolsPackage): version("8.7.24", sha256="8451267652059b6507b652e1b563929ecf9b689ffb20830642085eb6a55bd539") version("8.7.20", sha256="c04deff7d2ca354610a362459a7aa9a1c642a095e45a4b0bb2471bb3254e85f4") + version("8.7.18", sha256="b9912caca1557dd0c17113bceb1a4952e0ae75331d38df6361601db3f80366af") version("8.7.2", sha256="5f44f3783496d2d597ced7531e1714c740dbb2883a7d16fde362135fb0b0fd96") version("8.6.18", sha256="3db1c665c35fb8beb78c02e40d56accd361d82b715df70b2a995bcb10fbc2c80") version("8.6.5", sha256="4a1823264187340be11104d82f8226905daa8149186fa8615dfc742b6d19c2ce") diff --git a/var/spack/repos/builtin/packages/mapl/package.py b/var/spack/repos/builtin/packages/mapl/package.py index 54cef1e40e96da..299a203f372b4d 100644 --- a/var/spack/repos/builtin/packages/mapl/package.py +++ 
b/var/spack/repos/builtin/packages/mapl/package.py
@@ -36,6 +36,11 @@ class Mapl(CMakePackage):
     version("develop", branch="develop")
     version("main", branch="main")

+    version("2.42.0", sha256="9b6c3434919c14ef79004db5f76cb3dd8ef375584227101c230a372bb0470fdd")
+    version("2.41.2", sha256="73e1f0961f1b70e8159c0a2ce3499eb5158f3ca6d081f4c7826af7854ebfb44d")
+    version("2.41.1", sha256="2b384bd4fbaac1bff4ef009922c436c4ab54832172a5cd4d312ea44e32c1ae7c")
+    version("2.41.0", sha256="1142f9395e161174e3ec1654fba8bda1d0bd93edc7438b1927d8f5d7b42a0a86")
+    version("2.40.4", sha256="fb843b118d6e56cd4fc4b114c4d6f91956d5c8b3d9389ada56da1dfdbc58904f")
     version("2.40.3", sha256="4b82a314c88a035fc2b91395750aa7950d6bee838786178ed16a3f39a1e45519")
     version("2.40.2", sha256="7327f6f5bce6e09e7f7b930013fba86ee7cbfe8ed4c7c087fc9ab5acbf6640fd")
     version("2.40.1", sha256="6f40f946fabea6ba73b0764092e495505d220455b191b4e454736a0a25ee058c")
@@ -116,6 +121,12 @@ class Mapl(CMakePackage):

     # Versions later than 3.14 remove FindESMF.cmake
     # from ESMA_CMake.
+    resource(
+        name="esma_cmake",
+        git="https://github.com/GEOS-ESM/ESMA_cmake.git",
+        tag="v3.36.0",
+        when="@2.42.0:",
+    )
     resource(
         name="esma_cmake",
         git="https://github.com/GEOS-ESM/ESMA_cmake.git",
@@ -159,6 +170,12 @@ class Mapl(CMakePackage):
     # Patch to add missing MPI Fortran target to top-level CMakeLists.txt
     patch("mapl-2.12.3-mpi-fortran.patch", when="@:2.12.3")

+    # MAPL only supports MPICH as of MAPL 2.42.0, so MPICH 4 conflicts with
+    # older MAPL releases. MAPL has also only been tested with MPICH 4, so
+    # MPICH 3 and older are disallowed entirely.
+    conflicts("mpich@:3")
+    conflicts("mpich@4", when="@:2.41")
+
     variant("flap", default=False, description="Build with FLAP support", when="@:2.39")
     variant("pflogger", default=True, description="Build with pFlogger support")
     variant("fargparse", default=True, description="Build with fArgParse support")
diff --git a/var/spack/repos/builtin/packages/metkit/package.py b/var/spack/repos/builtin/packages/metkit/package.py
index 784e028068daff..41246cf6da9ab6 100644
--- a/var/spack/repos/builtin/packages/metkit/package.py
+++ b/var/spack/repos/builtin/packages/metkit/package.py
@@ -15,6 +15,8 @@ class Metkit(CMakePackage):

     maintainers("skosukhin")

+    version("1.10.17", sha256="1c525891d77ed28cd4c87b065ba4d1aea24d0905452c18d885ccbd567bbfc9b1")
+    version("1.10.2", sha256="a038050962aecffda27b755c40b0a6ed0db04a2c22cad3d8c93e6109c8ab4b34")
     version("1.9.2", sha256="35d5f67196197cc06e5c2afc6d1354981e7c85a441df79a2fbd774e0c343b0b4")
     version("1.7.0", sha256="8c34f6d8ea5381bd1bcfb22462349d03e1592e67d8137e76b3cecf134a9d338c")

@@ -26,8 +28,10 @@ class Metkit(CMakePackage):

     depends_on("ecbuild@3.4:", type="build")
     depends_on("eckit@1.16:")
+    depends_on("eckit@1.21:", when="@1.10:")

     depends_on("eccodes@2.5:", when="+grib")
+    depends_on("eccodes@2.27:", when="@1.10.2: +grib")

     depends_on("odc", when="+odb")

diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py
index f4821e63c2ba0f..75eeda7b1fd51b 100644
--- a/var/spack/repos/builtin/packages/mfem/package.py
+++ b/var/spack/repos/builtin/packages/mfem/package.py
@@ -967,6 +967,9 @@ def find_optional_library(name, prefix):
         if "^rocthrust" in spec and not spec["hip"].external:
             # petsc+rocm needs the rocthrust header path
             hip_headers += spec["rocthrust"].headers
+        if "^hipblas" in spec and not spec["hip"].external:
+            # superlu-dist+rocm needs the hipblas header path
+            hip_headers += spec["hipblas"].headers
         if "%cce" in spec:
             # We assume the
proper Cray CCE module (cce) is loaded: craylibs_path = env["CRAYLIBS_" + machine().upper()] diff --git a/var/spack/repos/builtin/packages/migraphx/package.py b/var/spack/repos/builtin/packages/migraphx/package.py index 81bf1bff2b3818..09d340d01c8d16 100644 --- a/var/spack/repos/builtin/packages/migraphx/package.py +++ b/var/spack/repos/builtin/packages/migraphx/package.py @@ -131,6 +131,7 @@ def url_for_version(self, version): depends_on("py-pybind11", type="build", when="@:4.0.0") depends_on("py-pybind11@2.6:", type="build", when="@4.1.0:") depends_on("pkgconfig", type="build", when="@5.3.0:") + depends_on("abseil-cpp") for ver in [ "3.5.0", diff --git a/var/spack/repos/builtin/packages/millepede/package.py b/var/spack/repos/builtin/packages/millepede/package.py index 6cc9b5a9ada53d..d086f8a75993f6 100644 --- a/var/spack/repos/builtin/packages/millepede/package.py +++ b/var/spack/repos/builtin/packages/millepede/package.py @@ -14,8 +14,6 @@ class Millepede(MakefilePackage): homepage = "https://gitlab.desy.de/claus.kleinwort/millepede-ii" url = "https://gitlab.desy.de/claus.kleinwort/millepede-ii/-/archive/V04-11-01/millepede-ii-V04-11-01.tar.gz" - maintainers("iarspider") - parallel = False version("04-13-03", sha256="669a6e46a6f02ba3c78b2760e2ffb2c90d25b582ccd1a5c0770eef81c7bcbbe9") diff --git a/var/spack/repos/builtin/packages/mimalloc/package.py b/var/spack/repos/builtin/packages/mimalloc/package.py index fed6e5bf783543..a88aaed0db8463 100644 --- a/var/spack/repos/builtin/packages/mimalloc/package.py +++ b/var/spack/repos/builtin/packages/mimalloc/package.py @@ -14,6 +14,8 @@ class Mimalloc(CMakePackage): git = "https://github.com/microsoft/mimalloc.git" maintainers("msimberg") + license("MIT") + version("dev-slice", branch="dev-slice") version("dev", branch="dev") version("master", branch="master") diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch new file mode 100644 index 00000000000000..364a4a403651c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch @@ -0,0 +1,13 @@ +diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt +index 7ae8cb8..195f387 100644 +--- a/rocAL/rocAL/CMakeLists.txt ++++ b/rocAL/rocAL/CMakeLists.txt +@@ -122,6 +122,8 @@ if(NOT Threads_FOUND) + endif() + + if(${BUILD_ROCAL}) ++ find_path(HALF_INCLUDE_DIR half.hpp) ++ include_directories(${HALF_INCLUDE_DIR}) + # AMD OpenVX & VX_RPP + set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp) + # AMD RPP diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch new file mode 100644 index 00000000000000..2e935e9cee2417 --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch @@ -0,0 +1,21 @@ +diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt +index bb28810..3c97eab 100644 +--- a/rocAL/rocAL/CMakeLists.txt ++++ b/rocAL/rocAL/CMakeLists.txt +@@ -39,6 +39,8 @@ find_package(Boost COMPONENTS ${BOOST_COMPONENTS} QUIET) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads QUIET) + ++find_path(HALF_INCLUDE_DIR half.hpp) ++ + if( GPU_SUPPORT AND "${BACKEND}" STREQUAL "HIP") + if(NOT DEFINED HIP_PATH) + if(NOT DEFINED ENV{HIP_PATH}) +@@ -120,6 +122,7 @@ if(NOT Threads_FOUND) + endif() + + if(${BUILD_ROCAL}) ++ include_directories(${HALF_INCLUDE_DIR}) + # AMD OpenVX & 
RPP + include_directories(${AMDRPP_INCLUDE_DIRS}) + set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp) diff --git a/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch new file mode 100644 index 00000000000000..c3aec5597982bc --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch @@ -0,0 +1,62 @@ +diff --git a/model_compiler/python/nnir_to_clib.py b/model_compiler/python/nnir_to_clib.py +index b688094..26fcfe3 100644 +--- a/model_compiler/python/nnir_to_clib.py ++++ b/model_compiler/python/nnir_to_clib.py +@@ -151,6 +151,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND) + include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers ) + endif() + ++find_path(HALF_INCLUDE_DIR half.hpp) ++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}") ++include_directories(${HALF_INCLUDE_DIR}) ++ + find_package(OpenCV QUIET) + include_directories (/opt/rocm/include/mivisionx) + include_directories (${PROJECT_SOURCE_DIR}/lib) +diff --git a/samples/inference/mv_objdetect/CMakeLists.txt b/samples/inference/mv_objdetect/CMakeLists.txt +index 9b92b84..d82b71e 100644 +--- a/samples/inference/mv_objdetect/CMakeLists.txt ++++ b/samples/inference/mv_objdetect/CMakeLists.txt +@@ -50,7 +50,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND) + include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers ) + endif() + +-include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} ) ++find_path(HALF_INCLUDE_DIR half.hpp) ++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}") ++ ++include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} ${HALF_INCLUDE_DIR} ) + link_directories (${ROCM_PATH}/lib ${PROJECT_SOURCE_DIR}/lib) + option (USE_POSTPROC "Use postprocessing module implementation" ON) + set(SOURCES mvobjdetect.cpp mvdeploy_api.cpp visualize.cpp) +diff --git a/utilities/rocAL/rocAL_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_unittests/CMakeLists.txt +index 6500003..20de035 100644 +--- a/utilities/rocAL/rocAL_unittests/CMakeLists.txt ++++ b/utilities/rocAL/rocAL_unittests/CMakeLists.txt +@@ -43,9 +43,10 @@ include(GNUInstallDirs) + + list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake) + ++find_path(HALF_INCLUDE_DIR half.hpp) + find_package(OpenCV QUIET) + find_package(AMDRPP QUIET) +-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal) ++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR}) + link_directories(${ROCM_PATH}/lib/) + file(GLOB My_Source_Files ./*.cpp) + add_executable(${PROJECT_NAME} ${My_Source_Files}) +diff --git a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt +index bd64a5b..3aa6172 100644 +--- a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt ++++ b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt +@@ -46,8 +46,8 @@ list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake) + + find_package(OpenCV QUIET) + find_package(AMDRPP QUIET) +- +-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal) ++find_path(HALF_INCLUDE_DIR half.hpp) ++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR}) + link_directories(${ROCM_PATH}/lib/) + file(GLOB My_Source_Files ./*.cpp) + add_executable(${PROJECT_NAME} ${My_Source_Files}) diff --git 
a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index b298160520f53f..f30ae2a9c25599 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -116,8 +116,19 @@ def url_for_version(self, version): variant("opencl", default=False, description="Use OPENCL as the backend") variant("hip", default=True, description="Use HIP as backend") + variant("add_tests", default=False, description="add tests and samples folder") + patch("0001-add-half-include-path.patch", when="@5.5") + patch("0001-add-half-include-path-5.6.patch", when="@5.6:") + patch("0002-add-half-include-path-for-tests.patch", when="@5.5: +add_tests") + + patch( + "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX/commit/da24882438b91a0ae1feee23206b75c1a1256887.patch?full_index=1", + sha256="41caff199224f904ef5dc2cd9c5602d6cfa41eba6af0fcc782942a09dd202ab4", + when="@5.6", + ) conflicts("+opencl", when="@5.6.0:") + conflicts("+add_tests", when="@:5.4") def patch(self): if self.spec.satisfies("@4.2.0"): @@ -179,6 +190,86 @@ def patch(self): "amd_openvx_extensions/amd_nn/nn_hip/CMakeLists.txt", string=True, ) + if self.spec.satisfies("@5.5.0: + hip"): + filter_file( + "${ROCM_PATH}/llvm/bin/clang++", + "{0}/bin/clang++".format(self.spec["llvm-amdgpu"].prefix), + "rocAL/rocAL/rocAL_hip/CMakeLists.txt", + string=True, + ) + if self.spec.satisfies("+add_tests"): + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "tests/amd_migraphx_tests/mnist/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "tests/amd_migraphx_tests/mnist/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "tests/amd_migraphx_tests/resnet50/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "tests/amd_migraphx_tests/resnet50/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "samples/inference/mv_objdetect/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "samples/inference/mv_objdetect/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "model_compiler/python/nnir_to_clib.py", + string=True, + ) + filter_file( + "/opt/rocm", + "{0}".format(self.spec.prefix), + "model_compiler/python/nnir_to_clib.py", + string=True, + ) + filter_file( + "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal", + "{0}/include/mivisionx/rocal".format(self.spec.prefix), + "utilities/rocAL/rocAL_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "utilities/rocAL/rocAL_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal", + "{0}/include/mivisionx/rocal".format(self.spec.prefix), + "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt", + string=True, + ) depends_on("cmake@3.5:", type="build") depends_on("ffmpeg@:4", type="build", when="@:5.3") @@ -203,7 
+294,17 @@ def patch(self): depends_on("miopen-opencl@3.5.0", when="@1.7+opencl") depends_on("miopengemm@1.1.6", when="@1.7+opencl") depends_on("openssl", when="@4.0.0:") - depends_on("libjpeg-turbo", type="build") + depends_on("libjpeg-turbo@2.0.6+partial_decoder", type="build") + depends_on("rpp", when="@5.5:") + depends_on("lmdb", when="@5.5:") + depends_on("py-setuptools", when="@5.6:") + depends_on("py-wheel", when="@5.6:") + depends_on("py-pybind11", when="@5.6:") + depends_on("py-google-api-python-client", when="+add_tests") + depends_on("py-protobuf@3.20.3", type=("build", "run"), when="+add_tests") + depends_on("py-future", when="+add_tests") + depends_on("py-numpy", when="+add_tests") + depends_on("py-pytz", when="+add_tests") conflicts("^cmake@3.22:", when="@:5.0.0") # need to choose atleast one backend and both cannot be set @@ -265,11 +366,15 @@ def patch(self): depends_on("miopen-hip@" + ver, when="@" + ver) for ver in ["5.3.3", "5.4.0", "5.4.3", "5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("migraphx@" + ver, when="@" + ver) + depends_on("hip@" + ver, when="@" + ver) for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) depends_on("python@3.5:", type="build") + def setup_run_environment(self, env): + env.set("MIVISIONX_MODEL_COMPILER_PATH", self.spec.prefix.libexec.mivisionx.model_compiler) + def flag_handler(self, name, flags): spec = self.spec protobuf = spec["protobuf"].prefix.include @@ -290,4 +395,24 @@ def cmake_args(self): args.append(self.define("HIP_PATH", spec["hip"].prefix)) if self.spec.satisfies("~hip~opencl"): args.append(self.define("BACKEND", "CPU")) + if self.spec.satisfies("@5.5:"): + args.append( + self.define("AMDRPP_LIBRARIES", "{0}/lib/librpp.so".format(spec["rpp"].prefix)) + ) + args.append( + self.define("AMDRPP_INCLUDE_DIRS", "{0}/include/rpp".format(spec["rpp"].prefix)) + ) + args.append( + self.define( + "TurboJpeg_LIBRARIES_DIRS", "{0}/lib64".format(spec["libjpeg-turbo"].prefix) + ) + ) + args.append(self.define("CMAKE_INSTALL_PREFIX_PYTHON", spec.prefix)) return args + + @run_after("install") + def add_tests(self): + if self.spec.satisfies("+add_tests"): + install_tree("tests", self.spec.prefix.tests) + install_tree("samples", self.spec.prefix.samples) + install_tree("utilities", self.spec.prefix.utilities) diff --git a/var/spack/repos/builtin/packages/molgw/package.py b/var/spack/repos/builtin/packages/molgw/package.py index 17e5283c920558..91026c7abe0b00 100644 --- a/var/spack/repos/builtin/packages/molgw/package.py +++ b/var/spack/repos/builtin/packages/molgw/package.py @@ -78,7 +78,7 @@ def edit(self, spec, prefix): flags["PREFIX"] = prefix # Set LAPACK and SCALAPACK - if "^mkl" in spec: + if spec["lapack"].name not in INTEL_MATH_LIBRARIES: flags["LAPACK"] = self._get_mkl_ld_flags(spec) else: flags["LAPACK"] = spec["lapack"].libs.ld_flags + " " + spec["blas"].libs.ld_flags @@ -105,7 +105,7 @@ def edit(self, spec, prefix): if "+scalapack" in spec: flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_SCALAPACK -DHAVE_MPI " - if "^mkl" in spec: + if spec["lapack"].name in INTEL_MATH_LIBRARIES: flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_MKL " # Write configuration file diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index b66c0b8fd4c52a..aa2d89e5b5e9aa 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -55,7 +55,7 @@ class Mpich(AutotoolsPackage, 
CudaPackage, ROCmPackage): variant("hydra", default=True, description="Build the hydra process manager") variant("romio", default=True, description="Enable ROMIO MPI I/O implementation") variant("verbs", default=False, description="Build support for OpenFabrics verbs.") - variant("slurm", default=False, description="Enable SLURM support") + variant("slurm", default=False, description="Enable Slurm support") variant("wrapperrpath", default=True, description="Enable wrapper rpath") variant( "pmi", @@ -70,16 +70,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): description="""Abstract Device Interface (ADI) implementation. The ch4 device is in experimental state for versions before 3.4.""", - values=("ch3", "ch4"), + values=("ch3", "ch4", "ch3:sock"), multi=False, ) variant( "netmod", default="ofi", description="""Network module. Only single netmod builds are -supported. For ch3 device configurations, this presumes the -ch3:nemesis communication channel. ch3:sock is not supported by this -spack package at this time.""", +supported, and netmod is ignored if device is ch3:sock.""", values=("tcp", "mxm", "ofi", "ucx"), multi=False, ) @@ -121,6 +119,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): depends_on("yaksa+cuda", when="+cuda ^yaksa") depends_on("yaksa+rocm", when="+rocm ^yaksa") conflicts("datatype-engine=yaksa", when="device=ch3") + conflicts("datatype-engine=yaksa", when="device=ch3:sock") variant( "hcoll", @@ -135,8 +134,10 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): # overriding the variant from CudaPackage. conflicts("+cuda", when="@:3.3") conflicts("+cuda", when="device=ch3") + conflicts("+cuda", when="device=ch3:sock") conflicts("+rocm", when="@:4.0") conflicts("+rocm", when="device=ch3") + conflicts("+rocm", when="device=ch3:sock") conflicts("+cuda", when="+rocm", msg="CUDA must be disabled to support ROCm") provides("mpi@:4.0") @@ -271,6 +272,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): conflicts("netmod=tcp", when="device=ch4") conflicts("pmi=pmi2", when="device=ch3 netmod=ofi") conflicts("pmi=pmix", when="device=ch3") + conflicts("pmi=pmix", when="device=ch3:sock") conflicts("pmi=pmix", when="+hydra") conflicts("pmi=cray", when="+hydra") @@ -556,7 +558,10 @@ def configure_args(self): elif "device=ch3" in spec: device_config = "--with-device=ch3:nemesis:" - if "netmod=ucx" in spec: + # Do not apply any netmod if device is ch3:sock + if "device=ch3:sock" in spec: + device_config = "--with-device=ch3:sock" + elif "netmod=ucx" in spec: device_config += "ucx" elif "netmod=ofi" in spec: device_config += "ofi" diff --git a/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch new file mode 100644 index 00000000000000..667e412acc1cee --- /dev/null +++ b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch @@ -0,0 +1,26 @@ +--- ./configure.orig 2023-11-12 14:48:25.802025918 -0800 ++++ ./configure 2023-11-12 14:48:56.177057419 -0800 +@@ -571,10 +571,7 @@ + try: + flags = [] + for flag in shlex.split (execute ([ 'pkg-config' ] + pkg_config_flags.split(), RunError)[1]): +- if flag.startswith ('-I'): +- flags += [ '-idirafter', flag[2:] ] +- else: +- flags += [ flag ] ++ flags += [ flag ] + return flags + except Exception: + log('error running "pkg-config ' + pkg_config_flags + '"\n\n') +@@ -1323,10 +1320,7 @@ + for entry in qt: + if entry[0] != '$' and not entry == '-I.': + entry = entry.replace('\"','').replace("'",'') +- if entry.startswith('-I'): +- 
qt_cflags += [ '-idirafter', entry[2:] ] +- else: +- qt_cflags += [ entry ] ++ qt_cflags += [ entry ] + + qt = qt_ldflags + qt_libs + qt_ldflags = [] diff --git a/var/spack/repos/builtin/packages/mrtrix3/package.py b/var/spack/repos/builtin/packages/mrtrix3/package.py index 2a59d7ec22a8e9..53bf19ae53065d 100644 --- a/var/spack/repos/builtin/packages/mrtrix3/package.py +++ b/var/spack/repos/builtin/packages/mrtrix3/package.py @@ -17,21 +17,26 @@ class Mrtrix3(Package): git = "https://github.com/MRtrix3/mrtrix3.git" version( - "3.0.3", - sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e", + "3.0.4", + sha256="f1d1aa289cfc3e46e3a8eca93594b23d061c6d50a0cd03727433a7e2cd14f71a", preferred=True, ) + version("3.0.3", sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e") version("2017-09-25", commit="72aca89e3d38c9d9e0c47104d0fb5bd2cbdb536d") depends_on("python@2.7:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) depends_on("glu") depends_on("qt+opengl@4.7:") - depends_on("eigen") + # MRTrix <= 3.0.3 can't build with eigen >= 3.4 due to conflicting declarations + depends_on("eigen@3.3", when="@3.0.3") + depends_on("eigen@3.4:", when="@3.0.4:") depends_on("zlib-api") depends_on("libtiff") depends_on("fftw") + patch("fix_includes.patch", when="@3.0.3:3.0.4") + conflicts("%gcc@7:", when="@2017-09-25") # MRtrix3/mrtrix3#1041 def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 32bcaf3a812ac1..1befb5acef9b84 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -16,6 +16,8 @@ class Mumps(Package): homepage = "https://graal.ens-lyon.fr/MUMPS/index.php" url = "https://graal.ens-lyon.fr/MUMPS/MUMPS_5.5.1.tar.gz" + maintainers("jcortial-safran") + version("5.5.1", sha256="1abff294fa47ee4cfd50dfd5c595942b72ebfcedce08142a75a99ab35014fa15") version("5.5.0", sha256="e54d17c5e42a36c40607a03279e0704d239d71d38503aab68ef3bfe0a9a79c13") version("5.4.1", sha256="93034a1a9fe0876307136dcde7e98e9086e199de76f1c47da822e7d4de987fa8") @@ -223,7 +225,7 @@ def write_makefile_inc(self): # As of version 5.2.0, MUMPS is able to take advantage # of the GEMMT BLAS extension. MKL and amdblis are the only # known BLAS implementation supported. 
- if "@5.2.0: ^mkl" in self.spec: + if self.spec["blas"].name in INTEL_MATH_LIBRARIES and self.spec.satisfies("@5.2.0:"): optf.append("-DGEMMT_AVAILABLE") if "@5.2.0: ^amdblis@3.0:" in self.spec: diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py index 8526b7807020a8..c706245e94d491 100644 --- a/var/spack/repos/builtin/packages/ncview/package.py +++ b/var/spack/repos/builtin/packages/ncview/package.py @@ -26,9 +26,10 @@ def patch(self): patched_file = "configure" with keep_modification_time(patched_file): filter_file( - "if test x\$CC_TEST_SAME != x\$NETCDF_CC_TEST_SAME; then", # noqa: W605 + "if test x$CC_TEST_SAME != x$NETCDF_CC_TEST_SAME; then", "if false; then", patched_file, + string=True, ) def url_for_version(self, version): diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py index 0d6480f210428b..c8c544c1179381 100644 --- a/var/spack/repos/builtin/packages/npm/package.py +++ b/var/spack/repos/builtin/packages/npm/package.py @@ -13,49 +13,16 @@ class Npm(Package): """npm: A package manager for javascript.""" homepage = "https://github.com/npm/cli" - # base https://www.npmjs.com/ - - git = "https://github.com/npm/cli.git" url = "https://registry.npmjs.org/npm/-/npm-9.3.1.tgz" + git = "https://github.com/npm/cli.git" version("9.3.1", sha256="41caa26a340b0562bc5429d28792049c980fe3e872b42b82cad94e8f70e37f40") version("8.19.3", sha256="634bf4e0dc87be771ebf48a058629960e979a209c20a51ebdbc4897ca6a25260") version("7.24.2", sha256="5b9eeea011f8bc3b76e55cc33339e87213800677f37e0756ad13ef0e9eaccd64") version("6.14.18", sha256="c9b15f277e2a0b1b57e05bad04504296a27024555d56c2aa967f862e957ad2ed") - version( - "6.14.9", - sha256="1e0e880ce0d5adf0120fb3f92fc8e5ea5bac73681d37282615d074ff670f7703", - deprecated=True, - ) - version( - "6.14.8", - sha256="fe8e873cb606c06f67f666b4725eb9122c8927f677c8c0baf1477f0ff81f5a2c", - deprecated=True, - ) - version( - "6.13.7", - sha256="6adf71c198d61a5790cf0e057f4ab72c6ef6c345d72bed8bb7212cb9db969494", - deprecated=True, - ) - version( - "6.13.4", - sha256="a063290bd5fa06a8753de14169b7b243750432f42d01213fbd699e6b85916de7", - deprecated=True, - ) - version( - "3.10.9", - sha256="fb0871b1aebf4b74717a72289fade356aedca83ee54e7386e38cb51874501dd6", - deprecated=True, - ) - version( - "3.10.5", - sha256="ff019769e186152098841c1fa6325e5a79f7903a45f13bd0046a4dc8e63f845f", - deprecated=True, - ) - depends_on("node-js", type=("build", "run")) - depends_on("libvips") + depends_on("libvips", when="@:7") # npm 6.13.4 ships with node-gyp 5.0.5, which contains several Python 3 # compatibility issues on macOS. 
Manually update to node-gyp 6.0.1 for diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index 1098a0332db448..90dbdb44786ee2 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -167,7 +167,7 @@ def configure_args(self): config_args = [] # Required dependencies - if "^mkl" in spec and "gfortran" in self.compiler.fc: + if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)") config_args.extend( [ diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 8a88711dad900e..3ccd8719a1758b 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -159,7 +159,7 @@ def configure_args(self): if "^fftw" in spec: args.append("--with-fftw-prefix=%s" % spec["fftw"].prefix) - elif "^mkl" in spec: + elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES: # As of version 10.0, Octopus depends on fftw-api instead # of FFTW. If FFTW is not in the dependency tree, then # it ought to be MKL as it is currently the only providers diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 409dfa004d9bea..e88a3f418e5c67 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -24,6 +24,7 @@ class Openblas(CMakePackage, MakefilePackage): libraries = ["libopenblas", "openblas"] version("develop", branch="develop") + version("0.3.25", sha256="4c25cb30c4bb23eddca05d7d0a85997b8db6144f5464ba7f8c09ce91e2f35543") version("0.3.24", sha256="ceadc5065da97bd92404cac7254da66cc6eb192679cf1002098688978d4d5132") version("0.3.23", sha256="5d9491d07168a5d00116cdc068a40022c3455bf9293c7cb86a65b1054d7e5114") version("0.3.22", sha256="7fa9685926ba4f27cfe513adbf9af64d6b6b63f9dcabb37baefad6a65ff347a7") @@ -191,6 +192,13 @@ class Openblas(CMakePackage, MakefilePackage): when="@0.3.21 %gcc@:9", ) + # Fix build on A64FX for OpenBLAS v0.3.24 + patch( + "https://github.com/OpenMathLib/OpenBLAS/commit/90231bfc4e4afc51f67c248328fbef0cecdbd2c2.patch?full_index=1", + sha256="139e314f3408dc5c080d28887471f382e829d1bd06c8655eb72593e4e7b921cc", + when="@0.3.24 target=a64fx", + ) + # See https://github.com/spack/spack/issues/19932#issuecomment-733452619 # Notice: fixed on Amazon Linux GCC 7.3.1 (which is an unofficial version # as GCC only has major.minor releases. But the bound :7.3.0 doesn't hurt) @@ -369,6 +377,14 @@ def _microarch_target_args(self): # case can go away. 
args.append("TARGET=" + "RISCV64_GENERIC") + elif self.spec.satisfies("@0.3.19: target=a64fx"): + # Special case for Fujitsu's A64FX + if any(self.spec.satisfies(i) for i in ["%gcc@11:", "%clang", "%fj"]): + args.append("TARGET=A64FX") + else: + # fallback to armv8-a+sve without -mtune=a64fx flag + args.append("TARGET=ARMV8SVE") + else: args.append("TARGET=" + microarch.name.upper()) diff --git a/var/spack/repos/builtin/packages/openimagedenoise/package.py b/var/spack/repos/builtin/packages/openimagedenoise/package.py index 9ccce30a86c266..e98d6d2baaa32c 100644 --- a/var/spack/repos/builtin/packages/openimagedenoise/package.py +++ b/var/spack/repos/builtin/packages/openimagedenoise/package.py @@ -17,6 +17,7 @@ class Openimagedenoise(CMakePackage): # maintainers("github_user1", "github_user2") + version("2.1.0", sha256="ce144ba582ff36563d9442ee07fa2a4d249bc85aa93e5b25fc527ff4ee755ed6") version("2.0.1", sha256="328eeb9809d18e835dca7203224af3748578794784c026940c02eea09c695b90") version("1.4.3", sha256="3276e252297ebad67a999298d8f0c30cfb221e166b166ae5c955d88b94ad062a") version("1.4.2", sha256="e70d27ce24b41364782376c1b3b4f074f77310ccfe5f8ffec4a13a347e48a0ea") diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 5325235612442d..f2347d01ecb0b2 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -595,7 +595,7 @@ class Openmpi(AutotoolsPackage, CudaPackage): conflicts( "schedulers=slurm ~pmi", when="@1.5.4", - msg="+pmi is required for openmpi to work with SLURM.", + msg="+pmi is required for openmpi to work with Slurm.", ) conflicts( "schedulers=loadleveler", diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 358a008088883a..8e2cd947bb54ed 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -381,6 +381,10 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package depends_on("ca-certificates-mozilla", type="build", when="certs=mozilla") depends_on("nasm", when="platform=windows") + depends_on("gmake", type="build", when="platform=linux") + depends_on("gmake", type="build", when="platform=cray") + depends_on("gmake", type="build", when="platform=darwin") + patch( "https://github.com/openssl/openssl/commit/f9e578e720bb35228948564192adbe3bc503d5fb.patch?full_index=1", sha256="3fdcf2d1e47c34f3a012f23306322c5a35cad55b180c9b6fb34537b55884645c", diff --git a/var/spack/repos/builtin/packages/openvkl/package.py b/var/spack/repos/builtin/packages/openvkl/package.py index 32bbdcafe26c3b..bc9a32f5ce29fd 100644 --- a/var/spack/repos/builtin/packages/openvkl/package.py +++ b/var/spack/repos/builtin/packages/openvkl/package.py @@ -16,6 +16,7 @@ class Openvkl(CMakePackage): # maintainers("github_user1", "github_user2") + version("2.0.0", sha256="469c3fba254c4fcdd84f8a9763d2e1aaa496dc123b5a9d467cc0a561e284c4e6") version("1.3.2", sha256="7704736566bf17497a3e51c067bd575316895fda96eccc682dae4aac7fb07b28") version("1.3.1", sha256="c9cefb6c313f2b4c0331e9629931759a6bc204ec00deed6ec0becad1670a1933") version("1.3.0", sha256="c6d4d40e6d232839c278b53dee1e7bd3bd239c3ccac33f49b465fc65a0692be9") @@ -36,6 +37,7 @@ class Openvkl(CMakePackage): depends_on("rkcommon@1.8.0:", when="@1.1:") depends_on("rkcommon@:1.10.0", when="@:1.3.1") depends_on("rkcommon@1.11.0:", when="@1.3.2:") + depends_on("rkcommon@:1.11.0", 
when="@:1.3.2") depends_on("tbb") def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/ospray/package.py b/var/spack/repos/builtin/packages/ospray/package.py index 85a79894bbf246..fe81c528aa142e 100644 --- a/var/spack/repos/builtin/packages/ospray/package.py +++ b/var/spack/repos/builtin/packages/ospray/package.py @@ -16,6 +16,7 @@ class Ospray(CMakePackage): # maintainers("aumuell") + version("3.0.0", sha256="d8d8e632d77171c810c0f38f8d5c8387470ca19b75f5b80ad4d3d12007280288") version("2.12.0", sha256="268b16952b2dd44da2a1e40d2065c960bc2442dd09b63ace8b65d3408f596301") version("2.11.0", sha256="55974e650d9b78989ee55adb81cffd8c6e39ce5d3cf0a3b3198c522bf36f6e81") version("2.10.0", sha256="bd478284f48d2cb775fc41a2855a9d9f5ea16c861abda0f8dc94e02ea7189cb8") @@ -38,26 +39,31 @@ class Ospray(CMakePackage): depends_on("rkcommon@1.9", when="@2.9.0") depends_on("rkcommon@1.10:", when="@2.10.0:") depends_on("rkcommon@1.11:", when="@2.11:") + depends_on("rkcommon@1.12:", when="@3:") depends_on("embree@3.12: +ispc") depends_on("embree@3.13.1:", when="@2.7.0:") depends_on("embree@:3", when="@:2.10") depends_on("embree@4:", when="@2.11:") + depends_on("embree@4.3:", when="@3:") with when("+volumes"): - depends_on("openvkl@0.13.0:") + depends_on("openvkl@0.13.0:1", when="@2") depends_on("openvkl@1.0.1:", when="@2.7.0:") depends_on("openvkl@1.2.0:", when="@2.9.0:") depends_on("openvkl@1.3.0:", when="@2.10.0:") - depends_on("openvkl@1.3.2:", when="@2.11:") + depends_on("openvkl@1.3.2:", when="@2.11:2") + depends_on("openvkl@2:", when="@3:") with when("+denoiser"): depends_on("openimagedenoise@1.2.3:") depends_on("openimagedenoise@1.3:", when="@2.5:") depends_on("openimagedenoise@:1", when="@:2.11") depends_on("openimagedenoise@2:", when="@2.12:") + depends_on("openimagedenoise@2.1:", when="@3:") depends_on("ispc@1.14.1:", type=("build")) depends_on("ispc@1.16.0:", when="@2.7.0:", type=("build")) depends_on("ispc@1.18.0:", when="@2.10.0:", type=("build")) depends_on("ispc@1.19.0:", when="@2.11.0:", type=("build")) depends_on("ispc@1.20.0:", when="@2.12.0:", type=("build")) + depends_on("ispc@1.21.1:", when="@3:", type=("build")) depends_on("tbb") depends_on("mpi", when="+mpi") diff --git a/var/spack/repos/builtin/packages/pacparser/package.py b/var/spack/repos/builtin/packages/pacparser/package.py index d42d927e370356..905cc9f874fc17 100644 --- a/var/spack/repos/builtin/packages/pacparser/package.py +++ b/var/spack/repos/builtin/packages/pacparser/package.py @@ -9,8 +9,6 @@ class Pacparser(MakefilePackage): """pacparser is a library to parse proxy auto-config (PAC) files.""" - maintainers("iarspider") - homepage = "https://pacparser.github.io/" url = "https://github.com/manugarg/pacparser/releases/download/v1.4.0/pacparser-v1.4.0.tar.gz" git = "https://github.com/manugarg/pacparser.git" diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 5ca64f29c08610..1f3cd9a76fb19a 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -28,6 +28,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): tags = ["e4s"] version("master", branch="master", submodules=True) + version( + "5.12.0-RC1", sha256="892eda2ae72831bbadd846be465d496ada35739779229c604cddd56e018a1aea" + ) version( "5.11.2", sha256="5c5d2f922f30d91feefc43b4a729015dbb1459f54c938896c123d2ac289c7a1e", @@ -190,7 +193,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): depends_on("libxt", 
when="~osmesa platform={}".format(p)) conflicts("+qt", when="+osmesa") - depends_on("ospray@2.1:", when="+raytracing") + depends_on("ospray@2.1:2", when="+raytracing") depends_on("openimagedenoise", when="+raytracing") depends_on("ospray +mpi", when="+raytracing +mpi") diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index 2630085d4eeaf7..21fc0d62aad485 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -19,7 +19,14 @@ class Patchelf(AutotoolsPackage): maintainers("haampie") version("0.18.0", sha256="64de10e4c6b8b8379db7e87f58030f336ea747c0515f381132e810dbf84a86e7") - version("0.17.2", sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0") + # patchelf 0.18 breaks libraries: + # https://github.com/spack/spack/issues/39252 + # https://github.com/spack/spack/pull/40938 + version( + "0.17.2", + sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0", + preferred=True, + ) version("0.16.1", sha256="1a562ed28b16f8a00456b5f9ee573bb1af7c39c1beea01d94fc0c7b3256b0406") version("0.15.0", sha256="53a8d58ed4e060412b8fdcb6489562b3c62be6f65cee5af30eba60f4423bfa0f") version("0.14.5", sha256="113ada3f1ace08f0a7224aa8500f1fa6b08320d8f7df05ff58585286ec5faa6f") diff --git a/var/spack/repos/builtin/packages/perl-class-singleton/package.py b/var/spack/repos/builtin/packages/perl-class-singleton/package.py new file mode 100644 index 00000000000000..fa44321b24ee80 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-class-singleton/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlClassSingleton(PerlPackage): + """Class::Singleton - Implementation of a "Singleton" class""" + + homepage = "https://metacpan.org/pod/Class::Singleton" + url = "https://cpan.metacpan.org/authors/id/S/SH/SHAY/Class-Singleton-1.6.tar.gz" + + version("1.6", sha256="27ba13f0d9512929166bbd8c9ef95d90d630fc80f0c9a1b7458891055e9282a4") diff --git a/var/spack/repos/builtin/packages/perl-datetime-locale/package.py b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py new file mode 100644 index 00000000000000..6e341423f06238 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetimeLocale(PerlPackage): + """DateTime::Locale - Localization support for DateTime.pm""" + + homepage = "https://metacpan.org/pod/DateTime::Locale" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-Locale-1.40.tar.gz" + + version("1.40", sha256="7490b4194b5d23a4e144976dedb3bdbcc6d3364b5d139cc922a86d41fdb87afb") + + depends_on("perl-file-sharedir-install", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py new file mode 100644 index 00000000000000..b6c9eba506d845 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetimeTimezone(PerlPackage): + """DateTime::TimeZone - Time zone object base class and factory""" + + homepage = "https://metacpan.org/pod/DateTime::TimeZone" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-TimeZone-2.60.tar.gz" + + version("2.60", sha256="f0460d379323905b579bed44e141237a337dc25dd26b6ab0c60ac2b80629323d") diff --git a/var/spack/repos/builtin/packages/perl-datetime/package.py b/var/spack/repos/builtin/packages/perl-datetime/package.py new file mode 100644 index 00000000000000..3bb9f31f819821 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetime(PerlPackage): + """DateTime - A date and time object for Perl""" + + homepage = "https://metacpan.org/pod/DateTime" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-1.63.tar.gz" + + version("1.63", sha256="1b11e49ec6e184ae2a10eccd05eda9534f32458fc644c12ab710c29a3a816f6f") + + depends_on("perl-namespace-autoclean", type=("run")) diff --git a/var/spack/repos/builtin/packages/perl-devel-cover/package.py b/var/spack/repos/builtin/packages/perl-devel-cover/package.py new file mode 100644 index 00000000000000..dfadcfb6713ba0 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-devel-cover/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDevelCover(PerlPackage): + """Devel::Cover - Perl extension for code coverage metrics""" + + homepage = "https://metacpan.org/pod/Devel::Cover" + url = "https://cpan.metacpan.org/authors/id/P/PJ/PJCJ/Devel-Cover-1.40.tar.gz" + + version("1.40", sha256="26e2f431fbcf7bff3851f352f83b84067c09ff206f40ab975cad8d2bafe711a8") diff --git a/var/spack/repos/builtin/packages/perl-file-sharedir/package.py b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py new file mode 100644 index 00000000000000..45c6c5169679e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlFileSharedir(PerlPackage): + """File::ShareDir - Locate per-dist and per-module shared files""" + + homepage = "https://metacpan.org/pod/File::ShareDir" + url = "https://cpan.metacpan.org/authors/id/R/RE/REHSACK/File-ShareDir-1.118.tar.gz" + + version("1.118", sha256="3bb2a20ba35df958dc0a4f2306fc05d903d8b8c4de3c8beefce17739d281c958") + + # depends_on("perl-module-build", type="build") diff --git a/var/spack/repos/builtin/packages/perl-file-spec/package.py b/var/spack/repos/builtin/packages/perl-file-spec/package.py new file mode 100644 index 00000000000000..3d4d767b0b7c2b --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-file-spec/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlFileSpec(PerlPackage): + """File::Spec - Perl extension for portably performing operations on file names""" + + homepage = "https://metacpan.org/pod/File::Spec" + url = "https://cpan.metacpan.org/authors/id/K/KW/KWILLIAMS/File-Spec-0.90.tar.gz" + + version("0.90", sha256="695a34604e1b6a98327fe2b374504329735b07c2c45db9f55df1636e4c29bf79") diff --git a/var/spack/repos/builtin/packages/perl-memory-process/package.py b/var/spack/repos/builtin/packages/perl-memory-process/package.py new file mode 100644 index 00000000000000..3cc302b624fe17 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-memory-process/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlMemoryProcess(PerlPackage): + """Memory::Process - Perl class to determine actual memory usage""" + + homepage = "https://metacpan.org/pod/Memory::Process" + url = "https://cpan.metacpan.org/authors/id/S/SK/SKIM/Memory-Process-0.06.tar.gz" + + version("0.06", sha256="35814488ffd29c97621625ea3b3d700afbfa60ed055bd759d4e58d9c8fd44e4e") diff --git a/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py new file mode 100644 index 00000000000000..8304e7377aa574 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlNamespaceAutoclean(PerlPackage): + """Namespace::Autoclean - Keep imports out of your namespace""" + + homepage = "https://metacpan.org/pod/namespace::autoclean" + url = "https://cpan.metacpan.org/authors/id/E/ET/ETHER/namespace-autoclean-0.29.tar.gz" + + version("0.29", sha256="45ebd8e64a54a86f88d8e01ae55212967c8aa8fed57e814085def7608ac65804") diff --git a/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py new file mode 100644 index 00000000000000..6f408c960a8df9 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlParamsValidationcompiler(PerlPackage): + """Params::ValidationCompiler - Build an optimized subroutine parameter validator once, + use it forever""" + + homepage = "https://metacpan.org/pod/Params::ValidationCompiler" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/Params-ValidationCompiler-0.31.tar.gz" + + version("0.31", sha256="7b6497173f1b6adb29f5d51d8cf9ec36d2f1219412b4b2410e9d77a901e84a6d") diff --git a/var/spack/repos/builtin/packages/perl-specio/package.py b/var/spack/repos/builtin/packages/perl-specio/package.py new file mode 100644 index 00000000000000..05954d586dee77 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-specio/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlSpecio(PerlPackage): + """Type constraints and coercions for Perl .""" + + homepage = "https://metacpan.org/dist/Specio" + url = "http://search.cpan.org/CPAN/authors/id/D/DR/DROLSKY/Specio-0.48.tar.gz" + + version("0.48", sha256="0c85793580f1274ef08173079131d101f77b22accea7afa8255202f0811682b2") diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py index 57a409b150c858..37fecbe31c3efd 100644 --- a/var/spack/repos/builtin/packages/pflotran/package.py +++ b/var/spack/repos/builtin/packages/pflotran/package.py @@ -57,3 +57,9 @@ def flag_handler(self, name, flags): if "%gcc@10:" in self.spec and name == "fflags": flags.append("-fallow-argument-mismatch") return flags, None, None + + @when("@5.0.0") + def patch(self): + filter_file( + "use iso_[cC]_binding", "use, intrinsic :: iso_c_binding", "src/pflotran/hdf5_aux.F90" + ) diff --git a/var/spack/repos/builtin/packages/photos-f/package.py b/var/spack/repos/builtin/packages/photos-f/package.py index 5c6cac9bf568d8..536407ff8ac49a 100644 --- a/var/spack/repos/builtin/packages/photos-f/package.py +++ b/var/spack/repos/builtin/packages/photos-f/package.py @@ -18,8 +18,6 @@ class PhotosF(MakefilePackage): "http://cern.ch/service-spi/external/MCGenerators/distribution/photos/photos-215.5-src.tgz" ) - maintainers("iarspider") - version("215.5", sha256="3e2b3f60ffe2d3a6a95cf2f156aa24b93e1fa3c439a85fa0ae780ca2f6e0dbb5") patch("photos-215.5-update-configure.patch", level=2) diff --git a/var/spack/repos/builtin/packages/pika-algorithms/package.py b/var/spack/repos/builtin/packages/pika-algorithms/package.py index 3387dfdb2736cb..48ca6fe2f0129b 100644 --- a/var/spack/repos/builtin/packages/pika-algorithms/package.py +++ b/var/spack/repos/builtin/packages/pika-algorithms/package.py @@ -15,6 +15,8 @@ class PikaAlgorithms(CMakePackage): git = "https://github.com/pika-org/pika-algorithms.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + license("BSL-1.0") + version("0.1.4", sha256="67ea5e8545b234f82dcc75612a774f2e3df8425a283f2034c2d1e2e5ac74f945") version("0.1.3", sha256="53b79fcc0e5decc0a4d70abf0897a4f66141b85eea6d65013f51eec02ad123b7") version("0.1.2", sha256="286cf5c4db06717fa66c681cec8c99207154dd07e72d72f2b5b4a3cb9ff698bf") diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 949475650240dc..1dcd4d2613a5a2 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,8 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + license("BSL-1.0") + version("0.20.0", sha256="f338cceea66a0e3954806b2aca08f6560bba524ecea222f04bc18b483851c877") version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d") version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index 5f0b94ba3770eb..b7eaa980a3ed50 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -15,11 +15,13 @@ class Podio(CMakePackage): url = "https://github.com/AIDASoft/podio/archive/v00-09-02.tar.gz" git = "https://github.com/AIDASoft/podio.git" - 
maintainers("vvolkl", "drbenmorgan", "jmcarcell") + maintainers("vvolkl", "drbenmorgan", "jmcarcell", "tmadlener") tags = ["hep", "key4hep"] version("master", branch="master") + version("0.17.3", sha256="079517eba9c43d01255ef8acd88468c3ead7bb9d8fed11792e121bb481d54dee") + version("0.17.2", sha256="5b519335c4e1708f71ed85b3cac8ca81e544cc4572a5c37019ce9fc414c5e74d") version("0.17.1", sha256="97d6c5f81d50ee42bf7c01f041af2fd333c806f1bbf0a4828ca961a24cea6bb2") version("0.17", sha256="0c19f69970a891459cab227ab009514f1c1ce102b70e8c4b7d204eb6a0c643c1") version("0.16.7", sha256="8af7c947e2637f508b7af053412bacd9218d41a455d69addd7492f05b7a4338d") diff --git a/var/spack/repos/builtin/packages/py-abipy/package.py b/var/spack/repos/builtin/packages/py-abipy/package.py index 3e868f56075119..dfaed29c7d4f61 100644 --- a/var/spack/repos/builtin/packages/py-abipy/package.py +++ b/var/spack/repos/builtin/packages/py-abipy/package.py @@ -17,7 +17,7 @@ class PyAbipy(PythonPackage): version("0.2.0", sha256="c72b796ba0f9ea4299eac3085bede092d2652e9e5e8074d3badd19ef7b600792") variant("gui", default=False, description="Build the GUI") - variant("ipython", default=False, when="0.2.0", description="Build IPython support") + variant("ipython", default=False, when="@0.2.0", description="Build IPython support") depends_on("py-setuptools", type="build") # in newer pip versions --install-option does not exist diff --git a/var/spack/repos/builtin/packages/py-archspec/package.py b/var/spack/repos/builtin/packages/py-archspec/package.py index 047beda9afabb6..564f798ea953d5 100644 --- a/var/spack/repos/builtin/packages/py-archspec/package.py +++ b/var/spack/repos/builtin/packages/py-archspec/package.py @@ -15,8 +15,9 @@ class PyArchspec(PythonPackage): maintainers("alalazo") + version("0.2.2", sha256="d922c9fd80a5234d8cef883fbe0e146b381c449062c0405f91714ebad1edc035") version("0.2.1", sha256="0974a8a95831d2d43cce906c5b79a35d5fd2bf9be478b0e3b7d83ccc51ac815e") version("0.2.0", sha256="6aaba5ebdb5c3633c400d8c221a6a18716da0c64b367a8509f4217b22e91a5f5") depends_on("py-poetry-core@1.0.0:", type="build") - depends_on("py-click@8", type=("build", "run")) + depends_on("py-click@8", type=("build", "run"), when="@:0.2.0") diff --git a/var/spack/repos/builtin/packages/py-async-lru/package.py b/var/spack/repos/builtin/packages/py-async-lru/package.py index 021112b4f9f4b6..ec2033768f1b4e 100644 --- a/var/spack/repos/builtin/packages/py-async-lru/package.py +++ b/var/spack/repos/builtin/packages/py-async-lru/package.py @@ -12,8 +12,6 @@ class PyAsyncLru(PythonPackage): homepage = "https://github.com/wikibusiness/async_lru" pypi = "async-lru/async-lru-1.0.2.tar.gz" - maintainers("iarspider") - version("1.0.3", sha256="c2cb9b2915eb14e6cf3e717154b40f715bf90e596d73623677affd0d1fbcd32a") version("1.0.2", sha256="baa898027619f5cc31b7966f96f00e4fc0df43ba206a8940a5d1af5336a477cb") diff --git a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py index a46d057753a4df..5b2b830c4645ea 100644 --- a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py +++ b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py @@ -12,8 +12,6 @@ class PyBackportsEntryPointsSelectable(PythonPackage): homepage = "https://github.com/jaraco/backports.entry_points_selectable" pypi = "backports.entry_points_selectable/backports.entry_points_selectable-1.1.0.tar.gz" - maintainers("iarspider") - version("1.1.1", 
sha256="914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386") version("1.1.0", sha256="988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a") diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py index 825d37a446f291..a2cba61bc88951 100644 --- a/var/spack/repos/builtin/packages/py-black/package.py +++ b/var/spack/repos/builtin/packages/py-black/package.py @@ -17,6 +17,9 @@ class PyBlack(PythonPackage): maintainers("adamjstewart") + version("23.11.0", sha256="4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05") + version("23.10.1", sha256="1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258") + version("23.10.0", sha256="31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd") version("23.9.1", sha256="24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d") version("23.9.0", sha256="3511c8a7e22ce653f89ae90dfddaf94f3bb7e2587a245246572d3b9c92adf066") version("23.7.0", sha256="022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb") @@ -48,13 +51,14 @@ class PyBlack(PythonPackage): depends_on("py-platformdirs@2:") depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10") depends_on("py-tomli@1.1:", when="@21.7:22.6") - depends_on("py-typing-extensions@3.10:", when="^python@:3.9") - - depends_on("py-colorama@0.4.3:", when="+colorama") - depends_on("py-uvloop@0.15.2:", when="+uvloop") - depends_on("py-aiohttp@3.7.4:", when="+d") - depends_on("py-ipython@7.8:", when="+jupyter") - depends_on("py-tokenize-rt@3.2:", when="+jupyter") + depends_on("py-typing-extensions@4.0.1:", when="@23.9: ^python@:3.10") + depends_on("py-typing-extensions@3.10:", when="@:23.7 ^python@:3.9") + + depends_on("py-colorama@0.4.3:", when="+colorama") + depends_on("py-uvloop@0.15.2:", when="+uvloop") + depends_on("py-aiohttp@3.7.4:", when="+d") + depends_on("py-ipython@7.8:", when="+jupyter") + depends_on("py-tokenize-rt@3.2:", when="+jupyter") # Historical dependencies depends_on("py-setuptools@45:", when="@:22.8", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bokeh/package.py b/var/spack/repos/builtin/packages/py-bokeh/package.py index 9c6601dffe311e..dcc13130ad8a87 100644 --- a/var/spack/repos/builtin/packages/py-bokeh/package.py +++ b/var/spack/repos/builtin/packages/py-bokeh/package.py @@ -9,9 +9,10 @@ class PyBokeh(PythonPackage): """Statistical and novel interactive HTML plots for Python""" - homepage = "https://github.com/bokeh/bokeh" + homepage = "https://bokeh.org/" pypi = "bokeh/bokeh-0.12.2.tar.gz" + version("3.3.1", sha256="2a7b3702d7e9f03ef4cd801b02b7380196c70cff2773859bcb84fa565218955c") version("2.4.3", sha256="ef33801161af379665ab7a34684f2209861e3aefd5c803a21fbbb99d94874b03") version("2.4.1", sha256="d0410717d743a0ac251e62480e2ea860a7341bdcd1dbe01499a904f233c90512") version("2.4.0", sha256="6fa00ed8baab5cca33f4175792c309fa2536eaae7e90abee884501ba8c90fddb") @@ -20,11 +21,16 @@ class PyBokeh(PythonPackage): version("0.12.2", sha256="0a840f6267b6d342e1bd720deee30b693989538c49644142521d247c0f2e6939") depends_on("py-setuptools", type="build", when="@1.3.4:") + depends_on("py-setuptools@64:", type="build", when="@3:") + depends_on("py-setuptools-git-versioning", type="build", when="@3:") + depends_on("py-colorama", type="build", when="@3:") depends_on("python@2.6:", type=("build", "run"), when="@0.12.2") depends_on("python@2.7:", type=("build", "run"), when="@1.3.4:") depends_on("python@3.6:", type=("build", "run"), when="@2.3.3:") 
depends_on("python@3.7:", type=("build", "run"), when="@2.4.0:") + depends_on("python@3.8:", type=("build", "run"), when="@3.0.0:") + depends_on("python@3.9:", type=("build", "run"), when="@3.2.0:") depends_on("py-requests@1.2.3:", type=("build", "run"), when="@0.12.2") depends_on("py-six@1.5.2:", type=("build", "run"), when="@:1.3.4") @@ -33,11 +39,16 @@ class PyBokeh(PythonPackage): depends_on("py-jinja2@2.7:", type=("build", "run")) depends_on("py-jinja2@2.9:", type=("build", "run"), when="@2.3.3:") + depends_on("py-contourpy@1:", type=("build", "run"), when="@3:") + depends_on("py-numpy@1.7.1:", type=("build", "run")) depends_on("py-numpy@1.11.3:", type=("build", "run"), when="@2.3.3:") + depends_on("py-numpy@1.16:", type=("build", "run"), when="@3.1:") depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:") + depends_on("py-pandas@1.2:", type=("build", "run"), when="@3:") + depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:") depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:") @@ -46,5 +57,7 @@ class PyBokeh(PythonPackage): depends_on("py-tornado@4.3:", type=("build", "run")) depends_on("py-tornado@5.1:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:") + depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:3.0.0") + depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:3.0.0") + + depends_on("py-xyzservices@2021.09.1:", type=("build", "run"), when="@3:") diff --git a/var/spack/repos/builtin/packages/py-cleo/package.py b/var/spack/repos/builtin/packages/py-cleo/package.py index 1852bd7b3eb76e..b5d60f1adc25e3 100644 --- a/var/spack/repos/builtin/packages/py-cleo/package.py +++ b/var/spack/repos/builtin/packages/py-cleo/package.py @@ -12,17 +12,26 @@ class PyCleo(PythonPackage): homepage = "https://github.com/sdispater/cleo" pypi = "cleo/cleo-0.8.1.tar.gz" - version("1.0.0a5", sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3") + maintainers("LydDeb") + + version("2.1.0", sha256="0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523") + version("2.0.1", sha256="eb4b2e1f3063c11085cebe489a6e9124163c226575a3c3be69b2e51af4a15ec5") + version("2.0.0", sha256="fbc5cb141cbc31ea8ffd3d5cd67d3b183fa38aa5098fd37e39e9a953a232fda9") + version("1.0.0", sha256="bb5e4f70db83a597575ec86a1ed8fc56bd80934cfea3db97a23ea50c03b78382") version( - "0.8.1", - sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f", - preferred=True, + "1.0.0a5", + sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3", + deprecated=True, ) + version("0.8.1", sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f") depends_on("python@2.7,3.4:3", type=("build", "run")) depends_on("python@3.7:3", when="@1:", type=("build", "run")) depends_on("py-poetry-core@1:", type="build") - depends_on("py-poetry-core@1", when="@1:", type="build") - depends_on("py-clikit@0.6.0:0.6", when="@0.8.1", type=("build", "run")) - depends_on("py-pylev@1.3:1", when="@1:", type=("build", "run")) - depends_on("py-crashtest@0.3.1:0.3", when="@1:", type=("build", "run")) + depends_on("py-poetry-core@1.1:1", when="@1:2.0.0", type="build") + depends_on("py-poetry-core@1.1.0:", when="@2.0.1:", type="build") + depends_on("py-clikit@0.6", when="@0.8.1", type=("build", "run")) + depends_on("py-pylev@1.3:1", when="@1.0.0a5", type=("build", 
"run")) + depends_on("py-crashtest@0.4.1:0.4", when="@1:", type=("build", "run")) + depends_on("py-rapidfuzz@2.2:2", when="@1:2.0", type=("build", "run")) + depends_on("py-rapidfuzz@3", when="@2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cppy/package.py b/var/spack/repos/builtin/packages/py-cppy/package.py index b759799e1b65fc..c697695cce8859 100644 --- a/var/spack/repos/builtin/packages/py-cppy/package.py +++ b/var/spack/repos/builtin/packages/py-cppy/package.py @@ -12,8 +12,6 @@ class PyCppy(PythonPackage): homepage = "https://github.com/nucleic/cppy" pypi = "cppy/cppy-1.1.0.tar.gz" - maintainers("iarspider") - version("1.2.1", sha256="83b43bf17b1085ac15c5debdb42154f138b928234b21447358981f69d0d6fe1b") version("1.1.0", sha256="4eda6f1952054a270f32dc11df7c5e24b259a09fddf7bfaa5f33df9fb4a29642") diff --git a/var/spack/repos/builtin/packages/py-crashtest/package.py b/var/spack/repos/builtin/packages/py-crashtest/package.py index f16c993168ec74..d2dade000db470 100644 --- a/var/spack/repos/builtin/packages/py-crashtest/package.py +++ b/var/spack/repos/builtin/packages/py-crashtest/package.py @@ -13,9 +13,11 @@ class PyCrashtest(PythonPackage): homepage = "https://github.com/sdispater/crashtest" pypi = "crashtest/crashtest-0.3.1.tar.gz" + version("0.4.1", sha256="80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce") version("0.4.0", sha256="d629b00f1d4e79c316909f4eb763bbcb29b510d65fbde1365a1ceb93ab7fa4c8") version("0.3.1", sha256="42ca7b6ce88b6c7433e2ce47ea884e91ec93104a4b754998be498a8e6c3d37dd") depends_on("python@3.6:3", type=("build", "run")) depends_on("python@3.7:3", when="@0.4.0:", type=("build", "run")) depends_on("py-poetry-core@1:", type="build") + depends_on("py-poetry-core@1.1.0:", when="@0.4.1:", type="build") diff --git a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py index ee18886ed1b15f..aa99a09d8c1267 100644 --- a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py +++ b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py @@ -15,10 +15,17 @@ class PyDlioProfilerPy(PythonPackage): version("develop", branch="dev") version("master", branch="master") + version("0.0.2", tag="v0.0.2", commit="b72144abf1499e03d1db87ef51e780633e9e9533") version("0.0.1", tag="v0.0.1", commit="28affe716211315dd6936ddc8e25ce6c43cdf491") - depends_on("cpp-logger@0.0.1") - depends_on("brahma@0.0.1") - depends_on("gotcha@develop") + + depends_on("cpp-logger@0.0.1", when="@:0.0.1") + depends_on("cpp-logger@0.0.2", when="@0.0.2:") + depends_on("brahma@0.0.1", when="@:0.0.1") + depends_on("brahma@0.0.2", when="@0.0.2:") + depends_on("gotcha@1.0.4", when="@:0.0.1") + depends_on("gotcha@1.0.5", when="@0.0.2:") + depends_on("gotcha@1.0.5", when="@0.0.2:") + depends_on("yaml-cpp@0.6.3", when="@0.0.2:") depends_on("py-setuptools@42:", type="build") depends_on("py-pybind11", type=("build", "run")) depends_on("py-ninja", type="build") diff --git a/var/spack/repos/builtin/packages/py-elephant/package.py b/var/spack/repos/builtin/packages/py-elephant/package.py index 41f94db3a5eeac..63c2fea875ed6a 100644 --- a/var/spack/repos/builtin/packages/py-elephant/package.py +++ b/var/spack/repos/builtin/packages/py-elephant/package.py @@ -16,6 +16,10 @@ class PyElephant(PythonPackage): # list of GitHub accounts to notify when the package is updated. 
maintainers("Moritz-Alexander-Kern") + version("master", branch="master") + version("0.14.0", sha256="02ce3b2a8d08dc19828f95384551339ea0946bc405c1db9aace54135417c2b0f") + version("0.13.0", sha256="2c6463cf9ace41631f2af196c5b80b468bf1c4b264d3a6b1ea0fb587d9e7dd67") + version("0.12.0", sha256="81f8d668f92d8688344bb7a9c5abaa8438d824560c935a411e6e36ddf7dc7c72") version("0.11.2", sha256="f8759fff0bbb136ae4ffc8d1eacadeea8ba56610d705c3bf207de87ada3ba240") version("0.11.1", sha256="d604a202583440fdf9d95d42cef50a410bd74fcaaa1a925b139435f27ab012ef") version("0.11.0", sha256="7b547964dbd196361edc922db2c5a7c0c886ef1effcca6c6dc7adb06f966a3be") @@ -28,40 +32,44 @@ class PyElephant(PythonPackage): version("0.3.0", sha256="747251ccfb5820bdead6391411b5faf205b4ddf3ababaefe865f50b16540cfef") variant("docs", default=False, description="Install documentation dependencies") - variant("pandas", default=False, description="Build with pandas", when="@0.3.0:0.4.1") variant( "extras", default=False, description="Build with extras for GPFA, ASSET", when="@0.6.4:" ) depends_on("py-setuptools", type="build") - depends_on("python@3.7:", type=("build", "run"), when="@0.11.0:") + + depends_on("python@3.8:", type=("build", "run"), when="@0.12.0:") + + depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:") + depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0") + depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0") depends_on("py-neo@0.3.4:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-numpy@1.19.5:", type=("build", "run"), when="@0.12.0:") + depends_on("py-numpy@1.18.1:1.23.5", type=("build", "run"), when="@0.6.4:0.11.2") depends_on("py-numpy@1.8.2:", type=("build", "run"), when="@0.3.0:0.4.1") - depends_on("py-quantities@0.10.1:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.14.0:") + depends_on("py-quantities@0.12.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.6.4:0.13.0") + depends_on("py-quantities@0.10.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:") depends_on("py-scipy@0.14.0:", type=("build", "run"), when="@0.3.0:0.4.1") - depends_on("py-pandas@0.14.1:", type=("build", "run"), when="+pandas") - depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs") + depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") + depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:") + + depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras") + depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras") + depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras") + depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras") + depends_on("py-numpydoc@1.1.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs") - depends_on("py-sphinx@2.4.3:", type=("build", "run"), when="@0.7.0:0.8.0+docs") - depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs") + depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs") depends_on("py-jupyter@1.0.0:", type=("build", "run"), when="@0.7.0:+docs") - depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs") + depends_on("py-sphinx@2.4.3:", type=("build", "run"), 
when="@0.7.0:0.8.0+docs") + depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs") depends_on("py-nbsphinx@0.8.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-sphinxcontrib-bibtex@1.0.0", type=("build", "run"), when="@0.7.0:+docs") - depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-sphinxcontrib-bibtex@1.0.1:", type=("build", "run"), when="@0.7.0:+docs") depends_on("py-sphinx-tabs@1.3.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs") + depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs") depends_on("py-matplotlib@3.3.2:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras") - depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras") - depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras") - depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras") - depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:") - depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0") - depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0") - depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@0.6.4:") - depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.6.4:") - depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:") - depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") - depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:") + depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs") diff --git a/var/spack/repos/builtin/packages/py-geomdl/package.py b/var/spack/repos/builtin/packages/py-geomdl/package.py new file mode 100644 index 00000000000000..6a2e9f27603796 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-geomdl/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGeomdl(PythonPackage): + """Object-oriented pure Python B-Spline and NURBS library.""" + + homepage = "https://pypi.org/project/geomdl" + pypi = "geomdl/geomdl-5.3.1.tar.gz" + + version("5.3.1", sha256="e81a31b4d5f111267b16045ba1d9539235a98b2cff5e4bad18f7ddcd4cb804c8") + + depends_on("py-setuptools@40.6.3:", type="build") + + # For compiling geomdl.core module + depends_on("py-cython@:2", type="build") + + variant("viz", default=False, description="Add viz dependencies") + + depends_on("py-numpy@1.15.4:", type="run", when="+viz") + depends_on("py-matplotlib@2.2.3:", type="run", when="+viz") + depends_on("py-plotly", type="run", when="+viz") diff --git a/var/spack/repos/builtin/packages/py-gitpython/package.py b/var/spack/repos/builtin/packages/py-gitpython/package.py index 69a0f5ec393acc..5cc534437a4937 100644 --- a/var/spack/repos/builtin/packages/py-gitpython/package.py +++ b/var/spack/repos/builtin/packages/py-gitpython/package.py @@ -12,6 +12,8 @@ class PyGitpython(PythonPackage): homepage = "https://gitpython.readthedocs.org" pypi = "GitPython/GitPython-3.1.12.tar.gz" + version("3.1.40", sha256="22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4") + version("3.1.34", sha256="85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd") version("3.1.27", sha256="1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704") version("3.1.24", sha256="df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5") version("3.1.23", sha256="aaae7a3bfdf0a6db30dc1f3aeae47b71cd326d86b936fe2e158aa925fdf1471c") diff --git a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py index e2cc8d79db6289..e1fd541b47b356 100644 --- a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py @@ -41,8 +41,10 @@ def setup_build_environment(self, env): for dep in self.spec.dependencies(deptype="link"): query = self.spec[dep.name] - env.prepend_path("LIBRARY_PATH", query.libs.directories[0]) - env.prepend_path("CPATH", query.headers.directories[0]) + for p in query.libs.directories: + env.prepend_path("LIBRARY_PATH", p) + for p in query.headers.directories: + env.prepend_path("CPATH", p) def patch(self): if self.spec.satisfies("%fj"): diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index 8a185d480265f0..8f319edd7258c5 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -55,8 +55,10 @@ def setup_build_environment(self, env): for dep in self.spec.dependencies(deptype="link"): query = self.spec[dep.name] - env.prepend_path("LIBRARY_PATH", query.libs.directories[0]) - env.prepend_path("CPATH", query.headers.directories[0]) + for p in query.libs.directories: + env.prepend_path("LIBRARY_PATH", p) + for p in query.headers.directories: + env.prepend_path("CPATH", p) def patch(self): filter_file("-std=gnu99", "", "setup.py") diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index 262ce6445ac74a..60de24072a8291 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -38,39 +38,39 @@ class PyH5py(PythonPackage): variant("mpi", default=True, description="Build with MPI support") 
# Python versions - depends_on("python@:3.9", type=("build", "run"), when="@:2.8") - depends_on("python@3.6:", type=("build", "run"), when="@3:3.1") depends_on("python@3.7:", type=("build", "run"), when="@3.2:") + depends_on("python@3.6:", type=("build", "run"), when="@3:3.1") + depends_on("python@:3.9", type=("build", "run"), when="@:2.8") # Build dependencies - depends_on("py-cython@0.23:0", type="build", when="@:2") - depends_on("py-cython@0.29:0", type=("build"), when="@3:") - depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8") depends_on("py-cython@0.29.15:0", type=("build"), when="@3:3.7 ^python@3.9.0:") + depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8") + depends_on("py-cython@0.29:0", type=("build"), when="@3:") + depends_on("py-cython@0.23:0", type="build", when="@:2") depends_on("py-pkgconfig", type="build") - depends_on("py-setuptools", type="build") depends_on("py-setuptools@61:", type="build", when="@3.8.0:") + depends_on("py-setuptools", type="build") depends_on("py-wheel", type="build", when="@3:") # Build and runtime dependencies depends_on("py-cached-property@1.5:", type=("build", "run"), when="@:3.6 ^python@:3.7") - depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2") - depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:") - depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8") depends_on("py-numpy@1.19.3:", type=("build", "run"), when="@3:3.5 ^python@3.9.0:") + depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8") + depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:") + depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2") depends_on("py-six", type=("build", "run"), when="@:2") # Link dependencies (py-h5py v2 cannot build against HDF5 1.12 regardless # of API setting) - depends_on("hdf5@1.8.4:1.11 +hl", when="@:2") - depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7") depends_on("hdf5@1.8.4:1.14 +hl", when="@3.8:") + depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7") + depends_on("hdf5@1.8.4:1.11 +hl", when="@:2") # MPI dependencies depends_on("hdf5+mpi", when="+mpi") depends_on("mpi", when="+mpi") - depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run")) depends_on("py-mpi4py@3.0.2:", when="@3: +mpi", type=("build", "run")) + depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run")) def flag_handler(self, name, flags): if name == "cflags": diff --git a/var/spack/repos/builtin/packages/py-imagecodecs/package.py b/var/spack/repos/builtin/packages/py-imagecodecs/package.py index 03195578b6087f..e156b28e77b055 100644 --- a/var/spack/repos/builtin/packages/py-imagecodecs/package.py +++ b/var/spack/repos/builtin/packages/py-imagecodecs/package.py @@ -66,11 +66,12 @@ def patch(self): ) # 239 filter_file( - "append\('/usr/include/jxrlib'\)", # noqa: W605 + "append('/usr/include/jxrlib')", "extend(('{0}/libjxr/image', '{0}/libjxr/common', '{0}/libjxr/glue'))".format( # noqa: E501 spec["jxrlib-debian"].prefix.include ), "setup.py", + string=True, ) # 367 diff --git a/var/spack/repos/builtin/packages/py-jarowinkler/package.py b/var/spack/repos/builtin/packages/py-jarowinkler/package.py new file mode 100644 index 00000000000000..9256776613a20c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-jarowinkler/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJarowinkler(PythonPackage): + """library for fast approximate string matching using Jaro and Jaro-Winkler similarity.""" + + homepage = "https://github.com/maxbachmann/JaroWinkler" + pypi = "jarowinkler/jarowinkler-1.2.3.tar.gz" + + maintainers("LydDeb") + + version("1.2.3", sha256="af28ea284cfbd1b21b29ff94b759f20e94e4f7c06f424b0b4702e701c2a21668") + + depends_on("py-setuptools@42:", type="build") + depends_on("py-scikit-build@0.15.0", type="build") + depends_on("py-rapidfuzz-capi@1.0.5", type="build") diff --git a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py index 0d2274f997fb33..261e43dbd65543 100644 --- a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py +++ b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py @@ -15,9 +15,11 @@ class PyJsonpathNg(PythonPackage): homepage = "https://github.com/h2non/jsonpath-ng" pypi = "jsonpath-ng/jsonpath-ng-1.5.2.tar.gz" + version("1.6.0", sha256="5483f8e9d74c39c9abfab554c070ae783c1c8cbadf5df60d561bc705ac68a07e") + version("1.5.3", sha256="a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567") version("1.5.2", sha256="144d91379be14d9019f51973bd647719c877bfc07dc6f3f5068895765950c69d") depends_on("py-setuptools", type="build") depends_on("py-ply", type=("build", "run")) - depends_on("py-decorator", type=("build", "run")) - depends_on("py-six", type=("build", "run")) + depends_on("py-decorator", type=("build", "run"), when="@:1.5") + depends_on("py-six", type=("build", "run"), when="@:1.5") diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 6f13c380ffb840..257b0acd7f0953 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -32,7 +32,7 @@ class PyKombu(PythonPackage): depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) depends_on("py-amqp@5.0.9:5.0", when="@5.2.3", type=("build", "run")) depends_on("py-vine", when="@5.1.0:", type=("build", "run")) - depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="python@:3.7") - depends_on("py-cached-property", type=("build", "run"), when="python@:3.7") + depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="^python@:3.7") + depends_on("py-cached-property", type=("build", "run"), when="^python@:3.7") depends_on("py-redis@3.4.1:3,4.0.2:", when="+redis", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py index d47a3d68fb932e..57dd42fe76b7c0 100644 --- a/var/spack/repos/builtin/packages/py-libensemble/package.py +++ b/var/spack/repos/builtin/packages/py-libensemble/package.py @@ -12,13 +12,14 @@ class PyLibensemble(PythonPackage): """Library for managing ensemble-like collections of computations.""" homepage = "https://libensemble.readthedocs.io" - pypi = "libensemble/libensemble-1.0.0.tar.gz" + pypi = "libensemble/libensemble-1.1.0.tar.gz" git = "https://github.com/Libensemble/libensemble.git" maintainers("shuds13", "jlnav") tags = ["e4s"] version("develop", branch="develop") + version("1.1.0", sha256="3e3ddc4233272d3651e9d62c7bf420018930a4b9b135ef9ede01d5356235c1c6") version("1.0.0", sha256="b164e044f16f15b68fd565684ad8ce876c93aaeb84e5078f4ea2a29684b110ca") version("0.10.2", 
sha256="ef8dfe5d233dcae2636a3d6aa38f3c2ad0f42c65bd38f664e99b3e63b9f86622") version("0.10.1", sha256="56ae42ec9a28d3df8f46bdf7d016db9526200e9df2a28d849902e3c44fe5c1ba") diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index 8bec9806ee3478..032aa9c817aca7 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -15,6 +15,8 @@ class PyLightning(PythonPackage): maintainers("adamjstewart") + version("2.1.2", sha256="3b2599a8a719916cb03526e6570356809729680c6cda09391232e2aba0a4ed4b") + version("2.1.1", sha256="865491940d20a9754eac7494aa18cab893e0c2b31e83743349eeeaf31dfb52db") version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93") version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb") version("2.0.8", sha256="db914e211b5c3b079a821be6e4344e72d0a729163676a65c4e00aae98390ae7b") diff --git a/var/spack/repos/builtin/packages/py-macs3/package.py b/var/spack/repos/builtin/packages/py-macs3/package.py index be94b9c290cd7c..1a218e44a6ea98 100644 --- a/var/spack/repos/builtin/packages/py-macs3/package.py +++ b/var/spack/repos/builtin/packages/py-macs3/package.py @@ -24,3 +24,5 @@ class PyMacs3(PythonPackage): depends_on("py-numpy@1.19:", type=("build", "run")) depends_on("py-cykhash@2", type=("build", "run")) depends_on("py-hmmlearn@0.3:", type=("build", "run")) + + depends_on("zlib-api") diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 18d8d98f0ac880..3ca76ae4d3fe89 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -24,8 +24,10 @@ class PyMatplotlib(PythonPackage): "mpl_toolkits.mplot3d.tests", ] + version("3.8.2", sha256="01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1") version("3.8.1", sha256="044df81c1f6f3a8e52d70c4cfcb44e77ea9632a10929932870dfaa90de94365d") version("3.8.0", sha256="df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69") + version("3.7.4", sha256="7cd4fef8187d1dd0d9dcfdbaa06ac326d396fb8c71c647129f0bf56835d77026") version("3.7.3", sha256="f09b3dd6bdeb588de91f853bbb2d6f0ff8ab693485b0c49035eaa510cb4f142e") version("3.7.2", sha256="a8cdb91dddb04436bd2f098b8fdf4b81352e68cf4d2c6756fcc414791076569b") version("3.7.1", sha256="7b73305f25eab4541bd7ee0b96d87e53ae9c9f1823be5659b806cd85786fe882") diff --git a/var/spack/repos/builtin/packages/py-mypy/package.py b/var/spack/repos/builtin/packages/py-mypy/package.py index 68896c792adb98..9f7e22c2bcc634 100644 --- a/var/spack/repos/builtin/packages/py-mypy/package.py +++ b/var/spack/repos/builtin/packages/py-mypy/package.py @@ -15,6 +15,13 @@ class PyMypy(PythonPackage): maintainers("adamjstewart") + version("1.7.0", sha256="1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc") + version("1.6.1", sha256="4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1") + version("1.6.0", sha256="4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f") + version("1.5.1", sha256="b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92") + version("1.5.0", sha256="f3460f34b3839b9bc84ee3ed65076eb827cd99ed13ed08d723f9083cada4a212") + version("1.4.1", sha256="9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b") + version("1.4.0", 
sha256="de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042") version("1.3.0", sha256="e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11") version("1.2.0", sha256="f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1") version("1.1.1", sha256="ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f") @@ -43,31 +50,34 @@ class PyMypy(PythonPackage): version("0.670", sha256="e80fd6af34614a0e898a57f14296d0dacb584648f0339c2e000ddbf0f4cc2f8d") # pyproject.toml - depends_on("py-setuptools@40.6.2:", when="@0.790:", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools@40.6.2:", when="@0.790:", type="build") + depends_on("py-setuptools", type="build") depends_on("py-wheel@0.30:", when="@0.790:", type="build") + depends_on("py-types-psutil", when="@0.981:", type="build") + depends_on("py-types-setuptools", when="@0.981:", type="build") + + # setup.py + depends_on("python@3.8:", when="@1.5:", type=("build", "run")) + depends_on("python@3.7:", when="@0.981:", type=("build", "run")) + depends_on("py-typing-extensions@4.1:", when="@1.5:", type=("build", "run")) depends_on("py-typing-extensions@3.10:", when="@0.930:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4:", when="@0.700:", type=("build", "run")) depends_on("py-mypy-extensions@1:", when="@1.1:", type=("build", "run")) depends_on("py-mypy-extensions@0.4.3:", when="@0.930:1.0", type=("build", "run")) depends_on("py-mypy-extensions@0.4.3:0.4", when="@0.700:0.929", type=("build", "run")) depends_on("py-mypy-extensions@0.4.0:0.4", when="@:0.699", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1", when="@0.920: ^python@:3.7", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run")) - depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run")) depends_on("py-tomli@1.1:", when="@0.950: ^python@:3.10", type=("build", "run")) depends_on("py-tomli@1.1:", when="@0.930:0.949", type=("build", "run")) depends_on("py-tomli@1.1:2", when="@0.920:0.929", type=("build", "run")) - depends_on("py-types-psutil", when="@0.981:", type="build") - depends_on("py-types-setuptools", when="@0.981:", type="build") - depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:", type="build") - - # setup.py - depends_on("python@3.7:", when="@0.981:", type=("build", "run")) # Historical dependencies + depends_on("py-types-typed-ast@1.5.8.5:1.5", when="@1.2:1.4", type="build") + depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:1.1", type="build") depends_on("py-toml", when="@0.900:0.910", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1", when="@0.920:1.4 ^python@:3.7", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run")) + depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run")) # https://github.com/python/mypy/issues/13627 conflicts("^python@3.10.7:", when="@:0.971") diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py index 5c39cf271cfc14..95a38f5b763c0a 100644 --- a/var/spack/repos/builtin/packages/py-nanobind/package.py +++ b/var/spack/repos/builtin/packages/py-nanobind/package.py @@ -23,6 +23,9 @@ class PyNanobind(PythonPackage): 
maintainers("chrisrichardson", "garth-wells", "ma595") version("master", branch="master", submodules=True) + version( + "1.8.0", tag="v1.8.0", commit="1a309ba444a47e081dc6213d72345a2fbbd20795", submodules=True + ) version( "1.7.0", tag="v1.7.0", commit="555ec7595c89c60ce7cf53e803bc226dc4899abb", submodules=True ) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 8ee118d98e917e..05f5ceec494098 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -20,6 +20,7 @@ class PyNumpy(PythonPackage): maintainers("adamjstewart", "rgommers") version("main", branch="main") + version("1.26.2", sha256="f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea") version("1.26.1", sha256="c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe") version("1.26.0", sha256="f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf") version("1.25.2", sha256="fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760") diff --git a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py index 2b1af9e19a3691..93804505fb5e5c 100644 --- a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py +++ b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py @@ -170,20 +170,20 @@ class PyNvidiaDali(PythonPackage): ) cuda120_versions = ( - "1.27.0-cuda120", - "1.26.0-cuda120", - "1.25.0-cuda120", - "1.24.0-cuda120", - "1.23.0-cuda120", - "1.22.0-cuda120", + "@1.27.0-cuda120", + "@1.26.0-cuda120", + "@1.25.0-cuda120", + "@1.24.0-cuda120", + "@1.23.0-cuda120", + "@1.22.0-cuda120", ) cuda110_versions = ( - "1.27.0-cuda110", - "1.26.0-cuda110", - "1.25.0-cuda110", - "1.24.0-cuda110", - "1.23.0-cuda110", - "1.22.0-cuda110", + "@1.27.0-cuda110", + "@1.26.0-cuda110", + "@1.25.0-cuda110", + "@1.24.0-cuda110", + "@1.23.0-cuda110", + "@1.22.0-cuda110", ) for v in cuda120_versions: diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 3dea26ff2c4111..9d91ef08ace82c 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,6 +17,7 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") + version("2.1.3", sha256="22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f") version("2.1.2", sha256="52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3") version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") diff --git a/var/spack/repos/builtin/packages/py-pdbfixer/package.py b/var/spack/repos/builtin/packages/py-pdbfixer/package.py index 2da9f24d1ac086..2dbd4aa3eec7bf 100644 --- a/var/spack/repos/builtin/packages/py-pdbfixer/package.py +++ b/var/spack/repos/builtin/packages/py-pdbfixer/package.py @@ -18,6 +18,6 @@ class PyPdbfixer(PythonPackage): version("1.7", sha256="a0bef3c52a7bbe69a6aea5333f51f3e7d158339be5829aed19b0344bd66d4eea") depends_on("py-setuptools", type="build") - depends_on("openmm@7.1:7.5", type=("build", "run"), when="1.7") + depends_on("openmm@7.1:7.5", type=("build", "run"), when="@1.7") depends_on("openmm@7.6:", type=("build", "run"), when="@1.8:") depends_on("py-numpy", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pygithub/package.py 
b/var/spack/repos/builtin/packages/py-pygithub/package.py index 24885aa333d76b..867403bc855724 100644 --- a/var/spack/repos/builtin/packages/py-pygithub/package.py +++ b/var/spack/repos/builtin/packages/py-pygithub/package.py @@ -7,16 +7,25 @@ class PyPygithub(PythonPackage): - """Use the full Github API v3""" + """Typed interactions with the GitHub API v3""" homepage = "https://pygithub.readthedocs.io/" pypi = "PyGithub/PyGithub-1.54.1.tar.gz" + version("2.1.1", sha256="ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c") + version("1.59.1", sha256="c44e3a121c15bf9d3a5cc98d94c9a047a5132a9b01d22264627f58ade9ddc217") version("1.55", sha256="1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283") depends_on("python@3.6:", type=("build", "run")) + depends_on("python@3.7:", type=("build", "run"), when="@1.57:") + depends_on("py-setuptools", type="build") - depends_on("py-deprecated", type=("build", "run")) - depends_on("py-pyjwt@2:", type=("build", "run")) + depends_on("py-setuptools-scm", type="build", when="@1.58.1:") depends_on("py-pynacl@1.4.0:", type=("build", "run")) + depends_on("py-python-dateutil", type=("build", "run"), when="@2.1.0:") depends_on("py-requests@2.14.0:", type=("build", "run")) + depends_on("py-pyjwt@2.4.0:", type=("build", "run")) + depends_on("py-pyjwt@2.4.0: +crypto", type=("build", "run"), when="@1.58.1:") + depends_on("py-typing-extensions@4:", type=("build", "run"), when="@2.1.0:") + depends_on("py-urllib3@1.26.0:", type=("build", "run"), when="@2.1.0:") + depends_on("py-deprecated", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pynucleus/package.py b/var/spack/repos/builtin/packages/py-pynucleus/package.py index c4f2f82b7a24ef..4194bb2c609e77 100644 --- a/var/spack/repos/builtin/packages/py-pynucleus/package.py +++ b/var/spack/repos/builtin/packages/py-pynucleus/package.py @@ -19,6 +19,9 @@ class PyPynucleus(PythonPackage): for ref in refs: version(ref, branch=ref) + variant("examples", default=True, description="Install examples") + variant("tests", default=True, description="Install tests") + depends_on("python@3.10:", type=("build", "run")) depends_on("py-mpi4py@2.0.0:", type=("build", "link", "run")) depends_on("py-cython@0.29.32:", type=("build", "run")) @@ -30,14 +33,14 @@ class PyPynucleus(PythonPackage): depends_on("py-h5py", type=("build", "run")) depends_on("py-tabulate", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) - depends_on("py-matplotlib+latex", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) depends_on("py-scikit-sparse", type=("build", "run")) depends_on("py-modepy", type=("build", "run")) depends_on("py-meshpy", type=("build", "run")) depends_on("py-pytools", type=("build", "run")) depends_on("py-psutil", type="run") - - variant("examples", default=True, description="Install examples") + depends_on("py-pytest", when="+tests", type="run") + depends_on("py-pytest-html", when="+tests", type="run") import_modules = [ "PyNucleus", @@ -64,5 +67,9 @@ def install_python(self): def install_additional_files(self): spec = self.spec prefix = self.prefix - if "+examples" in spec: + if "+examples" in spec or "+tests" in spec: install_tree("drivers", prefix.drivers) + if "+examples" in spec: + install_tree("examples", prefix.examples) + if "+tests" in spec: + install_tree("tests", prefix.tests) diff --git a/var/spack/repos/builtin/packages/py-quantities/package.py b/var/spack/repos/builtin/packages/py-quantities/package.py index 
024901ab674b08..0d9b38f69e418e 100644 --- a/var/spack/repos/builtin/packages/py-quantities/package.py +++ b/var/spack/repos/builtin/packages/py-quantities/package.py @@ -13,18 +13,19 @@ class PyQuantities(PythonPackage): pypi = "quantities/quantities-0.12.1.tar.gz" maintainers("apdavison") + version("0.14.1", sha256="efeafffc0c0364f891a9327239cd12496bccb55cd037a6d1bf44de706f722877") version("0.13.0", sha256="0fde20115410de21cefa786f3aeae69c1b51bb19ee492190324c1da705e61a81") version("0.12.5", sha256="67546963cb2a519b1a4aa43d132ef754360268e5d551b43dd1716903d99812f0") version("0.12.4", sha256="a33d636d1870c9e1127631185d89b0105a49f827d6aacd44ad9d8f151f331d8b") version("0.12.3", sha256="582f3c7aeba897846761e966615e01202a5e5d06add304492931b05085d19883") - depends_on("python@2.7.0:2.7,3.4:3.7", type=("build", "run"), when="@0.12.3") - depends_on("python@2.7.0:2.7,3.4:3.8", type=("build", "run"), when="@0.12.4:0.12.5") - depends_on("python@3.7:3.10", type=("build", "run"), when="@0.13:") + depends_on("python@3.8:", type=("build", "run"), when="@0.14:") - # pip silently replaces distutils with setuptools + depends_on("py-setuptools@61:", type="build", when="@0.14:") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm+toml", type="build", when="@0.14:") - depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3") + depends_on("py-numpy@1.19:", type=("build", "run"), when="@0.14:") + depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13") depends_on("py-numpy@1.8.2:1.17", type=("build", "run"), when="@0.12.4:0.12") - depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13.0:") + depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3") diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py new file mode 100644 index 00000000000000..af0ebea5b21c49 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyRapidfuzzCapi(PythonPackage): + """ + C-API of RapidFuzz, which can be used to extend RapidFuzz from separate packages. 
+ """ + + homepage = "https://github.com/maxbachmann/rapidfuzz_capi" + pypi = "rapidfuzz_capi/rapidfuzz_capi-1.0.5.tar.gz" + + maintainers("LydDeb") + + version("1.0.5", sha256="b3af179874b28364ba1b7850e37d0d353de9cf5b844e3569c023b74da3a9c68e") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py index 5bfd1563b5bb78..a1213a1b379306 100644 --- a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py +++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py @@ -13,12 +13,16 @@ class PyRapidfuzz(PythonPackage): pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz" version("3.3.1", sha256="6783b3852f15ed7567688e2e358757a7b4f38683a915ba5edc6c64f1a3f0b450") + version("2.2.0", sha256="acb8839aac452ec61a419fdc8799e8a6e6cd21bed53d04678cdda6fba1247e2f") version("1.8.2", sha256="d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90") depends_on("python", type=("build", "link", "run")) - depends_on("py-setuptools@42:", when="@3:", type="build") + depends_on("py-setuptools@42:", when="@2:", type="build") depends_on("py-setuptools", type="build") depends_on("py-scikit-build@0.17", when="@3:", type="build") + depends_on("py-scikit-build@0.13:", when="@2.2:", type="build") + depends_on("py-rapidfuzz-capi@1.0.5", when="@2", type="build") + depends_on("py-jarowinkler@1.2.0:1", when="@2", type=("build", "run")) # CMakeLists.txt depends_on("cmake@3.12:", type="build") diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 51f89f7d4cdf92..5d053cbb91d1c6 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -17,6 +17,7 @@ class PyScipy(PythonPackage): version("main", branch="main") version("master", branch="master", deprecated=True) + version("1.11.4", sha256="90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa") version("1.11.3", sha256="bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd") version("1.11.2", sha256="b29318a5e39bd200ca4381d80b065cdf3076c7d7281c5e36569e99273867f61d") version("1.11.1", sha256="fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289") diff --git a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py index 1ad767902d9175..8189fa0c49cff8 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py @@ -29,5 +29,5 @@ class PyTensorflowDatasets(PythonPackage): depends_on("py-tensorflow-metadata", type=("build", "run")) depends_on("py-termcolor", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) - depends_on("py-typing-extensions", type=("build", "run"), when="python@:3.7") - depends_on("py-importlib-resources", type=("build", "run"), when="python@:3.8") + depends_on("py-typing-extensions", type=("build", "run"), when="^python@:3.7") + depends_on("py-importlib-resources", type=("build", "run"), when="^python@:3.8") diff --git a/var/spack/repos/builtin/packages/py-torch-cluster/package.py b/var/spack/repos/builtin/packages/py-torch-cluster/package.py index 13f59512b7faa7..69bb2161e0dfe3 100644 --- a/var/spack/repos/builtin/packages/py-torch-cluster/package.py +++ b/var/spack/repos/builtin/packages/py-torch-cluster/package.py @@ -7,24 +7,29 @@ class PyTorchCluster(PythonPackage): - """This 
package consists of a small extension library of - highly optimized graph cluster algorithms for the use in - PyTorch.""" + """This package consists of a small extension library of highly optimized graph cluster + algorithms for the use in PyTorch. + """ homepage = "https://github.com/rusty1s/pytorch_cluster" url = "https://github.com/rusty1s/pytorch_cluster/archive/1.5.7.tar.gz" + version("1.6.3", sha256="0e2b08095e03cf87ce9b23b7a7352236a25d3ed92d92351dc020fd927ea8dbfe") version("1.5.8", sha256="95c6e81e9c4a6235e1b2152ab917021d2060ad995199f6bd7fb39986d37310f0") version("1.5.7", sha256="71701d2f7f3e458ebe5904c982951349fdb60e6f1654e19c7e102a226e2de72e") variant("cuda", default=False, description="Enables CUDA support") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") depends_on("py-scipy", type=("build", "run")) - depends_on("py-torch+cuda", when="+cuda") - depends_on("py-torch~cuda", when="~cuda") + depends_on("py-torch+cuda", when="+cuda", type=("build", "link", "run")) + depends_on("py-torch~cuda", when="~cuda", type=("build", "link", "run")) + + # https://github.com/rusty1s/pytorch_cluster/issues/120 + depends_on("py-torch~openmp", when="@:1.5 %apple-clang", type=("build", "link", "run")) + + # Historical dependencies + depends_on("py-pytest-runner", when="@:1.5", type="build") def setup_build_environment(self, env): if "+cuda" in self.spec: diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index d2edd9453837bf..e93522cd5c7e11 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -25,6 +25,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("2.1.1", tag="v2.1.1", commit="4c55dc50355d5e923642c59ad2a23d6ad54711e7") version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d") version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5") version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e") @@ -114,9 +115,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # https://github.com/pytorch/pytorch/issues/77811 conflicts("+qnnpack", when="platform=darwin target=aarch64:") - # https://github.com/pytorch/pytorch/issues/80805 - conflicts("+openmp", when="platform=darwin target=aarch64:") - # https://github.com/pytorch/pytorch/issues/97397 conflicts( "~tensorpipe", @@ -491,9 +489,8 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): enable_or_disable("cuda") if "+cuda" in self.spec: - # cmake/public/cuda.cmake - # cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake - env.unset("CUDA_ROOT") + env.set("CUDA_HOME", self.spec["cuda"].prefix) # Linux/macOS + env.set("CUDA_PATH", self.spec["cuda"].prefix) # Windows torch_cuda_arch = ";".join( "{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value ) diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index d07ce1de2182c7..94bfd97e3e487c 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -15,6 +15,7 @@ class PyTorchaudio(PythonPackage): submodules = True version("main", branch="main") + version("2.1.1", tag="v2.1.1", 
commit="db624844f5c95bb7618fe5a5f532bf9b68efeb45") version("2.1.0", tag="v2.1.0", commit="6ea1133706801ec6e81bb29142da2e21a8583a0a") version("2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4") version("2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237") @@ -56,6 +57,7 @@ class PyTorchaudio(PythonPackage): depends_on("sox") depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@2.1.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@2.1.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@2.0.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@2.0.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index fd9367f31d0179..5c1eedcfb64c78 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.7.1", sha256="1b6589336776ccba19fd3bf435588416105d372f6b85d58a9f2b008286f483bf") version("0.7.0", sha256="0b444719c3abc67201ed0fea92ea9c4100e7f36551ba0d19a09446cc11154eb3") version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257") version("0.6.0", sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b") @@ -38,6 +39,7 @@ class PyTorchdata(PythonPackage): # https://github.com/pytorch/data#version-compatibility depends_on("py-torch@main", when="@main", type=("build", "run")) + depends_on("py-torch@2.1.1", when="@0.7.1", type=("build", "run")) depends_on("py-torch@2.1.0", when="@0.7.0", type=("build", "run")) depends_on("py-torch@2.0.1", when="@0.6.1", type=("build", "run")) depends_on("py-torch@2.0.0", when="@0.6.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchgeo/package.py b/var/spack/repos/builtin/packages/py-torchgeo/package.py index f5ef2ddc4bc783..b9069126fe0f53 100644 --- a/var/spack/repos/builtin/packages/py-torchgeo/package.py +++ b/var/spack/repos/builtin/packages/py-torchgeo/package.py @@ -16,6 +16,7 @@ class PyTorchgeo(PythonPackage): maintainers("adamjstewart", "calebrob6") version("main", branch="main") + version("0.5.1", sha256="5f86a34d18fe36eeb9146b057b21e5356252ef8ab6a9db33feebb120a01feff8") version("0.5.0", sha256="2bc2f9c4a19a569790cb3396499fdec17496632b0e52b86be390a2cc7a1a7033") version("0.4.1", sha256="a3692436bf63df8d2f9b76d16eea5ee309dd1bd74e0fde6e64456abfdb2a5b58") version("0.4.0", sha256="a0812487205aa2db7bc92119d896ae4bf4f1014e6fdc0ce0f75bcb24fada6613") diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index 180b555069bc5e..6457a832a7866a 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -17,6 +17,7 @@ class PyTorchtext(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.1", tag="v0.16.1", commit="66671007c84e07386da3c04e5ca403b8a417c8e5") version("0.16.0", tag="v0.16.0", commit="4e255c95c76b1ccde4f6650391c0bc30650d6dbe") version("0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1") version("0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5") @@ -58,6 +59,7 @@ class PyTorchtext(PythonPackage): 
# https://github.com/pytorch/text#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index 5aef4c6aef8a29..2ed70ca4ee9984 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -17,6 +17,7 @@ class PyTorchvision(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.1", sha256="d31fe52e4540750c8d372b0f38f1bfa81d8261193f2c2c06577332831d203c50") version("0.16.0", sha256="79b30b082237e3ead21e74587cedf4a4d832f977cf7dfeccfb65f67988b12ceb") version("0.15.2", sha256="1efcb80e0a6e42c54f07ee16167839b4d302aeeecc12839cc47c74b06a2c20d4") version("0.15.1", sha256="689d23d4ebb0c7e54e8651c89b17155b64341c14ae4444a04ca7dc6f2b6a0a43") @@ -62,6 +63,7 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-xyzservices/package.py b/var/spack/repos/builtin/packages/py-xyzservices/package.py new file mode 100644 index 00000000000000..c9760370bb9fc0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-xyzservices/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyXyzservices(PythonPackage): + """xyzservices is a lightweight library providing a repository of + available XYZ services offering raster basemap tiles.""" + + homepage = "https://github.com/geopandas/xyzservices" + pypi = "xyzservices/xyzservices-2023.10.1.tar.gz" + + license("BSD-3-Clause") + + version("2023.10.1", sha256="091229269043bc8258042edbedad4fcb44684b0473ede027b5672ad40dc9fa02") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") diff --git a/var/spack/repos/builtin/packages/q-e-sirius/package.py b/var/spack/repos/builtin/packages/q-e-sirius/package.py index ec78ba5702b1a1..1605e4e37d5703 100644 --- a/var/spack/repos/builtin/packages/q-e-sirius/package.py +++ b/var/spack/repos/builtin/packages/q-e-sirius/package.py @@ -93,7 +93,7 @@ def cmake_args(self): # Work around spack issue #19970 where spack sets # rpaths for MKL just during make, but cmake removes # them during make install. 
- if "^mkl" in self.spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: args.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON") spec = self.spec args.append(self.define("BLAS_LIBRARIES", spec["blas"].libs.joined(";"))) diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py b/var/spack/repos/builtin/packages/qmcpack/package.py index 65a02c1cf9dc8f..99612cedf9650b 100644 --- a/var/spack/repos/builtin/packages/qmcpack/package.py +++ b/var/spack/repos/builtin/packages/qmcpack/package.py @@ -376,7 +376,7 @@ def cmake_args(self): # Next two environment variables were introduced in QMCPACK 3.5.0 # Prior to v3.5.0, these lines should be benign but CMake # may issue a warning. - if "^mkl" in spec: + if spec["lapack"].name in INTEL_MATH_LIBRARIES: args.append("-DENABLE_MKL=1") args.append("-DMKL_ROOT=%s" % env["MKLROOT"]) else: diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index 4d41903cd637d7..40c036320d7a7a 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -242,6 +242,11 @@ class QuantumEspresso(CMakePackage, Package): depends_on("git@2.13:", type="build") depends_on("m4", type="build") + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + # CONFLICTS SECTION # Omitted for now due to concretizer bug # MKL with 64-bit integers not supported. @@ -489,7 +494,8 @@ def install(self, pkg, spec, prefix): # you need to pass it in the FFTW_INCLUDE and FFT_LIBS directory. # QE supports an internal FFTW2, but only an external FFTW3 interface. 
- if "^mkl" in spec: + is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES + if is_using_intel_libraries: # A seperate FFT library is not needed when linking against MKL options.append("FFTW_INCLUDE={0}".format(join_path(env["MKLROOT"], "include/fftw"))) if "^fftw@3:" in spec: @@ -531,11 +537,11 @@ def install(self, pkg, spec, prefix): if spec.satisfies("@:6.4"): # set even if MKL is selected options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags)) else: # behavior changed at 6.5 and later - if not spec.satisfies("^mkl"): + if not is_using_intel_libraries: options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags)) if "+scalapack" in spec: - if "^mkl" in spec: + if is_using_intel_libraries: if "^openmpi" in spec: scalapack_option = "yes" else: # mpich, intel-mpi diff --git a/var/spack/repos/builtin/packages/qwt/package.py b/var/spack/repos/builtin/packages/qwt/package.py index e2d7a8e0ee5656..7bc3d51ece7dce 100644 --- a/var/spack/repos/builtin/packages/qwt/package.py +++ b/var/spack/repos/builtin/packages/qwt/package.py @@ -31,7 +31,9 @@ class Qwt(QMakePackage): depends_on("qt+tools", when="+designer") depends_on("qt+opengl", when="+opengl") - depends_on("qt") + # Qwt does not support Qt6; this picks the right qmake provider + conflicts("^qt-base", msg="Qwt requires Qt5") + # the qt@5.14.2 limitation was lifted in qwt@6.1.5 # https://sourceforge.net/p/qwt/code/HEAD/tree/tags/qwt-6.1.6/CHANGES-6.1 depends_on("qt@:5.14.2", when="@:6.1.4") diff --git a/var/spack/repos/builtin/packages/r-rlang/package.py b/var/spack/repos/builtin/packages/r-rlang/package.py index 40120150b06b2c..2805d53bba2b10 100644 --- a/var/spack/repos/builtin/packages/r-rlang/package.py +++ b/var/spack/repos/builtin/packages/r-rlang/package.py @@ -14,6 +14,8 @@ class RRlang(RPackage): cran = "rlang" + version("1.1.2", sha256="2a0ee1dc6e5c59b283c32db5e74e869922a336197cb406fe92622b6ec66f8092") + version("1.1.1", sha256="5e5ec9a7796977216c39d94b1e342e08f0681746657067ba30de11b8fa8ada99") version("1.1.0", sha256="f89859d91c9edc05fd7ccf21163fe53ad58da907ee273a93d5ab004a8649335b") version("1.0.6", sha256="e6973d98a0ea301c0da1eeaa435e9e65d1c3f0b95ed68bdc2d6cb0c610166760") version("1.0.2", sha256="8de87c3e6fb0b3cce2dabc6908186f8e1528cc0c16b54de965fe02d405fdd7cc") diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index dfe397ca2c0d7c..a12d089808e1e2 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -import re from spack.package import * @@ -60,21 +59,20 @@ class R(AutotoolsPackage): version("3.1.3", sha256="07e98323935baa38079204bfb9414a029704bb9c0ca5ab317020ae521a377312") version("3.1.2", sha256="bcd150afcae0e02f6efb5f35a6ab72432be82e849ec52ce0bb89d8c342a8fa7a") - variant( - "external-lapack", default=False, description="Links to externally installed BLAS/LAPACK" - ) variant("X", default=False, description="Enable X11 support (TCLTK, PNG, JPEG, TIFF, CAIRO)") variant("memory_profiling", default=False, description="Enable memory profiling") variant("rmath", default=False, description="Build standalone Rmath library") - depends_on("blas", when="+external-lapack") - depends_on("lapack", when="+external-lapack") + depends_on("blas") + depends_on("lapack") + depends_on("bzip2") depends_on("curl+libidn2") # R didn't anticipate the celebratory non-breaking major version bump of curl 8. 
depends_on("curl@:7", when="@:4.2") depends_on("icu4c") depends_on("java") + depends_on("libtirpc") depends_on("ncurses") depends_on("pcre", when="@:3.6.3") depends_on("pcre2", when="@4:") @@ -84,16 +82,18 @@ class R(AutotoolsPackage): depends_on("zlib-api") depends_on("zlib@1.2.5:", when="^zlib") depends_on("texinfo", type="build") - depends_on("cairo+X+gobject+pdf", when="+X") - depends_on("pango+X", when="+X") - depends_on("harfbuzz+graphite2", when="+X") - depends_on("jpeg", when="+X") - depends_on("libpng", when="+X") - depends_on("libtiff", when="+X") - depends_on("libx11", when="+X") - depends_on("libxmu", when="+X") - depends_on("libxt", when="+X") - depends_on("tk", when="+X") + + with when("+X"): + depends_on("cairo+X+gobject+pdf") + depends_on("pango+X") + depends_on("harfbuzz+graphite2") + depends_on("jpeg") + depends_on("libpng") + depends_on("libtiff") + depends_on("libx11") + depends_on("libxmu") + depends_on("libxt") + depends_on("tk") patch("zlib.patch", when="@:3.3.2") @@ -126,32 +126,34 @@ def configure_args(self): spec = self.spec prefix = self.prefix + extra_rpath = join_path(prefix, "rlib", "R", "lib") + + blas_flags: str = spec["blas"].libs.ld_flags + lapack_flags: str = spec["lapack"].libs.ld_flags + + # R uses LAPACK in Fortran, which requires libmkl_gf_* when gfortran is used. + # TODO: cleaning this up seem to require both compilers as dependencies and use variants. + if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: + xlp64 = "ilp64" if spec["lapack"].satisfies("+ilp64") else "lp64" + blas_flags = blas_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}") + lapack_flags = lapack_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}") + config_args = [ "--with-internal-tzcode", "--libdir={0}".format(join_path(prefix, "rlib")), "--enable-R-shlib", - "--enable-BLAS-shlib", "--enable-R-framework=no", "--without-recommended-packages", - "LDFLAGS=-L{0} -Wl,-rpath,{0}".format(join_path(prefix, "rlib", "R", "lib")), + f"LDFLAGS=-Wl,-rpath,{extra_rpath}", + f"--with-blas={blas_flags}", + f"--with-lapack={lapack_flags}", + # cannot disable docs with a normal configure option + "ac_cv_path_PDFLATEX=", + "ac_cv_path_PDFTEX=", + "ac_cv_path_TEX=", + "ac_cv_path_TEXI2DVI=", ] - if "+external-lapack" in spec: - if "^mkl" in spec and "gfortran" in self.compiler.fc: - mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)") - config_args.extend( - [ - mkl_re.sub( - r"\g<1>gf\g<2>", "--with-blas={0}".format(spec["blas"].libs.ld_flags) - ), - "--with-lapack", - ] - ) - else: - config_args.extend( - ["--with-blas={0}".format(spec["blas"].libs.ld_flags), "--with-lapack"] - ) - if "+X" in spec: config_args.append("--with-cairo") config_args.append("--with-jpeglib") diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 99221b9b08c7d3..30a63bc079eb18 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -162,6 +162,11 @@ def _get_sys_type(self, spec): sys_type = env["SYS_TYPE"] return sys_type + @property + def libs(self): + shared = "+shared" in self.spec + return find_libraries("libRAJA", root=self.prefix, shared=shared, recursive=True) + @property def cache_name(self): hostname = socket.gethostname() diff --git a/var/spack/repos/builtin/packages/rclone/package.py b/var/spack/repos/builtin/packages/rclone/package.py index a1dacabc7708e7..06dd69d9175dc7 100644 --- a/var/spack/repos/builtin/packages/rclone/package.py +++ 
b/var/spack/repos/builtin/packages/rclone/package.py @@ -15,6 +15,7 @@ class Rclone(Package): maintainers("alecbcs") + version("1.64.2", sha256="0c74d8fb887691e04e865e3b6bc32e8af47c3e54a9922ffdbed38c8323e281c9") version("1.63.1", sha256="0d8bf8b7460681f7906096a9d37eedecc5a1d1d3ad17652e68f0c6de104c2412") version("1.62.2", sha256="340371f94604e6771cc4a2c91e37d1bf00a524deab520340440fb0968e783f63") version("1.61.1", sha256="34b5f52047741c7bbf54572c02cc9998489c4736a753af3c99255296b1af125d") diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py index 493e0c098331f2..569d51dc704c21 100644 --- a/var/spack/repos/builtin/packages/restic/package.py +++ b/var/spack/repos/builtin/packages/restic/package.py @@ -14,6 +14,7 @@ class Restic(Package): maintainers("alecbcs") + version("0.16.2", sha256="88165b5b89b6064df37a9964d660f40ac62db51d6536e459db9aaea6f2b2fc11") version("0.16.0", sha256="b91f5ef6203a5c50a72943c21aaef336e1344f19a3afd35406c00f065db8a8b9") version("0.15.2", sha256="52aca841486eaf4fe6422b059aa05bbf20db94b957de1d3fca019ed2af8192b7") version("0.15.1", sha256="fce382fdcdac0158a35daa640766d5e8a6e7b342ae2b0b84f2aacdff13990c52") diff --git a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py index 71ff595e64247e..d0d4f1f6e9292d 100644 --- a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py +++ b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py @@ -18,6 +18,48 @@ class RiscvGnuToolchain(AutotoolsPackage): maintainers("wanlinwang") version("develop", branch="master", submodules=True) + version( + "2023.10.18", + tag="2023.10.18", + commit="b86b2b37d0acc607156ff56ff17ee105a9b48897", + submodules=True, + ) + version( + "2023.10.17", + tag="2023.10.17", + commit="c11f0748276c58df4f9d9602cdc2de5f17cbae8c", + submodules=True, + ) + version( + "2023.10.12", + tag="2023.10.12", + commit="e65e7fc58543c821baf4f1fb6d0ef700177b9d89", + submodules=True, + ) + version( + "2023.10.06", + tag="2023.10.06", + commit="6e7190e8c95e09d541e69f6f6e39163f808570d5", + submodules=True, + ) + version( + "2023.09.27", + tag="2023.09.27", + commit="5afde2de23c6597aaa5069f36574c61bcb39b007", + submodules=True, + ) + version( + "2023.09.26", + tag="2023.09.26", + commit="ffb5968884630c7baebba7b2af493f6b5f74ad80", + submodules=True, + ) + version( + "2023.09.13", + tag="2023.09.13", + commit="5437780994b830e9eabf467f85f22ed24b5fade1", + submodules=True, + ) version( "2022.08.08", tag="2022.08.08", diff --git a/var/spack/repos/builtin/packages/rkcommon/package.py b/var/spack/repos/builtin/packages/rkcommon/package.py index 4f0a07559ef9c6..8223c6836000a9 100644 --- a/var/spack/repos/builtin/packages/rkcommon/package.py +++ b/var/spack/repos/builtin/packages/rkcommon/package.py @@ -16,6 +16,7 @@ class Rkcommon(CMakePackage): # maintainers("github_user1",o"github_user2") + version("1.12.0", sha256="6abb901073811cdbcbe336772e1fcb458d78cab5ad8d5d61de2b57ab83581e80") version("1.11.0", sha256="9cfeedaccdefbdcf23c465cb1e6c02057100c4a1a573672dc6cfea5348cedfdd") version("1.10.0", sha256="57a33ce499a7fc5a5aaffa39ec7597115cf69ed4ff773546b5b71ff475ee4730") version("1.9.0", sha256="b68aa02ef44c9e35c168f826a14802bb5cc6a9d769ba4b64b2c54f347a14aa53") diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index 6a6484b4b2e25c..6d03b1155dffeb 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ 
b/var/spack/repos/builtin/packages/root/package.py @@ -34,6 +34,7 @@ class Root(CMakePackage): # Development version (when more recent than production). # Production version + version("6.30.00", sha256="0592c066954cfed42312957c9cb251654456064fe2d8dabdcb8826f1c0099d71") version("6.28.06", sha256="af3b673b9aca393a5c9ae1bf86eab2672aaf1841b658c5c6e7a30ab93c586533") version("6.28.04", sha256="70f7f86a0cd5e3f2a0befdc59942dd50140d990ab264e8e56c7f17f6bfe9c965") version("6.28.02", sha256="6643c07710e68972b00227c68b20b1016fec16f3fba5f44a571fa1ce5bb42faa") @@ -160,9 +161,11 @@ class Root(CMakePackage): ) variant("mysql", default=False, description="Enable support for MySQL databases") variant("opengl", default=True, description="Enable OpenGL support") - variant("oracle", default=False, description="Enable support for Oracle databases") + variant( + "oracle", when="@:6.30", default=False, description="Enable support for Oracle databases" + ) variant("postgres", default=False, description="Enable postgres support") - variant("pythia6", default=False, description="Enable pythia6 support") + variant("pythia6", when="@:6.30", default=False, description="Enable pythia6 support") variant("pythia8", default=False, description="Enable pythia8 support") variant("python", default=True, description="Enable Python ROOT bindings") variant("qt4", when="@:6.17", default=False, description="Enable Qt graphics backend") @@ -290,6 +293,7 @@ class Root(CMakePackage): depends_on("unuran", when="+unuran") depends_on("vc@1.0:", when="@6.07.04: +vc") depends_on("vc@1.3.0:", when="@6.09.02: +vc") + depends_on("vc@1.4.4:", when="@6.29.02: +vc") depends_on("vdt", when="+vdt") depends_on("veccore", when="+veccore") depends_on("libxml2", when="+xml") @@ -320,6 +324,7 @@ class Root(CMakePackage): conflicts("+tmva", when="~mlp", msg="root+tmva requires MLP") conflicts("cxxstd=11", when="+root7", msg="root7 requires at least C++14") conflicts("cxxstd=11", when="@6.25.02:", msg="This version of root requires at least C++14") + conflicts("cxxstd=14", when="@6.30.00:", msg="This version of root requires at least C++17") conflicts( "cxxstd=20", when="@:6.28.02", msg="C++20 support requires root version at least 6.28.04" ) diff --git a/var/spack/repos/builtin/packages/rust-bootstrap/package.py b/var/spack/repos/builtin/packages/rust-bootstrap/package.py index 337618e20e38f2..a8e9f7baae34a8 100644 --- a/var/spack/repos/builtin/packages/rust-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/rust-bootstrap/package.py @@ -21,6 +21,17 @@ class RustBootstrap(Package): # should update these binary releases as bootstrapping requirements are # modified by new releases of Rust. 
rust_releases = { + "1.73.0": { + "darwin": { + "x86_64": "ece9646bb153d4bc0f7f1443989de0cbcd8989a7d0bf3b7fb9956e1223954f0c", + "aarch64": "9c96e4c57328fb438ee2d87aa75970ce89b4426b49780ccb3c16af0d7c617cc6", + }, + "linux": { + "x86_64": "aa4cf0b7e66a9f5b7c623d4b340bb1ac2864a5f2c2b981f39f796245dc84f2cb", + "aarch64": "e54d7d886ba413ae573151f668e76ea537f9a44406d3d29598269a4a536d12f6", + "powerpc64le": "8fa215ee3e274fb64364e7084613bc570369488fa22cf5bc8e0fe6dc810fe2b9", + }, + }, "1.70.0": { "darwin": { "x86_64": "e5819fdbfc7f1a4d5d82cb4c3b7662250748450b45a585433bfb75648bc45547", diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index b660697df3653e..8d0784d95a203b 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import re from spack.package import * @@ -17,6 +18,27 @@ class Rust(Package): maintainers("alecbcs") + # When adding a version of Rust you may need to add an additional version + # to rust-bootstrap as the minimum bootstrapping requirements increase. + # As a general rule of thumb Rust can be built with either the previous major + # version or the current version of the compiler as shown above. + + # Pre-release versions. + # Note: If you plan to use these versions remember to install with + # `-n` to prevent Spack from failing due to failed checksums. + # + # $ spack install -n rust@pre-release-version + # + version("beta") + version("master", branch="master", submodules=True) + version("nightly") + + # Stable versions. + version("1.73.0", sha256="96d62e6d1f2d21df7ac8acb3b9882411f9e7c7036173f7f2ede9e1f1f6b1bb3a") + version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c") + version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038") + version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7") + # Core dependencies depends_on("cmake@3.13.4:", type="build") depends_on("curl+nghttp2") @@ -40,26 +62,7 @@ class Rust(Package): depends_on("rust-bootstrap@1.59:1.60", type="build", when="@1.60") depends_on("rust-bootstrap@1.64:1.65", type="build", when="@1.65") depends_on("rust-bootstrap@1.69:1.70", type="build", when="@1.70") - - # When adding a version of Rust you may need to add an additional version - # to rust-bootstrap as the minimum bootstrapping requirements increase. - # As a general rule of thumb Rust can be built with either the previous major - # version or the current version of the compiler as shown above. - - # Pre-release versions. - # Note: If you plan to use these versions remember to install with - # `-n` to prevent Spack from failing due to failed checksums. - # - # $ spack install -n rust@pre-release-version - # - version("beta") - version("master", branch="master", submodules=True) - version("nightly") - - # Stable versions. - version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c") - version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038") - version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7") + depends_on("rust-bootstrap@1.72:1.73", type="build", when="@1.73") variant( "analysis", @@ -91,9 +94,33 @@ def setup_build_environment(self, env): ar = which("ar", required=True) env.set("AR", ar.path) - # Manually inject the path of openssl's certs for build. 
- certs = join_path(self.spec["openssl"].prefix, "etc/openssl/cert.pem") - env.set("CARGO_HTTP_CAINFO", certs) + # Manually inject the path of openssl's certs for build + # if certs are present on system via Spack or via external + # openssl. + def get_test_path(p): + certs = join_path(p, "cert.pem") + if os.path.exists(certs): + return certs + return None + + # find certs, don't set if no file is found in case + # ca-certificates isn't installed + certs = None + openssl = self.spec["openssl"] + if openssl.external: + try: + output = which("openssl", required=True)("version", "-d", output=str, error=str) + openssl_dir = re.match('OPENSSLDIR: "([^"]+)"', output) + if openssl_dir: + certs = get_test_path(openssl_dir.group(1)) + except ProcessError: + pass + + if certs is None: + certs = get_test_path(join_path(openssl.prefix, "etc/openssl")) + + if certs is not None: + env.set("CARGO_HTTP_CAINFO", certs) def configure(self, spec, prefix): opts = [] diff --git a/var/spack/repos/builtin/packages/scafacos/package.py b/var/spack/repos/builtin/packages/scafacos/package.py new file mode 100644 index 00000000000000..84f73ac0e1339c --- /dev/null +++ b/var/spack/repos/builtin/packages/scafacos/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Scafacos(AutotoolsPackage): + """ScaFaCoS is a library of scalable fast coulomb solvers.""" + + homepage = "http://www.scafacos.de/" + url = "https://github.com/scafacos/scafacos/releases/download/v1.0.4/scafacos-1.0.4.tar.gz" + + maintainers("hmenke") + + license("GPL-3.0-or-later OR LGPL-3.0-or-later") + + version("1.0.4", sha256="6634c4202e825e771d1dd75bbe9cac5cee41136c87653fde98fbd634681c1be6") + version("1.0.3", sha256="d3579f4cddb10a562722c190c2452ebc455592d44f6dbde8f155849ba6e2b3d0") + version("1.0.2", sha256="158078665e48e28fd12b7895063db056cee5d135423fc36802e39c9160102b97") + version("1.0.1", sha256="2b125f313795c81b0e87eb920082e91addf94c17444f9486d979e691aaded99b") + version("1.0.0", sha256="cc5762edbecfec0323126b6a6a535dcc3e134fcfef4b00f63eb05fae15244a96") + + depends_on("fftw") + depends_on("file") + depends_on("gmp") + depends_on("gsl") + depends_on("mpi") + depends_on("pfft") + depends_on("pnfft") + + def configure_args(self): + args = [ + "--disable-doc", + "--enable-fcs-solvers=direct,ewald,fmm,p3m", + "FC={0}".format(self.spec["mpi"].mpifc), + "F77={0}".format(self.spec["mpi"].mpif77), + ] + return args diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py index 7ae8efc2e45997..38fa6277f59a09 100644 --- a/var/spack/repos/builtin/packages/sherpa/package.py +++ b/var/spack/repos/builtin/packages/sherpa/package.py @@ -217,7 +217,7 @@ def configure_args(self): args.extend(self.enable_or_disable("pythia")) hepmc_root = lambda x: self.spec["hepmc"].prefix args.extend(self.enable_or_disable("hepmc2", activation_value=hepmc_root)) - if self.spec.satisfies("@2.2.13:"): + if self.spec.satisfies("@3:"): args.extend(self.enable_or_disable("hepmc3", activation_value="prefix")) args.extend(self.enable_or_disable("rivet", activation_value="prefix")) args.extend(self.enable_or_disable("lhapdf", activation_value="prefix")) diff --git a/var/spack/repos/builtin/packages/sleef/package.py b/var/spack/repos/builtin/packages/sleef/package.py index 79227766691a76..43c50e2d3c8156 100644 --- 
a/var/spack/repos/builtin/packages/sleef/package.py +++ b/var/spack/repos/builtin/packages/sleef/package.py @@ -53,10 +53,9 @@ class Sleef(CMakePackage): # conflicts("^mpfr@4.2:") def cmake_args(self): - # Taken from PyTorch's aten/src/ATen/CMakeLists.txt + # https://salsa.debian.org/science-team/sleef/-/blob/master/debian/rules return [ - self.define("BUILD_SHARED_LIBS", False), self.define("BUILD_DFT", False), - self.define("BUILD_GNUABI_LIBS", False), + self.define("SLEEF_TEST_ALL_IUT", True), self.define("BUILD_TESTS", False), ] diff --git a/var/spack/repos/builtin/packages/slurm-drmaa/package.py b/var/spack/repos/builtin/packages/slurm-drmaa/package.py index 100b328b9a4dfc..012fc6dcd1b531 100644 --- a/var/spack/repos/builtin/packages/slurm-drmaa/package.py +++ b/var/spack/repos/builtin/packages/slurm-drmaa/package.py @@ -10,7 +10,7 @@ class SlurmDrmaa(AutotoolsPackage): """ DRMAA for Slurm is an implementation of Open Grid Forum DRMAA 1.0 (Distributed Resource Management Application API) specification for submission and control of - jobs to SLURM. Using DRMAA, grid applications builders, portal developers and + jobs to Slurm. Using DRMAA, grid applications builders, portal developers and ISVs can use the same high-level API to link their software with different cluster/resource management systems. """ diff --git a/var/spack/repos/builtin/packages/stdexec/package.py b/var/spack/repos/builtin/packages/stdexec/package.py index eeebe847d91bd0..ae6b2bfed39d34 100644 --- a/var/spack/repos/builtin/packages/stdexec/package.py +++ b/var/spack/repos/builtin/packages/stdexec/package.py @@ -14,6 +14,8 @@ class Stdexec(CMakePackage): git = "https://github.com/NVIDIA/stdexec.git" maintainers("msimberg", "aurianer") + license("Apache-2.0") + version("23.03", sha256="2c9dfb6e56a190543049d2300ccccd1b626f4bb82af5b607869c626886fadd15") version("main", branch="main") diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index 71ae9186a00578..b05d31f360e328 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -22,11 +22,13 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True maintainers("balos1", "cswoodward", "gardner48") + license("BSD-3-Clause") # ========================================================================== # Versions # ========================================================================== version("develop", branch="develop") + version("6.6.2", sha256="08f8223a5561327e44c072e46faa7f665c0c0bc8cd7e45d23f486c3d24c65009") version("6.6.1", sha256="21f71e4aef95b18f954c8bbdc90b62877443950533d595c68051ab768b76984b") version("6.6.0", sha256="f90029b8da846c8faff5530fd1fa4847079188d040554f55c1d5d1e04743d29d") version("6.5.1", sha256="4252303805171e4dbdd19a01e52c1dcfe0dafc599c3cfedb0a5c2ffb045a8a75") diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 241dc4b552c7ff..7336f4ed8850ff 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -20,6 +20,7 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") version("amd", branch="amd") + version("8.2.0", sha256="d53573e5a399b2b4ab1fcc36e8421c1b6fab36345c0af14f8fa20326e3365f1f") version("8.1.2", sha256="7b16c442bb01ea8b298c0aab9a2584aa4615d09786aac968cb2f3118c058206b") 
version("8.1.1", sha256="766d70b84ece79d88249fe10ff51d2a397a29f274d9fd1e4a4ac39179a9ef23f") version("8.1.0", sha256="9308844b99a7e762d5704934f7e9f79daf158b0bfc582994303c2e0b31518b34") @@ -53,14 +54,16 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): ), ) variant("shared", default=True, description="Build shared libraries") + variant("parmetis", default=True, description="Enable ParMETIS library") depends_on("mpi") depends_on("blas") depends_on("lapack") - depends_on("parmetis +int64", when="+int64") - depends_on("metis@5: +int64", when="+int64") - depends_on("parmetis ~int64", when="~int64") - depends_on("metis@5: ~int64", when="~int64") + with when("+parmetis"): + depends_on("metis@5: +int64", when="+int64") + depends_on("parmetis +int64", when="+int64") + depends_on("metis@5: ~int64", when="~int64") + depends_on("parmetis ~int64", when="~int64") depends_on("cmake@3.18.1:", type="build", when="@7.1.0:") depends_on("hipblas", when="+rocm") depends_on("rocsolver", when="+rocm") @@ -93,13 +96,17 @@ def append_from_variant(*args): append_define("TPL_LAPACK_LIBRARIES", spec["lapack"].libs) append_define("TPL_ENABLE_LAPACKLIB", True) append_define("USE_XSDK_DEFAULTS", True) - append_define( - "TPL_PARMETIS_LIBRARIES", [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags] - ) - append_define( - "TPL_PARMETIS_INCLUDE_DIRS", - [spec["parmetis"].prefix.include, spec["metis"].prefix.include], - ) + + append_from_variant("TPL_ENABLE_PARMETISLIB", "parmetis") + if "+parmetis" in spec: + append_define( + "TPL_PARMETIS_LIBRARIES", + [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags], + ) + append_define( + "TPL_PARMETIS_INCLUDE_DIRS", + [spec["parmetis"].prefix.include, spec["metis"].prefix.include], + ) append_define("XSDK_INDEX_SIZE", "64" if "+int64" in spec else "32") diff --git a/var/spack/repos/builtin/packages/taskflow/package.py b/var/spack/repos/builtin/packages/taskflow/package.py index df921639b28781..1694dc7c95f8c6 100644 --- a/var/spack/repos/builtin/packages/taskflow/package.py +++ b/var/spack/repos/builtin/packages/taskflow/package.py @@ -16,6 +16,7 @@ class Taskflow(CMakePackage): git = "https://github.com/taskflow/taskflow.git" version("master", branch="master") + version("3.6.0", sha256="5a1cd9cf89f93a97fcace58fd73ed2fc8ee2053bcb43e047acb6bc121c3edf4c") version("2.7.0", sha256="bc2227dcabec86abeba1fee56bb357d9d3c0ef0184f7c2275d7008e8758dfc3e") # Compiler must offer C++14 support diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index c0082dc52cc1f7..dee78161bb1149 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -37,6 +37,8 @@ class Tcl(AutotoolsPackage, SourceforgePackage): configure_directory = "unix" + filter_compiler_wrappers("tclConfig.sh", relative_root="lib") + def install(self, spec, prefix): with working_dir(self.build_directory): make("install") diff --git a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py index dd219f31ee039a..c0ff6a7b712ed2 100644 --- a/var/spack/repos/builtin/packages/tracy-client/package.py +++ b/var/spack/repos/builtin/packages/tracy-client/package.py @@ -14,6 +14,8 @@ class TracyClient(CMakePackage): url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz" maintainers("msimberg") + license("BSD-3-Clause") + version("master", git="https://github.com/wolfpld/tracy.git", branch="master") version("0.10", 
sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py index 111b4a86534600..021e18d00f4f73 100644 --- a/var/spack/repos/builtin/packages/tracy/package.py +++ b/var/spack/repos/builtin/packages/tracy/package.py @@ -14,6 +14,8 @@ class Tracy(MakefilePackage): url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz" maintainers("msimberg") + license("BSD-3-Clause") + version("master", git="https://github.com/wolfpld/tracy.git", branch="master") version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") diff --git a/var/spack/repos/builtin/packages/ut/package.py b/var/spack/repos/builtin/packages/ut/package.py index 9c5d9f44603de7..7d7d2b573ab92d 100644 --- a/var/spack/repos/builtin/packages/ut/package.py +++ b/var/spack/repos/builtin/packages/ut/package.py @@ -15,6 +15,8 @@ class Ut(CMakePackage): maintainers("msimberg") + license("BSL-1.0") + version("master", branch="master") version("1.1.9", sha256="1a666513157905aa0e53a13fac602b5673dcafb04a869100a85cd3f000c2ed0d") diff --git a/var/spack/repos/builtin/packages/vc/package.py b/var/spack/repos/builtin/packages/vc/package.py index f00154e851ed61..73b48537a06526 100644 --- a/var/spack/repos/builtin/packages/vc/package.py +++ b/var/spack/repos/builtin/packages/vc/package.py @@ -13,6 +13,7 @@ class Vc(CMakePackage): git = "https://github.com/VcDevel/Vc.git" url = "https://github.com/VcDevel/Vc/archive/refs/tags/1.3.3.tar.gz" + version("1.4.4", sha256="5933108196be44c41613884cd56305df320263981fe6a49e648aebb3354d57f3") version("1.4.3", sha256="988ea0053f3fbf17544ca776a2749c097b3139089408b0286fa4e9e8513e037f") version("1.4.2", sha256="50d3f151e40b0718666935aa71d299d6370fafa67411f0a9e249fbce3e6e3952") version("1.4.1", sha256="7e8b57ed5ff9eb0835636203898c21302733973ff8eaede5134dd7cb87f915f6") diff --git a/var/spack/repos/builtin/packages/votca/package.py b/var/spack/repos/builtin/packages/votca/package.py index db9d260f86530d..9dbd3a65ca506c 100644 --- a/var/spack/repos/builtin/packages/votca/package.py +++ b/var/spack/repos/builtin/packages/votca/package.py @@ -20,6 +20,7 @@ class Votca(CMakePackage): maintainers("junghans") version("master", branch="master") + version("2023", sha256="6150a38c77379d05592a56ae4392a00c4636d02198bb06108a3dc739a45115f8") version("2022.1", sha256="358119b2645fe60f88ca621aed508c49fb61f88d29d3e3fa24b5b831ed4a66ec") version("2022", sha256="7991137098ff4511f4ca2c6f1b6c45f53d92d9f84e5c0d0e32fbc31768f73a83") diff --git a/var/spack/repos/builtin/packages/whip/package.py b/var/spack/repos/builtin/packages/whip/package.py index a269097ad6bc8f..44c6f1ad57391e 100644 --- a/var/spack/repos/builtin/packages/whip/package.py +++ b/var/spack/repos/builtin/packages/whip/package.py @@ -15,6 +15,8 @@ class Whip(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/eth-cscs/whip.git" maintainers("msimberg", "rasolca") + license("BSD-3-Clause") + version("main", branch="main") version("0.2.0", sha256="d8fec662526accbd1624922fdf01a077d6f312cf253382660e4a2f65e28e8686") version("0.1.0", sha256="5d557794f4afc8332fc660948a342f69e22bc9e5d575ffb3e3944cf526db5ec9") diff --git a/var/spack/repos/builtin/packages/xrdcl-record/package.py 
b/var/spack/repos/builtin/packages/xrdcl-record/package.py index c43f668a612149..d045fcd06c8f8e 100644 --- a/var/spack/repos/builtin/packages/xrdcl-record/package.py +++ b/var/spack/repos/builtin/packages/xrdcl-record/package.py @@ -13,8 +13,6 @@ class XrdclRecord(CMakePackage): homepage = "https://github.com/xrootd/xrdcl-record" url = "https://github.com/xrootd/xrdcl-record/archive/refs/tags/v5.4.2.tar.gz" - maintainers("iarspider") - version("5.4.2", sha256="fb76284491ff4e723bce4c9e9d87347e98e278e70c597167bc39a162bc876734") depends_on("xrootd") diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index 3e02dbd8b36a61..2087df88a14c5e 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -85,9 +85,11 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): maintainers("balay", "luszczek", "balos1", "shuds13", "v-dobrev") version("develop") + version("1.0.0") version("0.8.0") version("0.7.0", deprecated=True) + variant("sycl", default=False, sticky=True, description="Enable sycl variant of xsdk packages") variant("trilinos", default=True, sticky=True, description="Enable trilinos package build") variant("datatransferkit", default=True, description="Enable datatransferkit package build") variant("omega-h", default=True, description="Enable omega-h package build") @@ -107,8 +109,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): variant("exago", default=True, description="Enable exago build") variant("hiop", default=True, description="Enable hiop build") variant("raja", default=(sys.platform != "darwin"), description="Enable raja for hiop, exago") + variant("pflotran", default=True, description="Enable pflotran package build") - xsdk_depends_on("hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda") + xsdk_depends_on( + "hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda", rocm_var="rocm" + ) + xsdk_depends_on( + "hypre@2.30.0+superlu-dist+shared", when="@1.0.0", cuda_var="cuda", rocm_var="rocm" + ) xsdk_depends_on("hypre@2.26.0+superlu-dist+shared", when="@0.8.0", cuda_var="cuda") xsdk_depends_on("hypre@2.23.0+superlu-dist+shared", when="@0.7.0", cuda_var="cuda") @@ -118,6 +126,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): cuda_var="cuda", rocm_var="rocm", ) + xsdk_depends_on( + "mfem@4.6.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps", + when="@1.0.0", + cuda_var="cuda", + rocm_var="rocm", + ) xsdk_depends_on( "mfem@4.5.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps", when="@0.8.0", @@ -131,16 +145,26 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): rocm_var="rocm", ) - xsdk_depends_on("superlu-dist@develop", when="@develop") + xsdk_depends_on("superlu-dist@develop", when="@develop", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("superlu-dist@8.2.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("superlu-dist@8.1.2", when="@0.8.0") xsdk_depends_on("superlu-dist@7.1.1", when="@0.7.0") + + xsdk_depends_on("trilinos +superlu-dist", when="@1.0.0: +trilinos ~cuda ~rocm") xsdk_depends_on( - "trilinos@develop+hypre+superlu-dist+hdf5~mumps+boost" + "trilinos@develop+hypre+hdf5~mumps+boost" + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" + "~exodus~dtk+intrepid2+shards+stratimikos gotype=int" + " cxxstd=14", when="@develop +trilinos", ) + xsdk_depends_on( + "trilinos@14.4.0+hypre+hdf5~mumps+boost" + + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" 
+ + "~exodus~dtk+intrepid2+shards+stratimikos gotype=int" + + " cxxstd=17", + when="@1.0.0 +trilinos", + ) xsdk_depends_on( "trilinos@13.4.1+hypre+superlu-dist+hdf5~mumps+boost" + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" @@ -157,17 +181,25 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("datatransferkit@master", when="@develop +trilinos +datatransferkit") + xsdk_depends_on("datatransferkit@3.1.1", when="@1.0.0 +trilinos +datatransferkit") dtk7ver = "3.1-rc2" if sys.platform == "darwin" else "3.1-rc3" xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.8.0 +trilinos +datatransferkit") xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.7.0 +trilinos +datatransferkit") xsdk_depends_on("petsc +batch", when="@0.7.0: ^cray-mpich") + xsdk_depends_on("petsc +sycl +kokkos", when="@1.0.0: +sycl") xsdk_depends_on( "petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", when="@develop", cuda_var="cuda", rocm_var="rocm", ) + xsdk_depends_on( + "petsc@3.20.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", + when="@1.0.0", + cuda_var="cuda", + rocm_var="rocm", + ) xsdk_depends_on( "petsc@3.18.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", when="@0.8.0", @@ -184,9 +216,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("dealii ~trilinos", when="~trilinos +dealii") xsdk_depends_on( "dealii@master~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" - + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal", + + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal~oce", when="@develop +dealii", ) + xsdk_depends_on( + "dealii@9.5.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" + + "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal~oce", + when="@1.0.0 +dealii", + ) xsdk_depends_on( "dealii@9.4.0~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" + "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal", @@ -198,22 +235,31 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): when="@0.7.0 +dealii", ) - xsdk_depends_on("pflotran@develop", when="@develop") - xsdk_depends_on("pflotran@4.0.1", when="@0.8.0") - xsdk_depends_on("pflotran@3.0.2", when="@0.7.0") + xsdk_depends_on("pflotran@develop", when="@develop +pflotran") + xsdk_depends_on("pflotran@5.0.0", when="@1.0.0 +pflotran") + xsdk_depends_on("pflotran@4.0.1", when="@0.8.0 +pflotran") + xsdk_depends_on("pflotran@3.0.2", when="@0.7.0 +pflotran") xsdk_depends_on("alquimia@master", when="@develop +alquimia") + xsdk_depends_on("alquimia@1.1.0", when="@1.0.0 +alquimia") xsdk_depends_on("alquimia@1.0.10", when="@0.8.0 +alquimia") xsdk_depends_on("alquimia@1.0.9", when="@0.7.0 +alquimia") xsdk_depends_on("sundials +trilinos", when="+trilinos @0.7.0:") xsdk_depends_on("sundials +ginkgo", when="+ginkgo @0.8.0:") + xsdk_depends_on("sundials +sycl cxxstd=17", when="@1.0.0: +sycl") xsdk_depends_on( "sundials@develop~int64+hypre+petsc+superlu-dist", when="@develop", cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "sundials@6.6.2~int64+hypre+petsc+superlu-dist", + when="@1.0.0", + cuda_var=["cuda", "?magma"], + rocm_var=["rocm", "?magma"], + ) xsdk_depends_on( "sundials@6.4.1~int64+hypre+petsc+superlu-dist", when="@0.8.0", @@ -228,13 +274,16 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("plasma@develop:", when="@develop %gcc@6.0:") + xsdk_depends_on("plasma@23.8.2:", when="@1.0.0 %gcc@6.0:") xsdk_depends_on("plasma@22.9.29:", when="@0.8.0 %gcc@6.0:") 
xsdk_depends_on("plasma@21.8.29:", when="@0.7.0 %gcc@6.0:") xsdk_depends_on("magma@master", when="@develop", cuda_var="?cuda", rocm_var="?rocm") + xsdk_depends_on("magma@2.7.1", when="@1.0.0", cuda_var="?cuda", rocm_var="?rocm") xsdk_depends_on("magma@2.7.0", when="@0.8.0", cuda_var="?cuda", rocm_var="?rocm") xsdk_depends_on("magma@2.6.1", when="@0.7.0", cuda_var="?cuda", rocm_var="?rocm") + xsdk_depends_on("amrex +sycl", when="@1.0.0: +sycl") xsdk_depends_on( "amrex@develop+sundials", when="@develop %intel", cuda_var="cuda", rocm_var="rocm" ) @@ -244,6 +293,9 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on( "amrex@develop+sundials", when="@develop %cce", cuda_var="cuda", rocm_var="rocm" ) + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %intel", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %gcc", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %cce", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %intel", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %gcc", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %cce", cuda_var="cuda", rocm_var="rocm") @@ -252,32 +304,39 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("amrex@21.10+sundials", when="@0.7.0 %cce", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@main", when="@develop") + xsdk_depends_on("slepc@3.20.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@3.18.1", when="@0.8.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@3.16.0", when="@0.7.0") xsdk_depends_on("omega-h +trilinos", when="+trilinos +omega-h") xsdk_depends_on("omega-h ~trilinos", when="~trilinos +omega-h") xsdk_depends_on("omega-h@main", when="@develop +omega-h") + xsdk_depends_on("omega-h@scorec.10.6.0", when="@1.0.0 +omega-h") xsdk_depends_on("omega-h@9.34.13", when="@0.8.0 +omega-h") xsdk_depends_on("omega-h@9.34.1", when="@0.7.0 +omega-h") xsdk_depends_on("strumpack ~cuda", when="~cuda @0.7.0: +strumpack") xsdk_depends_on("strumpack ~slate~openmp", when="~slate @0.8.0: +strumpack") xsdk_depends_on("strumpack@master", when="@develop +strumpack", cuda_var=["cuda"]) + xsdk_depends_on("strumpack@7.2.0", when="@1.0.0 +strumpack", cuda_var=["cuda"]) xsdk_depends_on("strumpack@7.0.1", when="@0.8.0 +strumpack", cuda_var=["cuda"]) xsdk_depends_on("strumpack@6.1.0~slate~openmp", when="@0.7.0 +strumpack") xsdk_depends_on("pumi@master+shared", when="@develop") + xsdk_depends_on("pumi@2.2.8+shared", when="@1.0.0") xsdk_depends_on("pumi@2.2.7+shared", when="@0.8.0") xsdk_depends_on("pumi@2.2.6", when="@0.7.0") tasmanian_openmp = "~openmp" if sys.platform == "darwin" else "+openmp" xsdk_depends_on( - "tasmanian@develop+xsdkflags+blas" + tasmanian_openmp, + "tasmanian@develop+blas" + tasmanian_openmp, when="@develop", cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "tasmanian@8.0+mpi+blas" + tasmanian_openmp, when="@1.0.0", cuda_var=["cuda", "?magma"] + ) xsdk_depends_on( "tasmanian@7.9+xsdkflags+mpi+blas" + tasmanian_openmp, when="@0.8.0", @@ -290,6 +349,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("arborx@master", when="@develop +arborx") + xsdk_depends_on("arborx+sycl", when="@1.0.0: +arborx +sycl") + xsdk_depends_on("arborx@1.4.1", when="@1.0.0 +arborx") xsdk_depends_on("arborx@1.2", when="@0.8.0 +arborx") 
xsdk_depends_on("arborx@1.1", when="@0.7.0 +arborx") @@ -302,12 +363,17 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("phist kernel_lib=tpetra", when="+trilinos +phist") xsdk_depends_on("phist kernel_lib=petsc", when="~trilinos +phist") xsdk_depends_on("phist@develop ~fortran ~scamac ~openmp ~host ~int64", when="@develop +phist") + xsdk_depends_on("phist@1.12.0 ~fortran ~scamac ~openmp ~host ~int64", when="@1.0.0 +phist") xsdk_depends_on("phist@1.11.2 ~fortran ~scamac ~openmp ~host ~int64", when="@0.8.0 +phist") xsdk_depends_on("phist@1.9.5 ~fortran ~scamac ~openmp ~host ~int64", when="@0.7.0 +phist") + xsdk_depends_on("ginkgo+sycl", when="@1.0.0: +ginkgo +sycl") xsdk_depends_on( "ginkgo@develop +mpi ~openmp", when="@develop +ginkgo", cuda_var="cuda", rocm_var="rocm" ) + xsdk_depends_on( + "ginkgo@1.7.0 +mpi ~openmp", when="@1.0.0 +ginkgo", cuda_var="cuda", rocm_var="rocm" + ) xsdk_depends_on( "ginkgo@1.5.0 +mpi ~openmp", when="@0.8.0 +ginkgo", cuda_var="cuda", rocm_var="rocm" ) @@ -317,6 +383,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("py-libensemble@develop+petsc4py", when="@develop +libensemble") xsdk_depends_on("py-petsc4py@main", when="@develop +libensemble") + xsdk_depends_on("py-libensemble@1.0.0+petsc4py", when="@1.0.0 +libensemble") + xsdk_depends_on("py-petsc4py@3.20.1", when="@1.0.0 +libensemble") xsdk_depends_on("py-libensemble@0.9.3+petsc4py", when="@0.8.0 +libensemble") xsdk_depends_on("py-petsc4py@3.18.1", when="@0.8.0 +libensemble") xsdk_depends_on("py-libensemble@0.8.0+petsc4py", when="@0.7.0 +libensemble") @@ -324,11 +392,13 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("precice ~petsc", when="+precice ^cray-mpich") xsdk_depends_on("precice@develop", when="@develop +precice") + xsdk_depends_on("precice@2.5.0", when="@1.0.0 +precice") xsdk_depends_on("precice@2.5.0", when="@0.8.0 +precice") xsdk_depends_on("precice@2.3.0", when="@0.7.0 +precice") bfpk_openmp = "~openmp" if sys.platform == "darwin" else "+openmp" xsdk_depends_on("butterflypack@master", when="@develop +butterflypack") + xsdk_depends_on("butterflypack@2.4.0" + bfpk_openmp, when="@1.0.0 +butterflypack") xsdk_depends_on("butterflypack@2.2.2" + bfpk_openmp, when="@0.8.0 +butterflypack") xsdk_depends_on("butterflypack@2.0.0", when="@0.7.0 +butterflypack") @@ -338,6 +408,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "heffte@2.4.0+fftw", + when="@1.0.0 +heffte", + cuda_var=["cuda", "?magma"], + rocm_var=["rocm", "?magma"], + ) xsdk_depends_on( "heffte@2.3.0+fftw", when="@0.8.0 +heffte", @@ -352,15 +428,20 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("slate@master", when="@develop +slate", cuda_var="cuda") + xsdk_depends_on("slate@2023.08.25", when="@1.0.0 +slate", cuda_var="cuda") xsdk_depends_on("slate@2022.07.00", when="@0.8.0 +slate", cuda_var="cuda") xsdk_depends_on("slate@2021.05.02", when="@0.7.0 +slate %gcc@6.0:", cuda_var="cuda") xsdk_depends_on("exago@develop~ipopt~hiop~python", when="@develop +exago ~raja") xsdk_depends_on("exago@develop~ipopt+hiop+raja", when="@develop +exago +raja", cuda_var="cuda") + xsdk_depends_on("exago@1.6.0~ipopt~hiop~python", when="@1.0.0 +exago ~raja") + xsdk_depends_on("exago@1.6.0~ipopt+hiop+raja", when="@1.0.0 +exago +raja", cuda_var="cuda") xsdk_depends_on("exago@1.5.0~ipopt~hiop~python", when="@0.8.0 +exago ~raja") xsdk_depends_on("exago@1.5.0~ipopt+hiop+raja", 
when="@0.8.0 +exago +raja", cuda_var="cuda") xsdk_depends_on("hiop@develop", when="@develop +hiop ~raja") xsdk_depends_on("hiop@develop+raja", when="@develop +hiop +raja", cuda_var="cuda") + xsdk_depends_on("hiop@1.0.0", when="@1.0.0 +hiop ~raja") + xsdk_depends_on("hiop@1.0.0+raja", when="@1.0.0 +hiop +raja", cuda_var="cuda") xsdk_depends_on("hiop@0.7.1", when="@0.8.0 +hiop ~raja") xsdk_depends_on("hiop@0.7.1+raja", when="@0.8.0 +hiop +raja", cuda_var="cuda") diff --git a/var/spack/repos/tutorial/packages/libpspio/package.py b/var/spack/repos/tutorial/packages/libpspio/package.py new file mode 100644 index 00000000000000..9cd4e7fc4df251 --- /dev/null +++ b/var/spack/repos/tutorial/packages/libpspio/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libpspio(AutotoolsPackage): + """Library to perform I/O operations on pseudopotential data files.""" + + homepage = "https://gitlab.com/ElectronicStructureLibrary/libpspio" + url = "https://gitlab.com/ElectronicStructureLibrary/libpspio/-/archive/0.3.0/libpspio-0.3.0.tar.gz" + + maintainers("hmenke") + + license("MPL-2.0") + + version("0.3.0", sha256="4dc092457e481e5cd703eeecd87e6f17749941fe274043550c8a2557a649afc5") + + variant("fortran", default=False, description="Enable Fortran bindings") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("pkgconfig", type="build") + + depends_on("check") + depends_on("gsl") + + def autoreconf(self, spec, prefix): + Executable("./autogen.sh")() + + def configure_args(self): + args = self.enable_or_disable("fortran") + return args From 5411071662e58d2846b374e857ec6c25971e172e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 10:37:40 +0000 Subject: [PATCH 408/408] build(deps): bump isort in /.github/workflows/style Bumps [isort](https://github.com/pycqa/isort) from 5.12.0 to 5.13.2. - [Release notes](https://github.com/pycqa/isort/releases) - [Changelog](https://github.com/PyCQA/isort/blob/main/CHANGELOG.md) - [Commits](https://github.com/pycqa/isort/compare/5.12.0...5.13.2) --- updated-dependencies: - dependency-name: isort dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/style/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index aadcd83c09fd32..d922745ea06e44 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -1,7 +1,7 @@ black==23.11.0 clingo==5.6.2 flake8==6.1.0 -isort==5.12.0 +isort==5.13.2 mypy==1.6.1 types-six==1.16.21.9 vermin==1.5.2