Dataset columns: repo_name (string, 6-97 chars), path (string, 3-341 chars), text (string, 8-1.02M chars).
player1537-forks/spack
var/spack/repos/builtin/packages/libunistring/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libunistring(AutotoolsPackage, GNUMirrorPackage):
    """This library provides functions for manipulating Unicode strings
    and for manipulating C strings according to the Unicode standard."""

    homepage = "https://www.gnu.org/software/libunistring/"
    gnu_mirror_path = "libunistring/libunistring-0.9.10.tar.xz"
    git = 'https://git.savannah.gnu.org/git/libunistring.git'

    maintainers = ['bernhardkaindl']

    version('master', branch='master')
    version('0.9.10', sha256='eb8fb2c3e4b6e2d336608377050892b54c3c983b646c561836550863003c05d7')
    version('0.9.9', sha256='a4d993ecfce16cf503ff7579f5da64619cee66226fb3b998dafb706190d9a833')
    version('0.9.8', sha256='7b9338cf52706facb2e18587dceda2fbc4a2a3519efa1e15a3f2a68193942f80')
    version('0.9.7', sha256='2e3764512aaf2ce598af5a38818c0ea23dedf1ff5460070d1b6cee5c3336e797')
    version('0.9.6', sha256='2df42eae46743e3f91201bf5c100041540a7704e8b9abfd57c972b2d544de41b')

    depends_on('iconv')

    with when('@master'):
        depends_on('autoconf', type='build')
        depends_on('automake', type='build')
        depends_on('libtool', type='build')
        depends_on('texinfo', type='build')
        depends_on('gperf', type='build')

    # glibc 2.28+ removed libio.h and thus _IO_ftrylockfile
    patch('removed_libio.patch', when='@:0.9.9')

    @when('@0.9.10')
    def patch(self):
        # Applies upstream fix for testcase: pragma weak conflicts with --as-needed
        # https://bugs.gentoo.org/688464#c9 (this links to all further info)
        filter_file('# pragma weak pthread_create', '', 'tests/glthread/thread.h')

    @when('@master')
    def autoreconf(self, spec, prefix):
        which('./gitsub.sh')('pull')
        which('./autogen.sh')()
player1537-forks/spack
var/spack/repos/builtin.mock/packages/dep-with-variants/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class DepWithVariants(Package):
    """Package that has a variant which adds a dependency forced to use
    non default values.
    """
    homepage = "https://dev.null"

    version('1.0')

    variant('foo', default=False, description='nope')
    variant('bar', default=False, description='nope')
    variant('baz', default=False, description='nope')
player1537-forks/spack
var/spack/repos/builtin/packages/py-radiant-mlhub/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyRadiantMlhub(PythonPackage):
    """A Python client for Radiant MLHub."""

    homepage = "https://github.com/radiantearth/radiant-mlhub"
    pypi = "radiant-mlhub/radiant_mlhub-0.2.1.tar.gz"

    maintainers = ['adamjstewart']

    version('0.2.1', sha256='75a2f096b09a87191238fe557dc64dda8c44156351b4026c784c848c7d84b6fb')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-requests@2.25.1:2.25', type=('build', 'run'))
    depends_on('py-pystac@0.5.4', type=('build', 'run'))
    depends_on('py-click@7.1.2:7.1', type=('build', 'run'))
    depends_on('py-tqdm@4.56.0:4.56', type=('build', 'run'))
player1537-forks/spack
lib/spack/spack/util/unparse/unparser.py
# Copyright (c) 2014-2021, <NAME> and Spack Project Developers. # # SPDX-License-Identifier: Python-2.0 "Usage: unparse.py <path to source file>" from __future__ import print_function, unicode_literals import ast import sys from contextlib import contextmanager import six from six import StringIO # TODO: if we require Python 3.7, use its `nullcontext()` @contextmanager def nullcontext(): yield # Large float and imaginary literals get turned into infinities in the AST. # We unparse those infinities to INFSTR. INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1) class _Precedence: """Precedence table that originated from python grammar.""" TUPLE = 0 YIELD = 1 # 'yield', 'yield from' TEST = 2 # 'if'-'else', 'lambda' OR = 3 # 'or' AND = 4 # 'and' NOT = 5 # 'not' CMP = 6 # '<', '>', '==', '>=', '<=', '!=', 'in', 'not in', 'is', 'is not' EXPR = 7 BOR = EXPR # '|' BXOR = 8 # '^' BAND = 9 # '&' SHIFT = 10 # '<<', '>>' ARITH = 11 # '+', '-' TERM = 12 # '*', '@', '/', '%', '//' FACTOR = 13 # unary '+', '-', '~' POWER = 14 # '**' AWAIT = 15 # 'await' ATOM = 16 def pnext(precedence): return min(precedence + 1, _Precedence.ATOM) def interleave(inter, f, seq): """Call f on each item in seq, calling inter() in between. """ seq = iter(seq) try: f(next(seq)) except StopIteration: pass else: for x in seq: inter() f(x) _SINGLE_QUOTES = ("'", '"') _MULTI_QUOTES = ('"""', "'''") _ALL_QUOTES = _SINGLE_QUOTES + _MULTI_QUOTES def is_simple_tuple(slice_value): # when unparsing a non-empty tuple, the parantheses can be safely # omitted if there aren't any elements that explicitly requires # parantheses (such as starred expressions). return ( isinstance(slice_value, ast.Tuple) and slice_value.elts and ( # Python 2 doesn't allow starred elements in tuples like Python 3 six.PY2 or not any( isinstance(elt, ast.Starred) for elt in slice_value.elts ) ) ) class Unparser: """Methods in this class recursively traverse an AST and output source code for the abstract syntax; original formatting is disregarded. """ def __init__(self, py_ver_consistent=False, _avoid_backslashes=False): """Traverse an AST and generate its source. Arguments: py_ver_consistent (bool): if True, generate unparsed code that is consistent between Python 2.7 and 3.5-3.10. Consistency is achieved by: 1. Ensuring that *args and **kwargs are always the last arguments, regardless of the python version, because Python 2's AST does not have sufficient information to reconstruct star-arg order. 2. Always unparsing print as a function. 3. Unparsing Python3 unicode literals the way Python 2 would. Without these changes, the same source can generate different code for Python 2 and Python 3, depending on subtle AST differences. The first of these two causes this module to behave differently from Python 3.8+'s `ast.unparse()` One place where single source will generate an inconsistent AST is with multi-argument print statements, e.g.:: print("foo", "bar", "baz") In Python 2, this prints a tuple; in Python 3, it is the print function with multiple arguments. Use ``from __future__ import print_function`` to avoid this inconsistency. """ self.future_imports = [] self._indent = 0 self._py_ver_consistent = py_ver_consistent self._precedences = {} self._avoid_backslashes = _avoid_backslashes def items_view(self, traverser, items): """Traverse and separate the given *items* with a comma and append it to the buffer. 
If *items* is a single item sequence, a trailing comma will be added.""" if len(items) == 1: traverser(items[0]) self.write(",") else: interleave(lambda: self.write(", "), traverser, items) def visit(self, tree, output_file): """Traverse tree and write source code to output_file.""" self.f = output_file self.dispatch(tree) self.f.flush() def fill(self, text=""): "Indent a piece of text, according to the current indentation level" self.f.write("\n" + " " * self._indent + text) def write(self, text): "Append a piece of text to the current line." self.f.write(six.text_type(text)) class _Block: """A context manager for preparing the source for blocks. It adds the character ':', increases the indentation on enter and decreases the indentation on exit.""" def __init__(self, unparser): self.unparser = unparser def __enter__(self): self.unparser.write(":") self.unparser._indent += 1 def __exit__(self, exc_type, exc_value, traceback): self.unparser._indent -= 1 def block(self): return self._Block(self) @contextmanager def delimit(self, start, end): """A context manager for preparing the source for expressions. It adds *start* to the buffer and enters, after exit it adds *end*.""" self.write(start) yield self.write(end) def delimit_if(self, start, end, condition): if condition: return self.delimit(start, end) else: return nullcontext() def require_parens(self, precedence, node): """Shortcut to adding precedence related parens""" return self.delimit_if("(", ")", self.get_precedence(node) > precedence) def get_precedence(self, node): return self._precedences.get(node, _Precedence.TEST) def set_precedence(self, precedence, *nodes): for node in nodes: self._precedences[node] = precedence def dispatch(self, tree): "Dispatcher function, dispatching tree type T to method _T." if isinstance(tree, list): for node in tree: self.dispatch(node) return meth = getattr(self, "visit_" + tree.__class__.__name__) meth(tree) # # Unparsing methods # # There should be one method per concrete grammar type Constructors # should be # grouped by sum type. Ideally, this would follow the order # in the grammar, but currently doesn't. def visit_Module(self, tree): for stmt in tree.body: self.dispatch(stmt) def visit_Interactive(self, tree): for stmt in tree.body: self.dispatch(stmt) def visit_Expression(self, tree): self.dispatch(tree.body) # stmt def visit_Expr(self, tree): self.fill() self.set_precedence(_Precedence.YIELD, tree.value) self.dispatch(tree.value) def visit_NamedExpr(self, tree): with self.require_parens(_Precedence.TUPLE, tree): self.set_precedence(_Precedence.ATOM, tree.target, tree.value) self.dispatch(tree.target) self.write(" := ") self.dispatch(tree.value) def visit_Import(self, node): self.fill("import ") interleave(lambda: self.write(", "), self.dispatch, node.names) def visit_ImportFrom(self, node): # A from __future__ import may affect unparsing, so record it. if node.module and node.module == '__future__': self.future_imports.extend(n.name for n in node.names) self.fill("from ") self.write("." 
* node.level) if node.module: self.write(node.module) self.write(" import ") interleave(lambda: self.write(", "), self.dispatch, node.names) def visit_Assign(self, node): self.fill() for target in node.targets: self.dispatch(target) self.write(" = ") self.dispatch(node.value) def visit_AugAssign(self, node): self.fill() self.dispatch(node.target) self.write(" " + self.binop[node.op.__class__.__name__] + "= ") self.dispatch(node.value) def visit_AnnAssign(self, node): self.fill() with self.delimit_if( "(", ")", not node.simple and isinstance(node.target, ast.Name)): self.dispatch(node.target) self.write(": ") self.dispatch(node.annotation) if node.value: self.write(" = ") self.dispatch(node.value) def visit_Return(self, node): self.fill("return") if node.value: self.write(" ") self.dispatch(node.value) def visit_Pass(self, node): self.fill("pass") def visit_Break(self, node): self.fill("break") def visit_Continue(self, node): self.fill("continue") def visit_Delete(self, node): self.fill("del ") interleave(lambda: self.write(", "), self.dispatch, node.targets) def visit_Assert(self, node): self.fill("assert ") self.dispatch(node.test) if node.msg: self.write(", ") self.dispatch(node.msg) def visit_Exec(self, node): self.fill("exec ") self.dispatch(node.body) if node.globals: self.write(" in ") self.dispatch(node.globals) if node.locals: self.write(", ") self.dispatch(node.locals) def visit_Print(self, node): # Use print function so that python 2 unparsing is consistent with 3 if self._py_ver_consistent: self.fill("print") with self.delimit("(", ")"): values = node.values # Can't tell print(foo, bar, baz) and print((foo, bar, baz)) apart in # python 2 and 3, so treat them the same to make hashes consistent. # Single-tuple print are rare and unlikely to affect package hashes, # esp. as they likely print to stdout. 
if len(values) == 1 and isinstance(values[0], ast.Tuple): values = node.values[0].elts do_comma = False for e in values: if do_comma: self.write(", ") else: do_comma = True self.dispatch(e) if not node.nl: if do_comma: self.write(", ") else: do_comma = True self.write("end=''") if node.dest: if do_comma: self.write(", ") else: do_comma = True self.write("file=") self.dispatch(node.dest) else: # unparse Python 2 print statements self.fill("print ") do_comma = False if node.dest: self.write(">>") self.dispatch(node.dest) do_comma = True for e in node.values: if do_comma: self.write(", ") else: do_comma = True self.dispatch(e) if not node.nl: self.write(",") def visit_Global(self, node): self.fill("global ") interleave(lambda: self.write(", "), self.write, node.names) def visit_Nonlocal(self, node): self.fill("nonlocal ") interleave(lambda: self.write(", "), self.write, node.names) def visit_Await(self, node): with self.require_parens(_Precedence.AWAIT, node): self.write("await") if node.value: self.write(" ") self.set_precedence(_Precedence.ATOM, node.value) self.dispatch(node.value) def visit_Yield(self, node): with self.require_parens(_Precedence.YIELD, node): self.write("yield") if node.value: self.write(" ") self.set_precedence(_Precedence.ATOM, node.value) self.dispatch(node.value) def visit_YieldFrom(self, node): with self.require_parens(_Precedence.YIELD, node): self.write("yield from") if node.value: self.write(" ") self.set_precedence(_Precedence.ATOM, node.value) self.dispatch(node.value) def visit_Raise(self, node): self.fill("raise") if six.PY3: if not node.exc: assert not node.cause return self.write(" ") self.dispatch(node.exc) if node.cause: self.write(" from ") self.dispatch(node.cause) else: self.write(" ") if node.type: self.dispatch(node.type) if node.inst: self.write(", ") self.dispatch(node.inst) if node.tback: self.write(", ") self.dispatch(node.tback) def visit_Try(self, node): self.fill("try") with self.block(): self.dispatch(node.body) for ex in node.handlers: self.dispatch(ex) if node.orelse: self.fill("else") with self.block(): self.dispatch(node.orelse) if node.finalbody: self.fill("finally") with self.block(): self.dispatch(node.finalbody) def visit_TryExcept(self, node): self.fill("try") with self.block(): self.dispatch(node.body) for ex in node.handlers: self.dispatch(ex) if node.orelse: self.fill("else") with self.block(): self.dispatch(node.orelse) def visit_TryFinally(self, node): if len(node.body) == 1 and isinstance(node.body[0], ast.TryExcept): # try-except-finally self.dispatch(node.body) else: self.fill("try") with self.block(): self.dispatch(node.body) self.fill("finally") with self.block(): self.dispatch(node.finalbody) def visit_ExceptHandler(self, node): self.fill("except") if node.type: self.write(" ") self.dispatch(node.type) if node.name: self.write(" as ") if six.PY3: self.write(node.name) else: self.dispatch(node.name) with self.block(): self.dispatch(node.body) def visit_ClassDef(self, node): self.write("\n") for deco in node.decorator_list: self.fill("@") self.dispatch(deco) self.fill("class " + node.name) if six.PY3: with self.delimit_if("(", ")", condition=node.bases or node.keywords): comma = False for e in node.bases: if comma: self.write(", ") else: comma = True self.dispatch(e) for e in node.keywords: if comma: self.write(", ") else: comma = True self.dispatch(e) if sys.version_info[:2] < (3, 5): if node.starargs: if comma: self.write(", ") else: comma = True self.write("*") self.dispatch(node.starargs) if node.kwargs: if comma: 
self.write(", ") else: comma = True self.write("**") self.dispatch(node.kwargs) elif node.bases: with self.delimit("(", ")"): for a in node.bases[:-1]: self.dispatch(a) self.write(", ") self.dispatch(node.bases[-1]) with self.block(): self.dispatch(node.body) def visit_FunctionDef(self, node): self.__FunctionDef_helper(node, "def") def visit_AsyncFunctionDef(self, node): self.__FunctionDef_helper(node, "async def") def __FunctionDef_helper(self, node, fill_suffix): self.write("\n") for deco in node.decorator_list: self.fill("@") self.dispatch(deco) def_str = fill_suffix + " " + node.name self.fill(def_str) with self.delimit("(", ")"): self.dispatch(node.args) if getattr(node, "returns", False): self.write(" -> ") self.dispatch(node.returns) with self.block(): self.dispatch(node.body) def visit_For(self, node): self.__For_helper("for ", node) def visit_AsyncFor(self, node): self.__For_helper("async for ", node) def __For_helper(self, fill, node): self.fill(fill) self.dispatch(node.target) self.write(" in ") self.dispatch(node.iter) with self.block(): self.dispatch(node.body) if node.orelse: self.fill("else") with self.block(): self.dispatch(node.orelse) def visit_If(self, node): self.fill("if ") self.dispatch(node.test) with self.block(): self.dispatch(node.body) # collapse nested ifs into equivalent elifs. while (node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], ast.If)): node = node.orelse[0] self.fill("elif ") self.dispatch(node.test) with self.block(): self.dispatch(node.body) # final else if node.orelse: self.fill("else") with self.block(): self.dispatch(node.orelse) def visit_While(self, node): self.fill("while ") self.dispatch(node.test) with self.block(): self.dispatch(node.body) if node.orelse: self.fill("else") with self.block(): self.dispatch(node.orelse) def _generic_With(self, node, async_=False): self.fill("async with " if async_ else "with ") if hasattr(node, 'items'): interleave(lambda: self.write(", "), self.dispatch, node.items) else: self.dispatch(node.context_expr) if node.optional_vars: self.write(" as ") self.dispatch(node.optional_vars) with self.block(): self.dispatch(node.body) def visit_With(self, node): self._generic_With(node) def visit_AsyncWith(self, node): self._generic_With(node, async_=True) def _str_literal_helper( self, string, quote_types=_ALL_QUOTES, escape_special_whitespace=False ): """Helper for writing string literals, minimizing escapes. Returns the tuple (string literal to write, possible quote types). """ def escape_char(c): # \n and \t are non-printable, but we only escape them if # escape_special_whitespace is True if not escape_special_whitespace and c in "\n\t": return c # Always escape backslashes and other non-printable characters if c == "\\" or not c.isprintable(): return c.encode("unicode_escape").decode("ascii") return c escaped_string = "".join(map(escape_char, string)) possible_quotes = quote_types if "\n" in escaped_string: possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES] possible_quotes = [q for q in possible_quotes if q not in escaped_string] if not possible_quotes: # If there aren't any possible_quotes, fallback to using repr # on the original string. Try to use a quote from quote_types, # e.g., so that we use triple quotes for docstrings. 
string = repr(string) quote = next((q for q in quote_types if string[0] in q), string[0]) return string[1:-1], [quote] if escaped_string: # Sort so that we prefer '''"''' over """\"""" possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1]) # If we're using triple quotes and we'd need to escape a final # quote, escape it if possible_quotes[0][0] == escaped_string[-1]: assert len(possible_quotes[0]) == 3 escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1] return escaped_string, possible_quotes def _write_str_avoiding_backslashes(self, string, quote_types=_ALL_QUOTES): """Write string literal value w/a best effort attempt to avoid backslashes.""" string, quote_types = self._str_literal_helper(string, quote_types=quote_types) quote_type = quote_types[0] self.write("{quote_type}{string}{quote_type}".format( quote_type=quote_type, string=string, )) # expr def visit_Bytes(self, node): self.write(repr(node.s)) def visit_Str(self, tree): if six.PY3: # Python 3.5, 3.6, and 3.7 can't tell if something was written as a # unicode constant. Try to make that consistent with 'u' for '\u- literals if self._py_ver_consistent and repr(tree.s).startswith("'\\u"): self.write("u") self._write_constant(tree.s) elif self._py_ver_consistent: self.write(repr(tree.s)) # just do a python 2 repr for consistency else: # if from __future__ import unicode_literals is in effect, # then we want to output string literals using a 'b' prefix # and unicode literals with no prefix. if "unicode_literals" not in self.future_imports: self.write(repr(tree.s)) elif isinstance(tree.s, str): self.write("b" + repr(tree.s)) elif isinstance(tree.s, unicode): # noqa self.write(repr(tree.s).lstrip("u")) else: assert False, "shouldn't get here" def visit_JoinedStr(self, node): # JoinedStr(expr* values) self.write("f") if self._avoid_backslashes: string = StringIO() self._fstring_JoinedStr(node, string.write) self._write_str_avoiding_backslashes(string.getvalue()) return # If we don't need to avoid backslashes globally (i.e., we only need # to avoid them inside FormattedValues), it's cosmetically preferred # to use escaped whitespace. That is, it's preferred to use backslashes # for cases like: f"{x}\n". To accomplish this, we keep track of what # in our buffer corresponds to FormattedValues and what corresponds to # Constant parts of the f-string, and allow escapes accordingly. buffer = [] for value in node.values: meth = getattr(self, "_fstring_" + type(value).__name__) string = StringIO() meth(value, string.write) buffer.append((string.getvalue(), isinstance(value, ast.Constant))) new_buffer = [] quote_types = _ALL_QUOTES for value, is_constant in buffer: # Repeatedly narrow down the list of possible quote_types value, quote_types = self._str_literal_helper( value, quote_types=quote_types, escape_special_whitespace=is_constant ) new_buffer.append(value) value = "".join(new_buffer) quote_type = quote_types[0] self.write("{quote_type}{value}{quote_type}".format( quote_type=quote_type, value=value, )) def visit_FormattedValue(self, node): # FormattedValue(expr value, int? conversion, expr? 
format_spec) self.write("f") string = StringIO() self._fstring_JoinedStr(node, string.write) self._write_str_avoiding_backslashes(string.getvalue()) def _fstring_JoinedStr(self, node, write): for value in node.values: print(" ", value) meth = getattr(self, "_fstring_" + type(value).__name__) print(meth) meth(value, write) def _fstring_Str(self, node, write): value = node.s.replace("{", "{{").replace("}", "}}") write(value) def _fstring_Constant(self, node, write): assert isinstance(node.value, str) value = node.value.replace("{", "{{").replace("}", "}}") write(value) def _fstring_FormattedValue(self, node, write): write("{") expr = StringIO() unparser = type(self)( py_ver_consistent=self._py_ver_consistent, _avoid_backslashes=True, ) unparser.set_precedence(pnext(_Precedence.TEST), node.value) unparser.visit(node.value, expr) expr = expr.getvalue().rstrip("\n") if expr.startswith("{"): write(" ") # Separate pair of opening brackets as "{ {" if "\\" in expr: raise ValueError("Unable to avoid backslash in f-string expression part") write(expr) if node.conversion != -1: conversion = chr(node.conversion) assert conversion in "sra" write("!{conversion}".format(conversion=conversion)) if node.format_spec: write(":") meth = getattr(self, "_fstring_" + type(node.format_spec).__name__) meth(node.format_spec, write) write("}") def visit_Name(self, node): self.write(node.id) def visit_NameConstant(self, node): self.write(repr(node.value)) def visit_Repr(self, node): self.write("`") self.dispatch(node.value) self.write("`") def _write_constant(self, value): if isinstance(value, (float, complex)): # Substitute overflowing decimal literal for AST infinities. self.write(repr(value).replace("inf", INFSTR)) elif isinstance(value, str) and self._py_ver_consistent: # emulate a python 2 repr with raw unicode escapes # see _Str for python 2 counterpart raw = repr(value.encode("raw_unicode_escape")).lstrip('b') if raw.startswith(r"'\\u"): raw = "'\\" + raw[3:] self.write(raw) elif self._avoid_backslashes and isinstance(value, str): self._write_str_avoiding_backslashes(value) else: self.write(repr(value)) def visit_Constant(self, node): value = node.value if isinstance(value, tuple): with self.delimit("(", ")"): self.items_view(self._write_constant, value) elif value is Ellipsis: # instead of `...` for Py2 compatibility self.write("...") else: if node.kind == "u": self.write("u") self._write_constant(node.value) def visit_Num(self, node): repr_n = repr(node.n) if six.PY3: self.write(repr_n.replace("inf", INFSTR)) else: # Parenthesize negative numbers, to avoid turning (-1)**2 into -1**2. with self.require_parens(pnext(_Precedence.FACTOR), node): if "inf" in repr_n and repr_n.endswith("*j"): repr_n = repr_n.replace("*j", "j") # Substitute overflowing decimal literal for AST infinities. 
self.write(repr_n.replace("inf", INFSTR)) def visit_List(self, node): with self.delimit("[", "]"): interleave(lambda: self.write(", "), self.dispatch, node.elts) def visit_ListComp(self, node): with self.delimit("[", "]"): self.dispatch(node.elt) for gen in node.generators: self.dispatch(gen) def visit_GeneratorExp(self, node): with self.delimit("(", ")"): self.dispatch(node.elt) for gen in node.generators: self.dispatch(gen) def visit_SetComp(self, node): with self.delimit("{", "}"): self.dispatch(node.elt) for gen in node.generators: self.dispatch(gen) def visit_DictComp(self, node): with self.delimit("{", "}"): self.dispatch(node.key) self.write(": ") self.dispatch(node.value) for gen in node.generators: self.dispatch(gen) def visit_comprehension(self, node): if getattr(node, 'is_async', False): self.write(" async for ") else: self.write(" for ") self.set_precedence(_Precedence.TUPLE, node.target) self.dispatch(node.target) self.write(" in ") self.set_precedence(pnext(_Precedence.TEST), node.iter, *node.ifs) self.dispatch(node.iter) for if_clause in node.ifs: self.write(" if ") self.dispatch(if_clause) def visit_IfExp(self, node): with self.require_parens(_Precedence.TEST, node): self.set_precedence(pnext(_Precedence.TEST), node.body, node.test) self.dispatch(node.body) self.write(" if ") self.dispatch(node.test) self.write(" else ") self.set_precedence(_Precedence.TEST, node.orelse) self.dispatch(node.orelse) def visit_Set(self, node): assert(node.elts) # should be at least one element with self.delimit("{", "}"): interleave(lambda: self.write(", "), self.dispatch, node.elts) def visit_Dict(self, node): def write_key_value_pair(k, v): self.dispatch(k) self.write(": ") self.dispatch(v) def write_item(item): k, v = item if k is None: # for dictionary unpacking operator in dicts {**{'y': 2}} # see PEP 448 for details self.write("**") self.set_precedence(_Precedence.EXPR, v) self.dispatch(v) else: write_key_value_pair(k, v) with self.delimit("{", "}"): interleave( lambda: self.write(", "), write_item, zip(node.keys, node.values) ) def visit_Tuple(self, node): with self.delimit("(", ")"): self.items_view(self.dispatch, node.elts) unop = { "Invert": "~", "Not": "not", "UAdd": "+", "USub": "-" } unop_precedence = { "~": _Precedence.FACTOR, "not": _Precedence.NOT, "+": _Precedence.FACTOR, "-": _Precedence.FACTOR, } def visit_UnaryOp(self, node): operator = self.unop[node.op.__class__.__name__] operator_precedence = self.unop_precedence[operator] with self.require_parens(operator_precedence, node): self.write(operator) # factor prefixes (+, -, ~) shouldn't be separated # from the value they belong, (e.g: +1 instead of + 1) if operator_precedence != _Precedence.FACTOR: self.write(" ") self.set_precedence(operator_precedence, node.operand) if (six.PY2 and isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Num)): # If we're applying unary minus to a number, parenthesize the number. # This is necessary: -2147483648 is different from -(2147483648) on # a 32-bit machine (the first is an int, the second a long), and # -7j is different from -(7j). (The first has real part 0.0, the second # has real part -0.0.) 
with self.delimit("(", ")"): self.dispatch(node.operand) else: self.dispatch(node.operand) binop = { "Add": "+", "Sub": "-", "Mult": "*", "MatMult": "@", "Div": "/", "Mod": "%", "LShift": "<<", "RShift": ">>", "BitOr": "|", "BitXor": "^", "BitAnd": "&", "FloorDiv": "//", "Pow": "**", } binop_precedence = { "+": _Precedence.ARITH, "-": _Precedence.ARITH, "*": _Precedence.TERM, "@": _Precedence.TERM, "/": _Precedence.TERM, "%": _Precedence.TERM, "<<": _Precedence.SHIFT, ">>": _Precedence.SHIFT, "|": _Precedence.BOR, "^": _Precedence.BXOR, "&": _Precedence.BAND, "//": _Precedence.TERM, "**": _Precedence.POWER, } binop_rassoc = frozenset(("**",)) def visit_BinOp(self, node): operator = self.binop[node.op.__class__.__name__] operator_precedence = self.binop_precedence[operator] with self.require_parens(operator_precedence, node): if operator in self.binop_rassoc: left_precedence = pnext(operator_precedence) right_precedence = operator_precedence else: left_precedence = operator_precedence right_precedence = pnext(operator_precedence) self.set_precedence(left_precedence, node.left) self.dispatch(node.left) self.write(" %s " % operator) self.set_precedence(right_precedence, node.right) self.dispatch(node.right) cmpops = { "Eq": "==", "NotEq": "!=", "Lt": "<", "LtE": "<=", "Gt": ">", "GtE": ">=", "Is": "is", "IsNot": "is not", "In": "in", "NotIn": "not in", } def visit_Compare(self, node): with self.require_parens(_Precedence.CMP, node): self.set_precedence(pnext(_Precedence.CMP), node.left, *node.comparators) self.dispatch(node.left) for o, e in zip(node.ops, node.comparators): self.write(" " + self.cmpops[o.__class__.__name__] + " ") self.dispatch(e) boolops = { "And": "and", "Or": "or", } boolop_precedence = { "and": _Precedence.AND, "or": _Precedence.OR, } def visit_BoolOp(self, node): operator = self.boolops[node.op.__class__.__name__] # use a dict instead of nonlocal for Python 2 compatibility op = {"precedence": self.boolop_precedence[operator]} def increasing_level_dispatch(node): op["precedence"] = pnext(op["precedence"]) self.set_precedence(op["precedence"], node) self.dispatch(node) with self.require_parens(op["precedence"], node): s = " %s " % operator interleave(lambda: self.write(s), increasing_level_dispatch, node.values) def visit_Attribute(self, node): self.set_precedence(_Precedence.ATOM, node.value) self.dispatch(node.value) # Special case: 3.__abs__() is a syntax error, so if node.value # is an integer literal then we need to either parenthesize # it or add an extra space to get 3 .__abs__(). num_type = getattr(ast, 'Constant', getattr(ast, 'Num', None)) if isinstance(node.value, num_type) and isinstance(node.value.n, int): self.write(" ") self.write(".") self.write(node.attr) def visit_Call(self, node): self.set_precedence(_Precedence.ATOM, node.func) args = node.args if self._py_ver_consistent: # make print(a, b, c) and print((a, b, c)) equivalent, since you can't # tell them apart between Python 2 and 3. See _Print() for more details. 
if getattr(node.func, "id", None) == "print": if len(node.args) == 1 and isinstance(node.args[0], ast.Tuple): args = node.args[0].elts self.dispatch(node.func) with self.delimit("(", ")"): comma = False # starred arguments last in Python 3.5+, for consistency w/earlier versions star_and_kwargs = [] move_stars_last = sys.version_info[:2] >= (3, 5) for e in args: if move_stars_last and isinstance(e, ast.Starred): star_and_kwargs.append(e) else: if comma: self.write(", ") else: comma = True self.dispatch(e) for e in node.keywords: # starting from Python 3.5 this denotes a kwargs part of the invocation if e.arg is None and move_stars_last: star_and_kwargs.append(e) else: if comma: self.write(", ") else: comma = True self.dispatch(e) if move_stars_last: for e in star_and_kwargs: if comma: self.write(", ") else: comma = True self.dispatch(e) if sys.version_info[:2] < (3, 5): if node.starargs: if comma: self.write(", ") else: comma = True self.write("*") self.dispatch(node.starargs) if node.kwargs: if comma: self.write(", ") else: comma = True self.write("**") self.dispatch(node.kwargs) def visit_Subscript(self, node): self.set_precedence(_Precedence.ATOM, node.value) self.dispatch(node.value) with self.delimit("[", "]"): if is_simple_tuple(node.slice): self.items_view(self.dispatch, node.slice.elts) else: self.dispatch(node.slice) def visit_Starred(self, node): self.write("*") self.set_precedence(_Precedence.EXPR, node.value) self.dispatch(node.value) # slice def visit_Ellipsis(self, node): self.write("...") # used in Python <= 3.8 -- see _Subscript for 3.9+ def visit_Index(self, node): if is_simple_tuple(node.value): self.set_precedence(_Precedence.ATOM, node.value) self.items_view(self.dispatch, node.value.elts) else: self.set_precedence(_Precedence.TUPLE, node.value) self.dispatch(node.value) def visit_Slice(self, node): if node.lower: self.dispatch(node.lower) self.write(":") if node.upper: self.dispatch(node.upper) if node.step: self.write(":") self.dispatch(node.step) def visit_ExtSlice(self, node): interleave(lambda: self.write(', '), self.dispatch, node.dims) # argument def visit_arg(self, node): self.write(node.arg) if node.annotation: self.write(": ") self.dispatch(node.annotation) # others def visit_arguments(self, node): first = True # normal arguments all_args = getattr(node, 'posonlyargs', []) + node.args defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults for index, elements in enumerate(zip(all_args, defaults), 1): a, d = elements if first: first = False else: self.write(", ") self.dispatch(a) if d: self.write("=") self.dispatch(d) if index == len(getattr(node, 'posonlyargs', ())): self.write(", /") # varargs, or bare '*' if no varargs but keyword-only arguments present if node.vararg or getattr(node, "kwonlyargs", False): if first: first = False else: self.write(", ") self.write("*") if node.vararg: if hasattr(node.vararg, 'arg'): self.write(node.vararg.arg) if node.vararg.annotation: self.write(": ") self.dispatch(node.vararg.annotation) else: self.write(node.vararg) if getattr(node, 'varargannotation', None): self.write(": ") self.dispatch(node.varargannotation) # keyword-only arguments if getattr(node, "kwonlyargs", False): for a, d in zip(node.kwonlyargs, node.kw_defaults): if first: first = False else: self.write(", ") self.dispatch(a), if d: self.write("=") self.dispatch(d) # kwargs if node.kwarg: if first: first = False else: self.write(", ") if hasattr(node.kwarg, 'arg'): self.write("**" + node.kwarg.arg) if node.kwarg.annotation: self.write(": ") 
self.dispatch(node.kwarg.annotation) else: self.write("**" + node.kwarg) if getattr(node, 'kwargannotation', None): self.write(": ") self.dispatch(node.kwargannotation) def visit_keyword(self, node): if node.arg is None: # starting from Python 3.5 this denotes a kwargs part of the invocation self.write("**") else: self.write(node.arg) self.write("=") self.dispatch(node.value) def visit_Lambda(self, node): with self.require_parens(_Precedence.TEST, node): self.write("lambda ") self.dispatch(node.args) self.write(": ") self.set_precedence(_Precedence.TEST, node.body) self.dispatch(node.body) def visit_alias(self, node): self.write(node.name) if node.asname: self.write(" as " + node.asname) def visit_withitem(self, node): self.dispatch(node.context_expr) if node.optional_vars: self.write(" as ") self.dispatch(node.optional_vars)
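A minimal round-trip sketch for the Unparser class above; it assumes the module is importable as spack.util.unparse.unparser and that six is installed, and the sample source string is arbitrary:

import ast

from six import StringIO

from spack.util.unparse.unparser import Unparser

source = "print('foo', 'bar')\n"
buf = StringIO()

# py_ver_consistent=True asks for output that is the same whether the AST came
# from Python 2.7 or Python 3.x, which is what consistent package hashing needs.
Unparser(py_ver_consistent=True).visit(ast.parse(source), buf)

print(buf.getvalue())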
player1537-forks/spack
var/spack/repos/builtin/packages/r-corhmm/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RCorhmm(RPackage):
    """Hidden Markov Models of Character Evolution.

    Fits hidden Markov models of discrete character evolution which allow
    different transition rate classes on different portions of a phylogeny.
    Beaulieu et al (2013) <doi:10.1093/sysbio/syt034>."""

    cran = "corHMM"

    version('2.7', sha256='0d54ba0f6b3f884343bcc26919d8febc05efb0b739cb962d3072ca0bc0ce270a')
    version('2.6', sha256='726de9707ede8ef447915171a3abe1003a0e42fe8e17eb440442cac9adf8cdcf')
    version('1.22', sha256='d262fa1183eab32087afb70f1789fabae6fb49bec01d627974c54a088a48b10d')

    depends_on('r-ape', type=('build', 'run'))
    depends_on('r-nloptr', type=('build', 'run'))
    depends_on('r-gensa', type=('build', 'run'))
    depends_on('r-expm', type=('build', 'run'))
    depends_on('r-numderiv', type=('build', 'run'))
    depends_on('r-corpcor', type=('build', 'run'))
    depends_on('r-mass', type=('build', 'run'), when='@2.6:')
    depends_on('r-nnet', type=('build', 'run'))
    depends_on('r-phangorn', type=('build', 'run'))
    depends_on('r-viridis', type=('build', 'run'), when='@2.6:')
    depends_on('r-rmpfr', type=('build', 'run'))
    depends_on('r-igraph', type=('build', 'run'), when='@2.6:')
    depends_on('r-phytools', type=('build', 'run'), when='@2.6:')
player1537-forks/spack
var/spack/repos/builtin/packages/stacks/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Stacks(AutotoolsPackage):
    """Stacks is a software pipeline for building loci from short-read
    sequences, such as those generated on the Illumina platform."""

    homepage = "https://catchenlab.life.illinois.edu/stacks/"
    url = "https://catchenlab.life.illinois.edu/stacks/source/stacks-1.46.tar.gz"

    version('2.53', sha256='ee1efceaeeeb7a39f0c2e804ad7c0a003094db28c9101120c38ddb02846e90fd')
    version('2.3b', sha256='a46786d8811a730ebcdc17891e89f50d4f4ae196734439dac86091f45c92ac72')
    version('1.46', sha256='45a0725483dc0c0856ad6b1f918e65d91c1f0fe7d8bf209f76b93f85c29ea28a')

    variant('sparsehash', default=True,
            description='Improve Stacks memory usage with SparseHash')

    depends_on('perl', type=('build', 'run'))
    depends_on('sparsehash', when='+sparsehash')
    depends_on('zlib', when='@2.3b:')

    conflicts('%gcc@:4.9.0', when='@2.3b:')

    def configure_args(self):
        args = []

        if '+sparsehash' in self.spec:
            args.append('--enable-sparsehash')
        else:
            args.append('--disable-sparsehash')

        return args
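For comparison, the configure_args() above could also lean on the enable_or_disable() helper that AutotoolsPackage provides for boolean variants; a hedged sketch, assuming the default --enable-sparsehash/--disable-sparsehash flag spelling is what the helper generates here:

    def configure_args(self):
        # Maps the boolean 'sparsehash' variant to --enable-sparsehash or
        # --disable-sparsehash, mirroring the explicit branch above.
        return self.enable_or_disable('sparsehash')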
player1537-forks/spack
var/spack/repos/builtin/packages/perl-devel-globaldestruction/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlDevelGlobaldestruction(PerlPackage):
    """Makes Perl's global destruction less tricky to deal with"""

    homepage = "https://metacpan.org/pod/Devel::GlobalDestruction"
    url = "http://search.cpan.org/CPAN/authors/id/H/HA/HAARG/Devel-GlobalDestruction-0.14.tar.gz"

    version('0.14', sha256='34b8a5f29991311468fe6913cadaba75fd5d2b0b3ee3bb41fe5b53efab9154ab')
player1537-forks/spack
var/spack/repos/builtin/packages/r-corpcor/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RCorpcor(RPackage):
    """Efficient Estimation of Covariance and (Partial) Correlation.

    Implements a James-Stein-type shrinkage estimator for the covariance
    matrix, with separate shrinkage for variances and correlations. The
    details of the method are explained in Schafer and Strimmer (2005)
    <DOI:10.2202/1544-6115.1175> and Opgen-Rhein and Strimmer (2007)
    <DOI:10.2202/1544-6115.1252>. The approach is both computationally as
    well as statistically very efficient, it is applicable to "small n,
    large p" data, and always returns a positive definite and
    well-conditioned covariance matrix. In addition to inferring the
    covariance matrix the package also provides shrinkage estimators for
    partial correlations and partial variances. The inverse of the
    covariance and correlation matrix can be efficiently computed, as well
    as any arbitrary power of the shrinkage correlation matrix. Furthermore,
    functions are available for fast singular value decomposition, for
    computing the pseudoinverse, and for checking the rank and positive
    definiteness of a matrix."""

    cran = "corpcor"

    version('1.6.10', sha256='71a04c503c93ec95ddde09abe8c7ddeb36175b7da76365a14b27066383e10e09')
    version('1.6.9', sha256='2e4fabd1d3936fecea67fa365233590147ca50bb45cf80efb53a10345a8a23c2')

    depends_on('r@3.0.2:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/libasr/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libasr(AutotoolsPackage):
    """libasr is a free, simple and portable asynchronous resolver library."""

    homepage = "https://github.com/OpenSMTPD/libasr"
    url = "https://github.com/OpenSMTPD/libasr/releases/download/1.0.4/libasr-1.0.4.tar.gz"

    version('1.0.4', sha256='19fb6bed10d15c9775c8d008cd1130155917ae4e801c729fe85e6d88a545dab4')
    version('1.0.3', sha256='9cd88e0172e6d426438875e09229d1d473d56db546d02b630f9dd14db226d68d')
player1537-forks/spack
var/spack/repos/builtin/packages/xbacklight/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Xbacklight(AutotoolsPackage, XorgPackage):
    """Xbacklight is used to adjust the backlight brightness where supported.
    It uses the RandR extension to find all outputs on the X server
    supporting backlight brightness control and changes them all in the
    same way."""

    homepage = "https://cgit.freedesktop.org/xorg/app/xbacklight"
    xorg_mirror_path = "app/xbacklight-1.2.1.tar.gz"

    version('1.2.1', sha256='82c80cd851e3eb6d7a216d92465fcf6d5e456c2d5ac12c63cd2757b39fb65b10')

    depends_on('libxcb')
    depends_on('xcb-util')

    depends_on('pkgconfig', type='build')
    depends_on('util-macros', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/r-wk/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RWk(RPackage):
    """Lightweight Well-Known Geometry Parsing.

    Provides a minimal R and C++ API for parsing well-known binary and
    well-known text representation of geometries to and from R-native
    formats. Well-known binary is compact and fast to parse; well-known
    text is human-readable and is useful for writing tests. These formats
    are only useful in R if the information they contain can be accessed
    in R, for which high-performance functions are provided here."""

    cran = "wk"

    version('0.6.0', sha256='af2c2837056a6dcc9f64d5ace29601d6d668c95769f855ca0329648d7326eaf5')
    version('0.4.1', sha256='daa7351af0bd657740972016906c686f335b8fa922ba10250e5000ddc2bb8950')

    depends_on('r-cpp11', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/gatetools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Gatetools(PythonPackage):
    """Python tools for GATE, see https://github.com/OpenGATE/Gate"""

    homepage = "https://github.com/OpenGATE/GateTools"
    pypi = "gatetools/gatetools-0.9.14.tar.gz"

    maintainers = ['glennpj']

    version('0.9.14', sha256='78fe864bb52fd4c6aeeee90d8f6c1bc5406ce02ac6f48712379efac606b5c006')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-matplotlib', type=('build', 'run'))
    depends_on('py-click', type=('build', 'run'))
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-scipy', type=('build', 'run'))
    depends_on('py-pydicom', type=('build', 'run'))
    depends_on('py-tqdm', type=('build', 'run'))
    depends_on('py-colored', type=('build', 'run'))
    depends_on('py-itk@5.1.0:', type=('build', 'run'))
    depends_on('py-uproot3', type=('build', 'run'))
    depends_on('py-wget', type=('build', 'run'))
    depends_on('gate+rtk', type='run')

    # The readme.md file is not in the distribution, so fake it.
    @run_before('install')
    def readme(self):
        touch('readme.md')
player1537-forks/spack
lib/spack/spack/util/cpus.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import multiprocessing
import os


def cpus_available():
    """
    Returns the number of CPUs available for the current process, or the
    number of physical CPUs when that information cannot be retrieved. The
    number of available CPUs might differ from the number of physical CPUs
    when using Spack through Slurm or container runtimes.
    """
    try:
        return len(os.sched_getaffinity(0))  # novermin
    except Exception:
        return multiprocessing.cpu_count()
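A small sketch of the two code paths above, assuming a Linux host where os.sched_getaffinity is available (the taskset pinning mentioned in the comment is a hypothetical scenario):

import multiprocessing
import os

# Logical CPUs reported by the OS, regardless of any pinning.
print(multiprocessing.cpu_count())

# CPUs this process may actually run on, e.g. when launched under
# `taskset -c 0-3` or inside a cgroup-limited container; this is the
# value cpus_available() prefers when it can get it.
print(len(os.sched_getaffinity(0)))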
player1537-forks/spack
lib/spack/spack/extensions.py
<gh_stars>10-100 # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Service functions and classes to implement the hooks for Spack's command extensions. """ import importlib import os import re import sys import types import llnl.util.lang import spack.config import spack.error import spack.util.path _extension_regexp = re.compile(r'spack-(\w[-\w]*)$') # TODO: For consistency we should use spack.cmd.python_name(), but # currently this would create a circular relationship between # spack.cmd and spack.extensions. def _python_name(cmd_name): return cmd_name.replace('-', '_') def extension_name(path): """Returns the name of the extension in the path passed as argument. Args: path (str): path where the extension resides Returns: The extension name. Raises: ExtensionNamingError: if path does not match the expected format for a Spack command extension. """ regexp_match = re.search(_extension_regexp, os.path.basename(os.path.normpath(path))) if not regexp_match: raise ExtensionNamingError(path) return regexp_match.group(1) def load_command_extension(command, path): """Loads a command extension from the path passed as argument. Args: command (str): name of the command (contains ``-``, not ``_``). path (str): base path of the command extension Returns: A valid module if found and loadable; None if not found. Module loading exceptions are passed through. """ extension = _python_name(extension_name(path)) # Compute the name of the module we search, exit early if already imported cmd_package = '{0}.{1}.cmd'.format(__name__, extension) python_name = _python_name(command) module_name = '{0}.{1}'.format(cmd_package, python_name) if module_name in sys.modules: return sys.modules[module_name] # Compute the absolute path of the file to be loaded, along with the # name of the python module where it will be stored cmd_path = os.path.join(path, extension, 'cmd', python_name + '.py') # Short circuit if the command source file does not exist if not os.path.exists(cmd_path): return None def ensure_package_creation(name): package_name = '{0}.{1}'.format(__name__, name) if package_name in sys.modules: return parts = [path] + name.split('.') + ['__init__.py'] init_file = os.path.join(*parts) if os.path.exists(init_file): m = llnl.util.lang.load_module_from_file(package_name, init_file) else: m = types.ModuleType(package_name) # Setting __path__ to give spack extensions the # ability to import from their own tree, see: # # https://docs.python.org/3/reference/import.html#package-path-rules # m.__path__ = [os.path.dirname(init_file)] sys.modules[package_name] = m # Create a searchable package for both the root folder of the extension # and the subfolder containing the commands ensure_package_creation(extension) ensure_package_creation(extension + '.cmd') module = importlib.import_module(module_name) sys.modules[module_name] = module return module def get_extension_paths(): """Return the list of canonicalized extension paths from config:extensions. 
""" extension_paths = spack.config.get('config:extensions') or [] paths = [spack.util.path.canonicalize_path(p) for p in extension_paths] return paths def get_command_paths(): """Return the list of paths where to search for command files.""" command_paths = [] extension_paths = get_extension_paths() for path in extension_paths: extension = _python_name(extension_name(path)) command_paths.append(os.path.join(path, extension, 'cmd')) return command_paths def path_for_extension(target_name, *paths): """Return the test root dir for a given extension. Args: target_name (str): name of the extension to test *paths: paths where the extensions reside Returns: Root directory where tests should reside or None """ for path in paths: name = extension_name(path) if name == target_name: return path else: raise IOError('extension "{0}" not found'.format(target_name)) def get_module(cmd_name): """Imports the extension module for a particular command name and returns it. Args: cmd_name (str): name of the command for which to get a module (contains ``-``, not ``_``). """ # If built-in failed the import search the extension # directories in order extensions = get_extension_paths() for folder in extensions: module = load_command_extension(cmd_name, folder) if module: return module else: raise CommandNotFoundError(cmd_name) def get_template_dirs(): """Returns the list of directories where to search for templates in extensions. """ extension_dirs = get_extension_paths() extensions = [os.path.join(x, 'templates') for x in extension_dirs] return extensions class CommandNotFoundError(spack.error.SpackError): """Exception class thrown when a requested command is not recognized as such. """ def __init__(self, cmd_name): super(CommandNotFoundError, self).__init__( '{0} is not a recognized Spack command or extension command;' ' check with `spack commands`.'.format(cmd_name)) class ExtensionNamingError(spack.error.SpackError): """Exception class thrown when a configured extension does not follow the expected naming convention. """ def __init__(self, path): super(ExtensionNamingError, self).__init__( '{0} does not match the format for a Spack extension path.' .format(path))
player1537-forks/spack
var/spack/repos/builtin/packages/r-rcmdcheck/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRcmdcheck(RPackage):
    """Run 'R CMD check' from 'R' and Capture Results.

    Run 'R CMD check' from 'R' and capture the results of the individual
    checks. Supports running checks in the background, timeouts, pretty
    printing and comparing check results."""

    cran = "rcmdcheck"

    version('1.4.0', sha256='bbd4ef7d514b8c2076196a7c4a6041d34623d55fbe73f2771758ce61fd32c9d0')
    version('1.3.3', sha256='1ab679eb1976d74cd3be5bcad0af7fcc673dbdfd4406bbce32591c8fddfb93b4')

    depends_on('r-callr@3.1.1.9000:', type=('build', 'run'))
    depends_on('r-cli@1.1.0:', type=('build', 'run'))
    depends_on('r-cli@3.0.0:', type=('build', 'run'), when='@1.4.0:')
    depends_on('r-curl', type=('build', 'run'), when='@1.4.0:')
    depends_on('r-desc@1.2.0:', type=('build', 'run'))
    depends_on('r-digest', type=('build', 'run'))
    depends_on('r-pkgbuild', type=('build', 'run'))
    depends_on('r-prettyunits', type=('build', 'run'))
    depends_on('r-r6', type=('build', 'run'))
    depends_on('r-rprojroot', type=('build', 'run'))
    depends_on('r-sessioninfo@1.1.1:', type=('build', 'run'))
    depends_on('r-withr', type=('build', 'run'))
    depends_on('r-xopen', type=('build', 'run'))
    depends_on('r-crayon', type=('build', 'run'), when='@:1.3.3')
player1537-forks/spack
var/spack/repos/builtin/packages/impalajit-llvm/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class ImpalajitLlvm(CMakePackage):
    """Impala is a calculator-like language. This is a fork of the original
    ImpalaJIT project and was enhanced with an LLVM JIT. Impala acts as a
    backend for the `easi` project.
    """

    homepage = "https://github.com/ravil-mobile/ImpalaJIT"
    git = "https://github.com/ravil-mobile/ImpalaJIT"
    maintainers = ['ravil-mobile']

    version('develop', branch='master')
    version('1.0.0', tag='v1.0.0')

    variant('shared', default=True, description='build as a shared library')

    depends_on('llvm@10.0.0:11.1.0')
    depends_on('z3')

    def cmake_args(self):
        args = []
        args.append(self.define_from_variant('SHARED_LIB', 'shared'))
        return args
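A hedged, hand-written equivalent of the cmake_args() above, only to show what define_from_variant() is doing for this boolean variant (the CMake cache-flag spelling is assumed to be the usual -D<name>:BOOL form):

    def cmake_args(self):
        # Emit ON when the spec has +shared, OFF otherwise.
        value = 'ON' if '+shared' in self.spec else 'OFF'
        return ['-DSHARED_LIB:BOOL={0}'.format(value)]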
player1537-forks/spack
var/spack/repos/builtin/packages/perl-xml-writer/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlXmlWriter(PerlPackage):
    """XML::Writer is a helper module for Perl programs that write an XML
    document. The module handles all escaping for attribute values and
    character data and constructs different types of markup, such as tags,
    comments, and processing instructions.

    By default, the module performs several well-formedness checks to catch
    errors during output. This behaviour can be extremely useful during
    development and debugging, but it can be turned off for production-grade
    code.

    The module can operate either in regular mode or in Namespace processing
    mode. In Namespace mode, the module will generate Namespace Declarations
    itself, and will perform additional checks on the output."""

    homepage = "https://metacpan.org/pod/XML::Writer"
    url = "https://cpan.metacpan.org/authors/id/J/JO/JOSEPHW/XML-Writer-0.625.tar.gz"

    version('0.625', sha256='e080522c6ce050397af482665f3965a93c5d16f5e81d93f6e2fe98084ed15fbe')
player1537-forks/spack
var/spack/repos/builtin/packages/cassandra/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Cassandra(Package):
    """Apache Cassandra is a highly-scalable partitioned row store. Rows are
    organized into tables with a required primary key."""

    homepage = "https://github.com/apache/cassandra"
    url = "https://archive.apache.org/dist/cassandra/4.0.1/apache-cassandra-4.0.1-bin.tar.gz"

    version('4.0.1', sha256='ed7022e30d9b77d9ce1072f8de95ab01ef7c5c6ed30f304e413dd5a3f92a52f8')
    version('3.11.11', sha256='a5639af781005410995a96f512d505c1def7b70cf5bbbec52e7cd5ff31b6cea3')
    version('3.11.6', sha256='ce34edebd1b6bb35216ae97bd06d3efc338c05b273b78267556a99f85d30e45b',
            deprecated=True)
    version('3.11.5', sha256='a765adcaa42a6c881f5e79d030854d082900992cc11da40eee413bb235970a6a',
            deprecated=True)
    version('2.2.19', sha256='5496c0254a66b6d50bde7999d1bab9129b0406b71ad3318558f4d7dbfbed0ab9')

    depends_on('java@9:', type=('build', 'run'), when='@4.0.0:')
    depends_on('java@:8', type=('build', 'run'), when='@:3.11.11')

    def install(self, spec, prefix):
        install_tree('.', prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/r-kpmt/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RKpmt(RPackage):
    """Known Population Median Test.

    Functions that implement the known population median test."""

    cran = "kpmt"

    version('0.1.0', sha256='6342ad02c93bfa7a764d028821bb6115bb8bc8c55b057a5860736cc0e034a295')

    depends_on('r@2.10:', type=('build', 'run'))
    depends_on('r-matrixstats', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/sumaclust/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Sumaclust(MakefilePackage):
    """Sumaclust aims to cluster sequences in a way that is fast and exact
    at the same time."""

    homepage = "https://git.metabarcoding.org/obitools/sumaclust"

    version('1.0.20', sha256='b697495f9a2b93fe069ecdb3bc6bba75b07ec3ef9f01ed66c4dd69587a40cfc1',
            url="https://git.metabarcoding.org/obitools/sumaclust/uploads/69f757c42f2cd45212c587e87c75a00f/sumaclust_v1.0.20.tar.gz")

    def build(self, spec, prefix):
        make('CC={0}'.format(spack_cc))

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install('sumaclust', prefix.bin)
player1537-forks/spack
var/spack/repos/builtin/packages/py-wcsaxes/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyWcsaxes(PythonPackage):
    """WCSAxes is a framework for making plots of Astronomical data in
    Matplotlib."""

    homepage = "https://wcsaxes.readthedocs.io/en/latest/index.html"
    url = "https://github.com/astrofrog/wcsaxes/archive/v0.8.tar.gz"

    version('0.8', sha256='9c6addc1ec04cc99617850354b2c03dbd4099d2e43b45a81f8bc3069de9c8e83')

    extends('python', ignore=r'bin/')
    depends_on('py-setuptools', type='build')
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-matplotlib', type=('build', 'run'))
    depends_on('py-astropy', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/py-hepdata-validator/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyHepdataValidator(PythonPackage):
    """Validation schema and code for HEPdata submissions."""

    homepage = "https://github.com/hepdata/hepdata-validator"
    pypi = "hepdata_validator/hepdata_validator-0.1.16.tar.gz"

    tags = ['hep']

    version('0.3.0', sha256='d603ddf908ce3838bac09bf7334184db4b35f03e2b215572c67b5e1fabbf0d9b')
    version('0.2.3', sha256='314e75eae7d4a134bfc8291440259839d82aabefdd720f237c0bf8ea5c9be4dc')
    version('0.1.16', sha256='3d7f725328ecdbb66826bff2e48a40a1d9234249859c8092ca0e92be7fb78111')
    version('0.1.15', sha256='1030654b1a1cfc387c2759f8613f033da467c8182dc027e181227aeb52854bb2')
    version('0.1.14', sha256='d1596741fb26be234c2adb6972306908f09b049dc670d8312cf2636f1a615a52')
    version('0.1.8', sha256='08686563e0130c5dd6d9fb8d5c7bf5a2617a637b105a42f7106b96a31eaffa61')

    depends_on('py-setuptools', type='build')
    depends_on('py-jsonschema', type=('build', 'run'))
    depends_on('py-pyyaml', type=('build', 'run'))
    depends_on('py-requests', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/dicom3tools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Dicom3tools(MakefilePackage):
    """Command line utilities for creating, modifying, dumping and validating
    files of DICOM attributes, and conversion of proprietary image formats to
    DICOM. Can handle older ACR/NEMA format data, and some proprietary
    versions of that such as SPI."""

    homepage = "https://www.dclunie.com/dicom3tools.html"
    url = "https://www.dclunie.com/dicom3tools/workinprogress/dicom3tools_1.00.snapshot.20210306100017.tar.bz2"

    version('1.00.snapshot.20210306100017',
            sha256='3cc2d6056e349e0ac6a093d231d8f4dd7a77e26ed29c1ebaca945dd5e56c1520')

    variant(
        'uid_root',
        default='0.0.0.0',
        values=lambda x: True,
        description='default UID Root assignment'
    )

    depends_on('bzip2', type='build')
    depends_on('imake', type='build')
    depends_on('libx11')

    def edit(self, spec, prefix):
        defines = [
            '#define InstallBinDir "{0}"'.format(prefix.bin),
            '#define InstallIncDir "{0}"'.format(prefix.include),
            '#define InstallLibDir "{0}"'.format(prefix.lib),
            '#define InstallManDir "{0}"'.format(prefix.man),
            '#define X11LibraryPath "{0}"'.format(spec['libx11'].prefix.lib),
            '#define X11IncludePath "{0}"'.format(spec['libx11'].prefix.include),
        ]

        with working_dir('config'):
            with open('site.p-def', 'a') as inc:
                for define in defines:
                    inc.write('{0}\n'.format(define))

        configure = FileFilter('Configure')
        configure.filter('usegcc=.*', 'usegcc={0}'.format(spack_cc))
        configure.filter('usegpp=.*', 'usegpp={0}'.format(spack_cxx))

    def build(self, spec, prefix):
        uid_root = spec.variants['uid_root'].value

        configure = Executable(join_path('.', 'Configure'))
        configure()

        imake = which('imake')
        imake('-I./config', '-DDefaultUIDRoot={0}'.format(uid_root))

        make('World')

    def install(self, spec, prefix):
        make('install')
        make('install.man')
player1537-forks/spack
var/spack/repos/builtin/packages/mcutils/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Mcutils(MakefilePackage):
    """A collection of routines for classification and manipulation of
    particle physics simulated HepMC event records."""

    homepage = "https://gitlab.com/hepcedar/mcutils"
    git = "https://gitlab.com/hepcedar/mcutils.git"

    tags = ['hep']

    version('1.3.5', tag='mcutils-1.3.5')
    version('1.3.4', tag='mcutils-1.3.4')
    version('1.3.3', tag='mcutils-1.3.3')
    version('1.3.2', tag='mcutils-1.3.2')
    version('1.3.1', tag='mcutils-1.3.1')
    version('1.3.0', tag='mcutils-1.3.0')
    version('1.2.1', tag='mcutils-1.2.1')
    version('1.2.0', tag='mcutils-1.2.0')
    version('1.1.2', tag='mcutils-1.1.2')
    version('1.1.1', tag='mcutils-1.1.1')
    version('1.1.0', tag='mcutils-1.1.0')
    version('1.0.3', tag='mcutils-1.0.3')
    version('1.0.2', tag='mcutils-1.0.2')
    version('1.0.1', tag='mcutils-1.0.1')
    version('1.0.0', tag='mcutils-1.0.0')

    depends_on('heputils', when='@1.1.0:')

    def install(self, spec, prefix):
        make('install', 'PREFIX={0}'.format(prefix))
player1537-forks/spack
lib/spack/spack/platforms/linux.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform

import archspec.cpu

import spack.target
from spack.operating_systems.linux_distro import LinuxDistro

from ._platform import Platform


class Linux(Platform):
    priority = 90

    def __init__(self):
        super(Linux, self).__init__('linux')

        for name in archspec.cpu.TARGETS:
            self.add_target(name, spack.target.Target(name))

        # Get specific default
        self.default = archspec.cpu.host().name
        self.front_end = self.default
        self.back_end = self.default

        linux_dist = LinuxDistro()
        self.default_os = str(linux_dist)
        self.front_os = self.default_os
        self.back_os = self.default_os
        self.add_operating_system(str(linux_dist), linux_dist)

    @classmethod
    def detect(cls):
        return 'linux' in platform.system().lower()
player1537-forks/spack
var/spack/repos/builtin/packages/r-r-cache/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRCache(RPackage):
    """Fast and Light-Weight Caching (Memoization) of Objects and Results to
    Speed Up Computations.

    Memoization can be used to speed up repetitive and computational expensive
    function calls. The first time a function that implements memoization is
    called the results are stored in a cache memory. The next time the
    function is called with the same set of parameters, the results are
    momentarily retrieved from the cache avoiding repeating the calculations.
    With this package, any R object can be cached in a key-value storage where
    the key can be an arbitrary set of R objects. The cache memory is
    persistent (on the file system)."""

    cran = "R.cache"

    version('0.15.0', sha256='adb4d3b08f7917e10fe6188c7b90a3318701a974c58eaa09943b929382bdf126')
    version('0.14.0', sha256='18af4e372440b9f28b4b71346c8ed9de220232f9903730ccee2bfb3c612c16d9')

    depends_on('r@2.14.0:', type=('build', 'run'))
    depends_on('r-r-methodss3@1.7.1:', type=('build', 'run'))
    depends_on('r-r-methodss3@1.8.1:', type=('build', 'run'), when='@0.15.0:')
    depends_on('r-r-oo@1.23.0:', type=('build', 'run'))
    depends_on('r-r-oo@1.24.0:', type=('build', 'run'), when='@0.15.0:')
    depends_on('r-r-utils@2.8.0:', type=('build', 'run'))
    depends_on('r-r-utils@2.10.1:', type=('build', 'run'), when='@0.15.0:')
    depends_on('r-digest@0.6.13:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/tbl2asn/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from os import chmod

from spack import *


class Tbl2asn(Package):
    """Tbl2asn is a command-line program that automates the creation of
    sequence records for submission to GenBank."""

    homepage = "https://www.ncbi.nlm.nih.gov/genbank/tbl2asn2/"

    version('2020-03-01', sha256='7cc1119d3cfcbbffdbd4ecf33cef8bbdd44fc5625c72976bee08b1157625377e')

    def url_for_version(self, ver):
        return "https://ftp.ncbi.nih.gov/toolbox/ncbi_tools/converters/by_program/tbl2asn/linux.tbl2asn.gz"

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install('../linux.tbl2asn', prefix.bin.tbl2asn)
        chmod(prefix.bin.tbl2asn, 0o775)
player1537-forks/spack
var/spack/repos/builtin/packages/cpio/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re

from spack import *


class Cpio(AutotoolsPackage, GNUMirrorPackage):
    """GNU cpio copies files into or out of a cpio or tar archive and the file
    system. The archive can be another file on the disk, a magnetic tape, or a
    pipe.
    """

    homepage = "https://www.gnu.org/software/cpio/"
    gnu_mirror_path = "cpio/cpio-2.13.tar.gz"

    executables = ['^cpio$']

    version('2.13', sha256='e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88')

    build_directory = 'spack-build'

    def patch(self):
        """Fix multiple definition of char *program_name for gcc@10: and clang"""
        filter_file(r'char \*program_name;', '', 'src/global.c')

    @classmethod
    def determine_version(cls, exe):
        output = Executable(exe)('--version', output=str, error=str)
        match = re.search(r'\(GNU cpio\)\s+(\S+)', output)
        return match.group(1) if match else None

    def flag_handler(self, name, flags):
        spec = self.spec

        if name == 'cflags':
            if '%intel@:17' in spec:
                flags.append('-no-gcc')
            elif '%clang' in spec or '%fj' in spec:
                flags.append('--rtlib=compiler-rt')

        return (flags, None, None)
player1537-forks/spack
lib/spack/spack/test/cmd/python.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import platform
import sys

import pytest

import spack
from spack.main import SpackCommand

python = SpackCommand('python')


def test_python():
    out = python('-c', 'import spack; print(spack.spack_version)')
    assert out.strip() == spack.spack_version


def test_python_interpreter_path():
    out = python('--path')
    assert out.strip() == sys.executable


def test_python_version():
    out = python('-V')
    assert platform.python_version() in out


def test_python_with_module():
    # pytest rewrites a lot of modules, which interferes with runpy, so
    # it's hard to test this. Trying to import a module like sys, that
    # has no code associated with it, raises an error reliably in python
    # 2 and 3, which indicates we successfully ran runpy.run_module.
    with pytest.raises(ImportError, match="No code object"):
        python('-m', 'sys')


def test_python_raises():
    out = python('--foobar', fail_on_error=False)
    assert "Error: Unknown arguments" in out
player1537-forks/spack
var/spack/repos/builtin/packages/gaussian-view/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# ----------------------------------------------------------------------------

import os

import llnl.util.tty as tty

from spack import *


class GaussianView(Package):
    """GaussView 6 is the latest iteration of a graphical interface used with
    Gaussian. It aids in the creation of Gaussian input files, enables the
    user to run Gaussian calculations from a graphical interface without the
    need for using a command line instruction, and helps in the
    interpretation of Gaussian output.

    Needs post-install steps to make it run!
    See package installation logs for details."""

    homepage = "https://gaussian.com/gaussview6/"
    manual_download = True

    maintainers = ['antoniokaust', 'dev-zero']

    version('6.1.6', sha256='c9824fd0372c27425b53de350f3a83b27de75ca694219b3ef18cd7d92937db6c',
            extension='tbz')
    version('6.0.16', '5dd6a8df8c81763e43a308b3a18d2d3b825d3597e9628dcf43e563d1867b9638',
            extension='tbz')

    variant(
        'gaussian-src',
        default=False,
        description='Use gaussian-src instead of gaussian (prebuilt binary)'
    )

    depends_on('gaussian@16-B.01', type='run', when='@:6.0')

    # TODO: add the checksum for gaussian@16-C.01 before uncommenting
    # depends_on('gaussian@16-C.01', type='run', when='~gaussian-src@6.1:')
    depends_on('gaussian-src@16-C.01', type='run', when='+gaussian-src@6.1:')
    conflicts('+gaussian-src', when='@:6.0')

    depends_on('libx11', type=('run', 'link'))
    depends_on('libxext', type=('run', 'link'))
    depends_on('gl@3:', type=('run', 'link'))
    depends_on('glu@1.3', type=('run', 'link'))
    depends_on('libxrender', type=('run', 'link'))
    depends_on('libsm', type=('run', 'link'))
    depends_on('libice', type=('run', 'link'))
    depends_on('patchelf', type='build')

    def url_for_version(self, version):
        return "file://{0}/gv{1}-linux-x86_64.tbz".format(
            os.getcwd(), version.up_to(2).joined)

    def install(self, spec, prefix):
        install_tree('.', prefix)

        # make sure the executable finds and uses the Spack-provided
        # libraries, otherwise the executable may or may not run depending
        # on what is installed on the host
        # the $ORIGIN prefix is required for the executable to find its
        # own bundled shared libraries
        patchelf = which('patchelf')
        rpath = '$ORIGIN:$ORIGIN/lib' + ':'.join(
            self.spec[dep].libs.directories[0]
            for dep in ['libx11', 'libxext', 'libxrender', 'libice', 'libsm',
                        'gl', 'glu'])
        patchelf('--set-rpath', rpath, join_path(self.prefix, 'gview.exe'))

    @run_after('install')
    def caveats(self):
        perm_script = 'spack_perms_fix.sh'
        perm_script_path = join_path(self.spec.prefix.bin, perm_script)
        with open(perm_script_path, 'w') as f:
            env = spack.tengine.make_environment(dirs=self.package_dir)
            t = env.get_template(perm_script + '.j2')
            f.write(t.render({'prefix': self.spec.prefix}))
        chmod = which('chmod')
        chmod('0555', perm_script_path)

        tty.warn("""
For a working GaussianView installation, all executable files can only be
accessible by the owner and the group but not the world.

We've installed a script that will make the necessary changes;
read through it and then execute it:

    {0}

If you have to give others access, please customize the group membership
of the package files as documented here:

    https://spack.readthedocs.io/en/latest/build_settings.html#package-permissions"""
                 .format(perm_script_path))

    @when('@:6.0')
    def setup_run_environment(self, env):
        env.set('GV_DIR', self.prefix)
        env.set('GV_LIB_PATH', self.prefix.lib)
        env.set('ALLOWINDIRECT', '1')
        env.prepend_path('PATH', self.prefix)
        env.prepend_path('GV_LIB_PATH', self.prefix.lib.MesaGL)
        env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib.MesaGL)
        env.prepend_path('QT_PLUGIN_PATH', self.prefix.plugins)

    @when('@6.1:')
    def setup_run_environment(self, env):
        env.set('GV_DIR', self.prefix)
        # the wrappers in gv/exec setup everything just nicely
        env.prepend_path('PATH', join_path(self.prefix, 'exec'))
player1537-forks/spack
lib/spack/spack/test/url_substitution.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Tests Spack's ability to substitute a different version into a URL."""
import os

import pytest

import spack.url


@pytest.mark.parametrize('base_url,version,expected', [
    # Ensures that substituting the same version results in the same URL
    ('http://www.mr511.de/software/libelf-0.8.13.tar.gz',
     '0.8.13',
     'http://www.mr511.de/software/libelf-0.8.13.tar.gz'),
    # Test a completely different version syntax
    ('http://www.prevanders.net/libdwarf-20130729.tar.gz',
     '8.12',
     'http://www.prevanders.net/libdwarf-8.12.tar.gz'),
    # Test a URL where the version appears twice
    # It should get substituted both times
    ('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz',
     '2.1.3',
     'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz'),
    # Test now with a partial prefix earlier in the URL
    # This is hard to figure out so Spack only substitutes
    # the last instance of the version
    ('https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2',
     '2.2.0',
     'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.0.tar.bz2'),
    ('https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2',
     '2.2',
     'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.tar.bz2'),
    # No separator between the name and version of the package
    ('file://{0}/turbolinux702.tar.gz'.format(os.getcwd()),
     '703',
     'file://{0}/turbolinux703.tar.gz'.format(os.getcwd())),
    ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true',
     '2.0.7',
     'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true'),
    ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true',
     '4.7',
     'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true'),
    # Package name contains regex characters
    ('http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz',
     '1.2.3',
     'http://math.lbl.gov/voro++/download/dir/voro++-1.2.3.tar.gz'),
])
def test_url_substitution(base_url, version, expected):
    computed = spack.url.substitute_version(base_url, version)
    assert computed == expected
player1537-forks/spack
var/spack/repos/builtin/packages/apple-libuuid/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class AppleLibuuid(BundlePackage):
    """Placeholder package for Apple's analogue to non-GNU libuuid"""

    homepage = "https://opensource.apple.com/tarballs/Libsystem/"

    version('1353.100.2')

    provides('uuid')

    # Only supported on 'platform=darwin'
    conflicts('platform=linux')
    conflicts('platform=cray')

    @property
    def libs(self):
        """Export the Apple libuuid library.

        According to https://bugs.freedesktop.org/show_bug.cgi?id=105366,
        libuuid is provided as part of libsystem_c. The Apple libsystem_c
        library cannot be linked to directly using an absolute path; doing so
        will cause the linker to throw an error 'cannot link directly with
        /usr/lib/system/libsystem_c.dylib' and the linker will suggest linking
        with System.framework instead. Linking to this framework is
        equivalent to linking with libSystem.dylib, which can be confirmed on
        a macOS system by executing at a terminal the command
        `ls -l /System/Library/Frameworks/System.Framework` -- the file
        "System" is a symlink to `/usr/lib/libSystem.B.dylib`, and
        `/usr/lib/libSystem.dylib` also symlinks to this file. Running
        `otool -L /usr/lib/libSystem.dylib` confirms that it will link
        dynamically to `/usr/lib/system/libsystem_c.dylib`."""

        return LibraryList('/usr/lib/libSystem.dylib')

    @property
    def headers(self):
        """Export the Apple libuuid header."""
        return HeaderList(self.prefix.include.uuid.join('uuid.h'))
player1537-forks/spack
var/spack/repos/builtin/packages/sga/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Sga(AutotoolsPackage):
    """SGA is a de novo genome assembler based on the concept of string
    graphs. The major goal of SGA is to be very memory efficient, which is
    achieved by using a compressed representation of DNA sequence reads."""

    homepage = "https://www.msi.umn.edu/sw/sga"
    url = "https://github.com/jts/sga/archive/v0.10.15.tar.gz"

    version('0.10.15', sha256='1b18996e6ec47985bc4889a8cbc3cd4dd3a8c7d385ae9f450bd474e36342558b')
    version('0.10.14', sha256='763c011b302e1085048c93d917f081ea9348a8470e222dfd369064548e8b3994')
    version('0.10.13', sha256='77859ab233980594941aa4c4cb5c2cbe1f5c43f2519f329c3a88a97865dee599')
    version('0.10.12', sha256='f27f13ce1e7c1a3f35f9f4eed6c1896f3b92471bc4acc7f2364a12ce098e9779')
    version('0.10.11', sha256='4704ad74705931311ed66a0886453e57616798147d149e16e13ac5acd9b5b87c')
    version('0.10.10', sha256='5a75a81d405d22d51f3b7388c42d5baced4388110d39e5d77249bf3eac76a83a')
    version('0.10.9', sha256='34573cb7423affd5e15c1175d9af69f7495b094b60ddfcbafd910fd703c25006')
    version('0.10.8', sha256='55c5e0e425e14902e83d68cfb8cee4c86ee186459e54113a484b2a1b06d223c8')
    version('0.10.3', sha256='c000823a58428d9db2979b30a571ad89aec78a8cb1af60bae1ce252dd4e8adac')

    depends_on('zlib')
    depends_on('sparsehash')
    depends_on('jemalloc')
    depends_on('bamtools')
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')

    configure_directory = 'src'

    def configure_args(self):
        return [
            '--with-sparsehash={0}'.format(self.spec['sparsehash'].prefix),
            '--with-bamtools={0}'.format(self.spec['bamtools'].prefix),
            '--with-jemalloc={0}'.format(self.spec['jemalloc'].prefix)
        ]
player1537-forks/spack
var/spack/repos/builtin/packages/kbproto/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Kbproto(AutotoolsPackage, XorgPackage):
    """X Keyboard Extension.

    This extension defines a protocol to provide a number of new capabilities
    and controls for text keyboards."""

    homepage = "https://cgit.freedesktop.org/xorg/proto/kbproto"
    xorg_mirror_path = "proto/kbproto-1.0.7.tar.gz"

    version('1.0.7', sha256='828cb275b91268b1a3ea950d5c0c5eb076c678fdf005d517411f89cc8c3bb416')

    depends_on('pkgconfig', type='build')
    depends_on('util-macros', type='build')
player1537-forks/spack
lib/spack/spack/test/package_sanity.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This test does sanity checks on Spack's builtin package database."""
import ast
import os.path
import pickle
import re

import pytest

import llnl.util.tty as tty

# A few functions from this module are used to
# do sanity checks only on packages modified by a PR
import spack.cmd.style as style
import spack.fetch_strategy
import spack.package
import spack.paths
import spack.repo
import spack.util.crypto as crypto
import spack.util.executable as executable
import spack.util.package_hash as ph
import spack.variant


def check_repo():
    """Get all packages in the builtin repo to make sure they work."""
    for name in spack.repo.all_package_names():
        spack.repo.get(name)


@pytest.mark.maybeslow
def test_get_all_packages():
    """Get all packages once and make sure that works."""
    check_repo()


def test_packages_are_pickleable():
    failed_to_pickle = list()
    for name in spack.repo.all_package_names():
        pkg = spack.repo.get(name)
        try:
            pickle.dumps(pkg)
        except Exception:
            # If there are any failures, keep track of all packages that
            # aren't pickle-able and re-run the pickling later on to
            # recreate the error
            failed_to_pickle.append(name)

    if failed_to_pickle:
        tty.msg('The following packages failed to pickle: ' +
                ', '.join(failed_to_pickle))

        for name in failed_to_pickle:
            pkg = spack.repo.get(name)
            pickle.dumps(pkg)


def test_packages_are_unparseable():
    """Ensure that all packages can unparse and that unparsed code is valid
    Python."""
    failed_to_unparse = []
    failed_to_compile = []

    for name in spack.repo.all_package_names():
        try:
            source = ph.canonical_source(name, filter_multimethods=False)
        except Exception:
            failed_to_unparse.append(name)

        try:
            compile(source, "internal", "exec", ast.PyCF_ONLY_AST)
        except Exception:
            failed_to_compile.append(name)

    if failed_to_unparse:
        tty.msg('The following packages failed to unparse: ' +
                ', '.join(failed_to_unparse))
        assert False

    if failed_to_compile:
        tty.msg('The following unparsed packages failed to compile: ' +
                ', '.join(failed_to_compile))
        assert False


def test_repo_getpkg_names_and_classes():
    """Ensure that all_packages/names/classes are consistent."""
    names = spack.repo.path.all_package_names()
    print(names)
    classes = spack.repo.path.all_package_classes()
    print(list(classes))
    pkgs = spack.repo.path.all_packages()
    print(list(pkgs))

    for name, cls, pkg in zip(names, classes, pkgs):
        assert cls.name == name
        assert pkg.name == name


def test_get_all_mock_packages():
    """Get the mock packages once each too."""
    db = spack.repo.RepoPath(spack.paths.mock_packages_path)
    with spack.repo.use_repositories(db):
        check_repo()


def test_all_versions_are_lowercase():
    """Spack package names must be lowercase, and use `-` instead of `_`."""
    errors = []
    for name in spack.repo.all_package_names():
        if re.search(r'[_A-Z]', name):
            errors.append(name)

    assert len(errors) == 0


def test_all_virtual_packages_have_default_providers():
    """All virtual packages must have a default provider explicitly set."""
    defaults = spack.config.get('packages', scope='defaults')
    default_providers = defaults['all']['providers']
    providers = spack.repo.path.provider_index.providers
    default_providers_filename = \
        spack.config.config.scopes['defaults'].get_section_filename('packages')
    for provider in providers:
        assert provider in default_providers, \
            "all providers must have a default in %s" \
            % default_providers_filename


def test_package_version_consistency():
    """Make sure all versions on builtin packages produce a fetcher."""
    for name in spack.repo.all_package_names():
        pkg = spack.repo.get(name)
        spack.fetch_strategy.check_pkg_attributes(pkg)
        for version in pkg.versions:
            assert spack.fetch_strategy.for_package_version(pkg, version)


def test_no_fixme():
    """Packages should not contain any boilerplate such as
       FIXME or example.com."""
    errors = []
    fixme_regexes = [
        r'remove this boilerplate',
        r'FIXME: Put',
        r'FIXME: Add',
        r'example.com',
    ]

    for name in spack.repo.all_package_names():
        filename = spack.repo.path.filename_for_package_name(name)
        with open(filename, 'r') as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes
                                if re.search(r, line)), None)
                if pattern:
                    errors.append(
                        "%s:%d: boilerplate needs to be removed: %s" %
                        (filename, i, line.strip())
                    )

    assert [] == errors


def test_docstring():
    """Ensure that every package has a docstring."""
    for name in spack.repo.all_package_names():
        pkg = spack.repo.get(name)
        assert pkg.__doc__


def test_all_packages_use_sha256_checksums():
    """Make sure that no packages use md5 checksums."""
    errors = []
    for name in spack.repo.all_package_names():
        pkg = spack.repo.path.get(name)

        # for now, don't enforce on packages that require manual downloads
        # TODO: eventually fix these, too.
        if pkg.manual_download:
            continue

        def invalid_sha256_digest(fetcher):
            if getattr(fetcher, "digest", None):
                h = crypto.hash_algo_for_digest(fetcher.digest)
                if h != "sha256":
                    return h

        for v, args in pkg.versions.items():
            fetcher = spack.fetch_strategy.for_package_version(pkg, v)
            bad_digest = invalid_sha256_digest(fetcher)
            if bad_digest:
                errors.append(
                    "All packages must use sha256 checksums. %s@%s uses %s." %
                    (name, v, bad_digest)
                )

        for _, resources in pkg.resources.items():
            for resource in resources:
                bad_digest = invalid_sha256_digest(resource.fetcher)
                if bad_digest:
                    errors.append(
                        "All packages must use sha256 checksums."
                        "Resource in %s uses %s." % (name, bad_digest)
                    )

    assert [] == errors


def test_api_for_build_and_run_environment():
    """Ensure that every package uses the correct API to set build and
    run environment, and not the old one.
    """
    failing = []
    for pkg in spack.repo.path.all_packages():
        add_to_list = (hasattr(pkg, 'setup_environment') or
                       hasattr(pkg, 'setup_dependent_environment'))
        if add_to_list:
            failing.append(pkg)

    msg = ('there are {0} packages using the old API to set build '
           'and run environment [{1}], for further information see '
           'https://github.com/spack/spack/pull/11115')
    assert not failing, msg.format(
        len(failing), ','.join(x.name for x in failing)
    )


@pytest.mark.skipif(
    not executable.which('git'), reason='requires git to be installed'
)
def test_prs_update_old_api():
    """Ensures that every package modified in a PR doesn't contain
    deprecated calls to any method.
    """
    ref = os.getenv("GITHUB_BASE_REF")
    if not ref:
        pytest.skip("No base ref found")

    changed_package_files = [
        x for x in style.changed_files(base=ref) if style.is_package(x)
    ]
    failing = []
    for file in changed_package_files:
        if 'builtin.mock' not in file:  # don't restrict packages for tests
            name = os.path.basename(os.path.dirname(file))
            pkg = spack.repo.get(name)

            failed = (hasattr(pkg, 'setup_environment') or
                      hasattr(pkg, 'setup_dependent_environment'))
            if failed:
                failing.append(name)

    msg = ('there are {0} packages using the old API to set build '
           'and run environment [{1}], for further information see '
           'https://github.com/spack/spack/pull/11115')
    assert not failing, msg.format(
        len(failing), ','.join(failing)
    )


def test_all_dependencies_exist():
    """Make sure no packages have nonexisting dependencies."""
    missing = {}
    pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
    spack.package.possible_dependencies(
        *pkgs, transitive=True, missing=missing)

    lines = [
        "%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()
    ]
    assert not missing, "These packages have missing dependencies:\n" + (
        "\n".join(lines)
    )


def test_variant_defaults_are_parsable_from_cli():
    """Ensures that variant defaults are parsable from cli."""
    failing = []
    for pkg in spack.repo.path.all_packages():
        for variant_name, entry in pkg.variants.items():
            variant, _ = entry
            default_is_parsable = (
                # Permitting a default that is an instance on 'int' permits
                # to have foo=false or foo=0. Other falsish values are
                # not allowed, since they can't be parsed from cli ('foo=')
                isinstance(variant.default, int) or variant.default
            )
            if not default_is_parsable:
                failing.append((pkg.name, variant_name))
    assert not failing


def test_variant_defaults_listed_explicitly_in_values():
    failing = []
    for pkg in spack.repo.path.all_packages():
        for variant_name, entry in pkg.variants.items():
            variant, _ = entry
            vspec = variant.make_default()
            try:
                variant.validate_or_raise(vspec, pkg=pkg)
            except spack.variant.InvalidVariantValueError:
                failing.append((pkg.name, variant.name))
    assert not failing
player1537-forks/spack
var/spack/repos/builtin/packages/py-cclib/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyCclib(PythonPackage):
    """Open source library for parsing and interpreting the results of
    computational chemistry packages"""

    homepage = "https://cclib.github.io/"

    version('1.5.post1', sha256='c2bf043432ab8df461d61b4289d0eb869fe134eee545ea5a78f8dea14b392f47',
            url="https://github.com/cclib/cclib/releases/download/v1.5/cclib-1.5.post1.tar.gz")

    # pip silently replaces distutils with setuptools
    depends_on('py-setuptools', type='build')
    depends_on('py-numpy@1.5:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/perl-set-intspan/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlSetIntspan(PerlPackage):
    """Set::IntSpan - Manages sets of integers"""

    homepage = "https://metacpan.org/pod/Set::IntSpan"
    url = "https://cpan.metacpan.org/authors/id/S/SW/SWMCD/Set-IntSpan-1.19.tar.gz"

    version('1.19', sha256='11b7549b13ec5d87cc695dd4c777cd02983dd5fe9866012877fb530f48b3dfd0')
player1537-forks/spack
var/spack/repos/builtin/packages/neovim/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Neovim(CMakePackage):
    """Neovim: Vim-fork focused on extensibility and usability"""

    homepage = "https://neovim.io"
    git = "https://github.com/neovim/neovim.git"
    url = "https://github.com/neovim/neovim/archive/v0.4.3.tar.gz"

    maintainers = ['albestro']

    version('master', branch='master')
    version('stable', tag='stable')
    version('0.6.1', sha256='dd882c21a52e5999f656cae3f336b5fc702d52addd4d9b5cd3dc39cfff35e864')
    version('0.6.0', sha256='2cfd600cfa5bb57564cc22ffbbbcb2c91531053fc3de992df33656614384fa4c')
    version('0.5.1', sha256='aa449795e5cc69bdd2eeed7095f20b9c086c6ecfcde0ab62ab97a9d04243ec84')
    version('0.5.0', sha256='6bcfa5192c9460c946e853dbd1a0baf659df5de184436144147711d1bceedeee')
    version('0.4.4', sha256='2f76aac59363677f37592e853ab2c06151cca8830d4b3fe4675b4a52d41fc42c')
    version('0.4.3', sha256='91a0b5d32204a821bf414690e6b48cf69224d1961d37158c2b383f6a6cf854d2')
    version('0.3.4', sha256='a641108bdebfaf319844ed46b1bf35d6f7c30ef5aeadeb29ba06e19c3274bc0e')
    version('0.3.1', sha256='bc5e392d4c076407906ccecbc283e1a44b7832c2f486cad81aa04cc29973ad22')
    version('0.3.0', sha256='f7acb61b16d3f521907d99c486b7a9f1e505e8b2a18c9ef69a6d7f18f29f74b8')
    version('0.2.2', sha256='a838ee07cc9a2ef8ade1b31a2a4f2d5e9339e244ade68e64556c1f4b40ccc5ed')
    version('0.2.1', sha256='9e2c068a8994c9023a5f84cde9eb7188d3c85996a7e42e611e3cd0996e345dd3')
    version('0.2.0', sha256='72e263f9d23fe60403d53a52d4c95026b0be428c1b9c02b80ab55166ea3f62b5')

    depends_on('cmake@3.0:', type='build')
    depends_on('pkgconfig', type='build')
    depends_on('gettext', type=('build', 'link'))
    depends_on('lua@5.1.0:5.1.9', type=('build', 'link'))
    depends_on('lua-lpeg', type='link')
    depends_on('lua-mpack', type='link')
    depends_on('lua-bitlib', type='link')
    depends_on('libuv', type='link')
    depends_on('libuv@1.28:', type='link', when='@0.4:,stable')
    depends_on('jemalloc', type='link')
    depends_on('libtermkey', type='link')
    depends_on('libtermkey@0.18:', type='link', when='@0.3.4:,stable')
    depends_on('libvterm@0.0.0', type='link', when='@0.2.0:0.3')
    depends_on('libvterm@0.1:', type='link', when='@0.4:,stable')
    depends_on('unibilium', type='link')
    depends_on('unibilium@:1.2.0', type='link', when='@0.2.0')
    depends_on('unibilium@2.0:', type='link', when='@0.4:,stable')
    depends_on('msgpack-c', type='link')
    depends_on('msgpack-c@1.0.0:', type='link', when='@0.4:,stable')
    depends_on('gperf', type='link')
    depends_on('libluv@1.30.0:', type='link', when='@0.4:,stable')
    depends_on('tree-sitter', when='@0.5:')

    def cmake_args(self):
        return ['-DPREFER_LUA=ON']
player1537-forks/spack
var/spack/repos/builtin/packages/py-packaging/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPackaging(PythonPackage):
    """Core utilities for Python packages."""

    homepage = "https://github.com/pypa/packaging"
    pypi = "packaging/packaging-19.2.tar.gz"

    version('21.3', sha256='dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb')
    version('21.0', sha256='7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7')
    version('20.9', sha256='5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5')
    version('19.2', sha256='28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47')
    version('19.1', sha256='c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe')
    version('19.0', sha256='0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af')
    version('17.1', sha256='f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b')
    version('16.8', sha256='5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e')

    depends_on('python@3.6:', when='@21:', type=('build', 'run'))
    depends_on('python@2.7:2,3.4:', type=('build', 'run'))
    depends_on('py-setuptools@40.8.0:', when='@20.8:', type='build')
    depends_on('py-setuptools', type='build')
    depends_on('py-pyparsing@2.0.2:3.0.4,3.0.6:', when='@21.3:', type=('build', 'run'))
    depends_on('py-pyparsing@2.0.2:2', when='@21.1:21.2', type=('build', 'run'))
    depends_on('py-pyparsing@2.0.2:', type=('build', 'run'))
    depends_on('py-six', when='@:20.7', type=('build', 'run'))
    depends_on('py-attrs', when='@19.1', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-clusterprofiler/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RClusterprofiler(RPackage):
    """statistical analysis and visualization of functional profiles for genes
    and gene clusters.

    This package implements methods to analyze and visualize functional
    profiles (GO and KEGG) of gene and gene clusters."""

    bioc = "clusterProfiler"

    version('4.2.2', commit='<PASSWORD>')
    version('3.18.0', commit='<PASSWORD>2<PASSWORD>3<PASSWORD>b907bee4<PASSWORD>ff821')
    version('3.12.0', commit='<PASSWORD>')
    version('3.10.1', commit='<KEY>')
    version('3.8.1', commit='81e1a7ac49e4713703c55f87f945b20de5e7ab36')
    version('3.6.0', commit='<KEY>')
    version('3.4.4', commit='b86b00e8405fe130e439362651a5567736e2d9d7')

    depends_on('r@3.3.1:', type=('build', 'run'))
    depends_on('r@3.4.0:', type=('build', 'run'), when='@3.8.1:')
    depends_on('r@3.5.0:', type=('build', 'run'), when='@4.2.2:')
    depends_on('r-annotationdbi', type=('build', 'run'))
    depends_on('r-downloader', type=('build', 'run'), when='@3.18.0:')
    depends_on('r-dose@3.1.3:', type=('build', 'run'))
    depends_on('r-dose@3.3.2:', type=('build', 'run'), when='@3.6.0:')
    depends_on('r-dose@3.5.1:', type=('build', 'run'), when='@3.8.1:')
    depends_on('r-dose@3.13.1:', type=('build', 'run'), when='@3.18.0:')
    depends_on('r-dplyr', type=('build', 'run'), when='@3.18.0:')
    depends_on('r-enrichplot@0.99.7:', type=('build', 'run'), when='@3.8.1:')
    depends_on('r-enrichplot@1.9.3:', type=('build', 'run'), when='@3.18.0:')
    depends_on('r-go-db', type=('build', 'run'))
    depends_on('r-gosemsim', type=('build', 'run'))
    depends_on('r-gosemsim@2.0.0:', type=('build', 'run'), when='@3.4.4:3.6.0')
    depends_on('r-magrittr', type=('build', 'run'))
    depends_on('r-plyr', type=('build', 'run'))
    depends_on('r-qvalue', type=('build', 'run'))
    depends_on('r-rlang', type=('build', 'run'), when='@3.18.0:')
    depends_on('r-tidyr', type=('build', 'run'))
    depends_on('r-yulab-utils', type=('build', 'run'), when='@4.2.2:')
    depends_on('r-ggplot2', type=('build', 'run'), when='@:3.12.0')
    depends_on('r-rvcheck', type=('build', 'run'), when='@:3.18.0')
player1537-forks/spack
var/spack/repos/builtin/packages/nfs-ganesha/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class NfsGanesha(CMakePackage):
    """NFS-Ganesha is an NFSv3,v4,v4.1 fileserver that runs in user mode on
    most UNIX/Linux systems. It also supports the 9p.2000L protocol."""

    homepage = "https://github.com/nfs-ganesha/nfs-ganesha/wiki"
    url = "https://github.com/nfs-ganesha/nfs-ganesha/archive/V3.2.tar.gz"

    version('3.2', sha256='1e3635f0eb0bc32868ea7d923d061d0f6b1bd03b45da34356c7c53d4c0ebafbd')
    version('3.1', sha256='c4cf78929f39b8af44b05e813783b2c39e348b485043c6290c4bca705bb5015f')
    version('3.0.3', sha256='fcc0361b9a2752be7eb4e990230765e17de373452ac24514be22c81a5447a460')
    version('3.0', sha256='136c5642ff21ec6e8a4e77c037f6218a39b2eeba77798b13556f1abbb0923ccd')

    depends_on('bison', type='build')
    depends_on('flex', type='build')
    depends_on('py-stsci-distutils', type='build')
    depends_on('userspace-rcu')
    depends_on('ntirpc')
    depends_on('krb5')

    root_cmakelists_dir = 'src'

    def setup_build_environment(self, env):
        env.prepend_path('CPATH', self.spec['ntirpc'].prefix.include.ntirpc)
player1537-forks/spack
var/spack/repos/builtin/packages/yambo/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Yambo(AutotoolsPackage):
    """Yambo is a FORTRAN/C code for Many-Body calculations in solid state and
    molecular physics.

    Yambo relies on the Kohn-Sham wavefunctions generated by two DFT public
    codes: abinit, and PWscf. The code was originally developed in the
    Condensed Matter Theoretical Group of the Physics Department at the
    University of Rome "Tor Vergata" by <NAME>. Previous to its release under
    the GPL license, yambo was known as SELF.
    """

    homepage = "http://www.yambo-code.org/index.php"
    url = "https://github.com/yambo-code/yambo/archive/4.2.2.tar.gz"

    version('4.2.2', sha256='86b4ebe679387233266aba49948246c85a32b1e6840d024f162962bd0112448c')
    version('4.2.1', sha256='8ccd0ca75cc32d9266d4a37edd2a7396cf5038f3a68be07c0f0f77d1afc72bdc')
    version('4.2.0', sha256='9f78c4237ff363ff4e9ea5eeea671b6fff783d9a6078cc31b0b1abeb1f040f4d')

    variant('dp', default=False, description='Enable double precision')
    variant(
        'profile', values=any_combination_of('time', 'memory'),
        description='Activate profiling of specific sections'
    )
    variant(
        'io', values=any_combination_of('iotk', 'etsf-io'),
        description='Activate support for different io formats (requires network access)',  # noqa
    )

    # MPI + OpenMP parallelism
    variant('mpi', default=True, description='Enable MPI support')
    variant('openmp', default=False, description='Enable OpenMP support')

    depends_on('blas')
    depends_on('lapack')

    # MPI dependencies are forced, until we have proper forwarding of variants
    #
    # Note that yambo is used as an application, and not linked as a library,
    # thus there will be no case where another package pulls-in e.g.
    # netcdf-c+mpi and wants to depend on yambo~mpi.
    depends_on('mpi', when='+mpi')
    depends_on('netcdf-c+mpi', when='+mpi')
    depends_on('hdf5+mpi', when='+mpi')
    depends_on('fftw+mpi', when='+mpi')
    depends_on('scalapack', when='+mpi')

    depends_on('netcdf-c~mpi', when='~mpi')
    depends_on('hdf5~mpi', when='~mpi')
    depends_on('fftw~mpi', when='~mpi')

    depends_on('hdf5+fortran')
    depends_on('netcdf-c')
    depends_on('netcdf-fortran')
    depends_on('libxc@2.0.3:')

    build_targets = ['all']

    parallel = False

    # The configure in the package has the string 'cat config/report'
    # hard-coded, which causes a failure at configure time due to the
    # current working directory in Spack. Fix this by using the absolute
    # path to the file.
    @run_before('configure')
    def filter_configure(self):
        report_abspath = join_path(self.build_directory, 'config', 'report')
        filter_file('config/report', report_abspath, 'configure')

    def enable_or_disable_time(self, activated):
        return '--enable-time-profile' if activated else '--disable-time-profile'  # noqa: E501

    def enable_or_disable_memory(self, activated):
        return '--enable-memory-profile' if activated else '--disable-memory-profile'  # noqa: E501

    def enable_or_disable_openmp(self, activated):
        return '--enable-open-mp' if activated else '--disable-open-mp'

    def configure_args(self):
        args = [
            # As of version 4.2.1 there are hard-coded paths that make
            # the build process fail if the target prefix is not the
            # configure directory
            '--prefix={0}'.format(self.stage.source_path),
            '--disable-keep-objects',
            '--with-editor=none'
        ]
        spec = self.spec

        # Double precision
        args.extend(self.enable_or_disable('dp'))

        # Application profiling
        args.extend(self.enable_or_disable('profile'))

        # MPI + threading
        args.extend(self.enable_or_disable('mpi'))
        args.extend(self.enable_or_disable('openmp'))

        # LAPACK
        if '+mpi' in spec:
            args.append('--with-scalapack-libs={0}'.format(
                spec['scalapack'].libs +
                spec['lapack'].libs +
                spec['blas'].libs
            ))

        args.extend([
            '--with-blas-libs={0}'.format(spec['blas'].libs),
            '--with-lapack-libs={0}'.format(spec['lapack'].libs)
        ])

        # Netcdf
        args.extend([
            '--enable-netcdf-hdf5',
            '--enable-hdf5-compression',
            '--with-hdf5-libs={0}'.format(spec['hdf5'].libs),
            '--with-netcdf-path={0}'.format(spec['netcdf-c'].prefix),
            '--with-netcdff-path={0}'.format(spec['netcdf-fortran'].prefix)
        ])

        args.extend(self.enable_or_disable('io'))

        # Other dependencies
        args.append('--with-fft-path={0}'.format(spec['fftw'].prefix))
        args.append('--with-libxc-path={0}'.format(spec['libxc'].prefix))

        return args

    def install(self, spec, prefix):
        # As of version 4.2.1 an 'install' target is advertized,
        # but not present
        install_tree('bin', prefix.bin)
        install_tree('lib', prefix.lib)
        install_tree('include', prefix.include)
        install_tree('driver', prefix.driver)
player1537-forks/spack
var/spack/repos/builtin.mock/packages/cvs-test/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class CvsTest(Package):
    """Mock package that uses cvs for fetching."""

    homepage = "http://www.cvs-fetch-example.com"

    version('cvs', cvs='to-be-filled-in-by-test')
player1537-forks/spack
var/spack/repos/builtin/packages/hdf-eos2/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

from spack import *


class HdfEos2(AutotoolsPackage):
    """HDF-EOS (Hierarchical Data Format - Earth Observing System) is a
    self-describing file format based upon HDF for standard data products
    that are derived from EOS missions. HDF-EOS2 is based upon HDF4.
    """

    homepage = "https://hdfeos.org"
    # The download URLs are messy, and include the sha256 checksum.
    # This is just a template. See version_list and url_for_version below

    # Template for url_for_version. 0 is sha256 checksum, 1 is filename
    url = "https://git.earthdata.nasa.gov/rest/git-lfs/storage/DAS/hdfeos/{0}?response-content-disposition=attachment%3B%20filename%3D%22{1}%22%3B%20filename*%3Dutf-8%27%27{1}"

    # Crazy URL scheme, differing with each version, and including the
    # sha256 checksum in the URL. Yuck
    # The data in version_list is used to generate versions and urls
    # In basename expansions, 0 is raw version,
    # 1 is for version with dots => underscores
    version_list = [
        {'version': '2.20v1.00',
         'sha256': 'cb0f900d2732ab01e51284d6c9e90d0e852d61bba9bce3b43af0430ab5414903',
         'basename': 'HDF-EOS{0}.tar.Z'},
        {'version': '2.19b',
         'sha256': 'a69993508dbf5fa6120bac3c906ab26f1ad277348dfc2c891305023cfdf5dc9d',
         'basename': 'hdfeos{1}.zip'}
    ]

    for vrec in version_list:
        ver = vrec['version']
        sha256 = vrec['sha256']
        version(ver, sha256=sha256)

    variant('shared', default=True,
            description='Build shared libraries (can be used with +static)')
    variant('static', default=True,
            description='Build static libraries (can be used with +shared)')

    conflicts('~static', when='~shared',
              msg='At least one of +static or +shared must be set')

    # Build dependencies
    depends_on('hdf')

    # The standard Makefile.am, etc. add a --single_module flag to LDFLAGS
    # to pass to the linker.
    # That appears to be only recognized by the Darwin linker, remove it
    # if we are not running on darwin.
    if sys.platform != "darwin":
        patch('hdf-eos2.nondarwin-no-single_module.patch')

    def url_for_version(self, version):
        vrec = [x for x in self.version_list
                if x['version'] == version.dotted.string]
        if vrec:
            fname = vrec[0]['basename'].format(version.dotted,
                                               version.underscored)
            sha256 = vrec[0]['sha256']
            myurl = self.url.format(sha256, fname)
            return myurl
        else:
            sys.exit('ERROR: cannot generate URL for version {0};'
                     'version/checksum not found in version_list'.format(
                         version))

    def configure_args(self):
        extra_args = []

        # Package really wants h4cc to be used
        extra_args.append('CC={0}/bin/h4cc -Df2cFortran'.format(
            self.spec['hdf'].prefix))

        # We always build PIC code
        extra_args.append('--with-pic')

        # Set shared/static appropriately
        extra_args.extend(self.enable_or_disable('shared'))
        extra_args.extend(self.enable_or_disable('static'))

        # Provide config args for dependencies
        extra_args.append('--with-hdf4={0}'.format(self.spec['hdf'].prefix))
        if 'jpeg' in self.spec:
            extra_args.append('--with-jpeg={0}'.format(
                self.spec['jpeg'].prefix))
        if 'libszip' in self.spec:
            extra_args.append('--with-szlib={0}'.format(
                self.spec['libszip'].prefix))
        if 'zlib' in self.spec:
            extra_args.append('--with-zlib={0}'.format(
                self.spec['zlib'].prefix))

        return extra_args
player1537-forks/spack
var/spack/repos/builtin/packages/r-dbi/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RDbi(RPackage):
    """R Database Interface.

    A database interface definition for communication between R and
    relational database management systems. All classes in this package are
    virtual and need to be extended by the various R/DBMS implementations."""

    cran = "DBI"

    version('1.1.2', sha256='56ec377d471c76ac234ddfd313bd01a050c99fb6fa5f704f5333b34a5d714f58')
    version('1.1.1', sha256='572ab3b8a6421d0ac3e7665c4c842826f1723af98fca25d4f43edb419e771344')
    version('1.1.0', sha256='a96db7fa39a58f1ed34c6e78d8f5f7e4cf0882afb301323b5c6975d6729203e4')
    version('1.0.0', sha256='ff16f118eb3f759183441835e932b87358dd80ab9800ce576a8f3df1b6f01cf5')
    version('0.4-1', sha256='eff14a9af4975f23f8e1f4347d82c33c32c0b4f4f3e11370c582a89aeb8ac68e')
    version('0.7', sha256='2557d5d59a45620ec9de340c2c25eec4cc478d3fc3f8b87979cf337c5bcfde11')

    depends_on('r@3.0.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/hybridsim/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Hybridsim(MakefilePackage):
    """HybridSim provides cycle-accurate simulation of a non-volatile memory
    system augmented with a DRAM based cache. It uses DRAMSim2 for the DRAM
    model and NVDIMMSim for the non-volatile memory model"""

    homepage = "https://github.com/jimstevens2001/HybridSim"
    git = "https://github.com/jimstevens2001/HybridSim"
    url = "https://github.com/jimstevens2001/HybridSim/archive/v2.0.1.tar.gz"

    maintainers = ['jjwilke']

    version('2.0.1', sha256="57b82ac929acd36de84525e4d61358f1ab6532f5b635ca3f560e563479921937")

    depends_on("dramsim2")
    depends_on("nvdimmsim")

    patch("makefile.patch", when="@2.0.1")

    def build(self, spec, prefix):
        symlink(spec["dramsim2"].prefix, "DRAMSim2")
        symlink(spec["nvdimmsim"].prefix, "NVDIMMSim")
        if spec.satisfies("platform=darwin"):
            make("libhybridsim.dylib")
        else:
            make("libhybridsim.so")

    def install(self, spec, prefix):
        install_tree(".", prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/py-shellingham/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyShellingham(PythonPackage):
    """Tool to Detect Surrounding Shell"""

    homepage = "https://github.com/sarugaku/shellingham"
    pypi = "shellingham/shellingham-1.4.0.tar.gz"

    version('1.4.0', sha256='4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e')

    depends_on('python@2.6:2.7,3.4:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/ruby-mustache/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class RubyMustache(RubyPackage):
    """Inspired by ctemplate and et, Mustache is a framework-agnostic way to
    render logic-free views."""

    homepage = "https://github.com/mustache/mustache"
    url = "https://github.com/mustache/mustache/archive/v1.1.1.tar.gz"

    version('1.1.1', sha256='9ab4a9842a37d5278789ba26152b0b78f649e3020266809ec33610a89f7e65ea')

    depends_on('ruby@2.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin.mock/packages/vdefault-or-external-root/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class VdefaultOrExternalRoot(Package):
    """Test that we don't prefer adding an external to using
    a default variant value.
    """

    homepage = 'https://www.example.org'
    url = 'https://example.org/files/v3.4/cmake-3.4.3.tar.gz'

    version('1.0', '4cb3ff35b2472<PASSWORD>0<PASSWORD>2<PASSWORD>3')

    depends_on('vdefault-or-external')
player1537-forks/spack
var/spack/repos/builtin/packages/geant4/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Geant4(CMakePackage):
    """Geant4 is a toolkit for the simulation of the passage of particles
    through matter. Its areas of application include high energy, nuclear
    and accelerator physics, as well as studies in medical and space
    science."""

    homepage = "http://geant4.cern.ch/"
    url = "https://gitlab.cern.ch/geant4/geant4/-/archive/v10.7.1/geant4-v10.7.1.tar.gz"

    tags = ['hep']

    maintainers = ['drbenmorgan']

    version('11.0.0', sha256='04d11d4d9041507e7f86f48eb45c36430f2b6544a74c0ccaff632ac51d9644f1')
    version('10.7.3', sha256='8615d93bd4178d34f31e19d67bc81720af67cdab1c8425af8523858dcddcf65b', preferred=True)
    version('10.7.2', sha256='593fc85883a361487b17548ba00553501f66a811b0a79039276bb75ad59528cf')
    version('10.7.1', sha256='2aa7cb4b231081e0a35d84c707be8f35e4edc4e97aad2b233943515476955293')
    version('10.7.0', sha256='c991a139210c7f194720c900b149405090058c00beb5a0d2fac5c40c42a262d4')
    version('10.6.3', sha256='bf96d6d38e6a0deabb6fb6232eb00e46153134da645715d636b9b7b4490193d3')
    version('10.6.2', sha256='e381e04c02aeade1ed8cdd9fdbe7dcf5d6f0f9b3837a417976b839318a005dbd')
    version('10.6.1', sha256='4fd64149ae26952672a81ce5579d3806fda4bd251d486897093ac57633a42b7e')
    version('10.6.0', sha256='eebe6a170546064ff81ab3b00f513ccd1d4122a026514982368d503ac55a4ee4')
    version('10.5.1', sha256='2397eb859dc4de095ff66059d8bda9f060fdc42e10469dd7890946293eeb0e39')
    version('10.4.3', sha256='67f3bb6405a2c77e573936c2b933f5a4a33915aa379626a2eb3012009b91e1da')
    version('10.4.0', sha256='e919b9b0a88476e00c0b18ab65d40e6a714b55ee4778f66bac32a5396c22aa74')
    version('10.3.3', sha256='bcd36a453da44de9368d1d61b0144031a58e4b43a6d2d875e19085f2700a89d8')

    _cxxstd_values = ('11', '14', '17')

    variant('cxxstd', default=_cxxstd_values[0], values=_cxxstd_values,
            multi=False,
            description='Use the specified C++ standard when building.')
    conflicts('cxxstd=11', when='@11:', msg='geant4@11: only supports cxxstd=17')
    conflicts('cxxstd=14', when='@11:', msg='geant4@11: only supports cxxstd=17')

    variant('threads', default=True, description='Build with multithreading')
    variant('vecgeom', default=False, description='Enable vecgeom support')
    variant('opengl', default=False, description='Optional OpenGL support')
    variant('x11', default=False, description='Optional X11 support')
    variant('motif', default=False, description='Optional motif support')
    variant('qt', default=False, description='Enable Qt support')
    variant('python', default=False, description='Enable Python bindings')
    variant('tbb', default=False, description='Use TBB as a tasking backend', when='@11:')
    variant('vtk', default=False, description='Enable VTK support', when='@11:')

    depends_on('cmake@3.16:', type='build', when='@11.0.0:')
    depends_on('cmake@3.8:', type='build', when='@10.6.0:')
    depends_on('cmake@3.5:', type='build')

    for _vers in ["11.0.0", "10.7.3", "10.7.2", "10.7.1", "10.7.0", "10.6.3",
                  "10.6.2", "10.6.1", "10.6.0", "10.5.1", "10.4.3", "10.4.0",
                  "10.3.3"]:
        depends_on('geant4-data@' + _vers, type='run', when='@' + _vers)

    depends_on("expat")
    depends_on("zlib")
    depends_on('tbb', when='+tbb')
    depends_on('vtk@8.2:', when='+vtk')

    # Python, with boost requirement dealt with in cxxstd section
    depends_on('python@3:', when='+python')
    extends('python', when='+python')
    conflicts('+python', when='@:10.6.1',
              msg='Geant4 <= 10.6.1 cannot be built with Python bindings')

    for std in _cxxstd_values:
        # CLHEP version requirements to be reviewed
        depends_on('clhep@2.4.5.1: cxxstd=' + std, when='@11.0.0: cxxstd=' + std)
        depends_on('clhep@2.4.4.0: cxxstd=' + std, when='@10.7.0: cxxstd=' + std)
        depends_on('clhep@2.3.3.0: cxxstd=' + std, when='@10.3.3:10.6 cxxstd=' + std)

        # Spack only supports Xerces-c 3 and above, so no version req
        depends_on('xerces-c netaccessor=curl cxxstd=' + std, when='cxxstd=' + std)

        # Vecgeom specific versions for each Geant4 version
        depends_on('vecgeom@1.1.18:1.1 cxxstd=' + std, when='@11.0.0: +vecgeom cxxstd=' + std)
        depends_on('vecgeom@1.1.8:1.1 cxxstd=' + std, when='@10.7.0: +vecgeom cxxstd=' + std)
        depends_on('vecgeom@1.1.5 cxxstd=' + std, when='@10.6.0:10.6 +vecgeom cxxstd=' + std)
        depends_on('vecgeom@1.1.0 cxxstd=' + std, when='@10.5.0:10.5 +vecgeom cxxstd=' + std)
        depends_on('vecgeom@0.5.2 cxxstd=' + std, when='@10.4.0:10.4 +vecgeom cxxstd=' + std)
        depends_on('vecgeom@0.3rc cxxstd=' + std, when='@10.3.0:10.3 +vecgeom cxxstd=' + std)

        # Boost.python, conflict handled earlier
        depends_on('boost@1.70: +python cxxstd=' + std, when='+python cxxstd=' + std)

    # Visualization driver dependencies
    depends_on("gl", when='+opengl')
    depends_on("glu", when='+opengl')
    depends_on("glx", when='+opengl+x11')
    depends_on("libx11", when='+x11')
    depends_on("libxmu", when='+x11')
    depends_on("motif", when='+motif')
    depends_on("qt@5: +opengl", when="+qt")

    # As released, 10.03.03 has issues with respect to using external
    # CLHEP.
    patch('CLHEP-10.03.03.patch', level=1, when='@10.3.3')
    # These patches can be applied independent of the cxxstd value?
    patch('cxx17.patch', when='@:10.3 cxxstd=17')
    patch('cxx17_geant4_10_0.patch', level=1, when='@10.4.0 cxxstd=17')
    patch('geant4-10.4.3-cxx17-removed-features.patch', level=1,
          when='@10.4.3 cxxstd=17')

    def cmake_args(self):
        spec = self.spec

        # Core options
        options = [
            '-DGEANT4_USE_SYSTEM_CLHEP=ON',
            '-DGEANT4_USE_SYSTEM_EXPAT=ON',
            '-DGEANT4_USE_SYSTEM_ZLIB=ON',
            '-DGEANT4_USE_G3TOG4=ON',
            '-DGEANT4_USE_GDML=ON',
            '-DXERCESC_ROOT_DIR={0}'.format(spec['xerces-c'].prefix)
        ]

        # Use the correct C++ standard option for the requested version
        if spec.version >= Version('11.0'):
            options.append(
                self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'))
        else:
            options.append(
                self.define_from_variant('GEANT4_BUILD_CXXSTD', 'cxxstd'))

        # Don't install the package cache file as Spack will set
        # up CMAKE_PREFIX_PATH etc for the dependencies
        if spec.version >= Version('10.6'):
            options.append('-DGEANT4_INSTALL_PACKAGE_CACHE=OFF')

        # Multithreading
        options.append(self.define_from_variant('GEANT4_BUILD_MULTITHREADED',
                                                'threads'))
        options.append(self.define_from_variant('GEANT4_USE_TBB', 'tbb'))

        if '+threads' in spec:
            # Locked at global-dynamic to allow use cases that load the
            # geant4 libs at application runtime
            options.append('-DGEANT4_BUILD_TLS_MODEL=global-dynamic')

        # Never install the data with geant4, but point to the dependent
        # geant4-data's install directory to correctly set up the
        # Geant4Config.cmake values for Geant4_DATASETS .
        options.append(self.define('GEANT4_INSTALL_DATA', False))
        options.append(self.define('GEANT4_INSTALL_DATADIR', self.datadir))

        # Vecgeom
        if '+vecgeom' in spec:
            options.append('-DGEANT4_USE_USOLIDS=ON')
            options.append('-DUSolids_DIR=%s' % spec[
                'vecgeom'].prefix.lib.CMake.USolids)

        # Visualization options
        if 'platform=darwin' not in spec:
            if "+x11" in spec and "+opengl" in spec:
                options.append('-DGEANT4_USE_OPENGL_X11=ON')
            if "+motif" in spec and "+opengl" in spec:
                options.append('-DGEANT4_USE_XM=ON')
            if "+x11" in spec:
                options.append('-DGEANT4_USE_RAYTRACER_X11=ON')

        if '+qt' in spec:
            options.append('-DGEANT4_USE_QT=ON')
            options.append(
                '-DQT_QMAKE_EXECUTABLE=%s' % spec['qt'].prefix.bin.qmake)

        options.append(self.define_from_variant('GEANT4_USE_VTK', 'vtk'))

        # Python
        if spec.version > Version('10.6.1'):
            options.append(self.define_from_variant('GEANT4_USE_PYTHON',
                                                    'python'))

        return options

    @property
    def datadir(self):
        dataspec = self.spec['geant4-data']
        return join_path(
            dataspec.prefix.share,
            '{0}-{1}'.format(dataspec.name, dataspec.version.dotted)
        )
player1537-forks/spack
var/spack/repos/builtin/packages/dislin/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Dislin(Package):
    """DISLIN is a high level and easy to use graphics library for
    displaying data as curves, bar graphs, pie charts, 3D-colour plots,
    surfaces, contours and maps."""

    homepage = "https://www.mps.mpg.de/dislin"
    url = "ftp://ftp.gwdg.de/pub/grafik/dislin/linux/i586_64/dislin-11.0.linux.i586_64.tar.gz"

    version('11.0', sha256='13d28188924e0b0b803d72aa4b48be4067e98e890701b0aa6f54a11c7d34dd10')

    depends_on('motif')
    depends_on('gl')
    depends_on('glx')

    @property
    def libs(self):
        query_parameters = self.spec.last_query.extra_parameters
        query2libraries = {
            tuple(): ['libdislin'],
            ('d',): ['libdislin_d'],
            ('c',): ['libdislnc'],
            ('cd',): ['libdislnc_d'],
            ('cxx',): ['libdiscpp'],
            ('java',): ['libdisjava']
        }
        key = tuple(query_parameters)
        libraries = query2libraries[key]
        return find_libraries(
            libraries, root=self.prefix, shared=True, recursive=True
        )

    def setup_build_environment(self, env):
        env.set('DISLIN', self.prefix)

    def setup_run_environment(self, env):
        env.set('DISLIN', self.prefix)
        env.prepend_path('PATH', self.prefix)
        env.prepend_path('LD_LIBRARY_PATH', self.prefix)
        env.prepend_path('LD_LIBRARY_PATH', self.spec['motif'].prefix.lib)
        env.prepend_path('LD_LIBRARY_PATH', self.spec['mesa'].prefix.lib)

    def setup_dependent_run_environment(self, env, dependent_spec):
        env.prepend_path('LD_LIBRARY_PATH', self.prefix)

    def install(self, spec, prefix):
        install = Executable('./INSTALL')
        install()
        with working_dir('examples'):
            install('dislin_d.h', prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/fermisciencetools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Fermisciencetools(Package):
    """The Fermi Science Tools consists of the basic tools necessary to
    analyze Fermi data. This is the binary version for Linux x86_64 with
    libc-2.17."""

    homepage = "https://fermi.gsfc.nasa.gov/ssc/data/analysis/software/"
    url = "https://fermi.gsfc.nasa.gov/ssc/data/analysis/software/v11r5p3/ScienceTools-v11r5p3-fssc-20180124-x86_64-unknown-linux-gnu-libc2.17.tar.gz"

    # We are currently using the binary distribution. The source distribution
    # is also available, but there appear to be some logical errors in its
    # configure scripts that cause the build from source to fail. Hopefully
    # someone can figure that out so we can switch to the source distribution
    # instead.
    version('11r5p3', sha256='2f4fc32a0b2e5c0f1ddb220a0560f67e66052b7907c72dba181908dc9269ffe8')

    def install(self, spec, prefix):
        install_tree('x86_64-unknown-linux-gnu-libc2.17', prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/ragel/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Ragel(AutotoolsPackage):
    """Ragel State Machine Compiler

    Ragel compiles executable finite state machines from regular languages.
    Ragel targets C, C++ and ASM. Ragel state machines can not only
    recognize byte sequences as regular expression machines do, but can
    also execute code at arbitrary points in the recognition of a regular
    language. Code embedding is done using inline operators that do not
    disrupt the regular language syntax.
    """

    homepage = "https://www.colm.net/open-source/ragel"
    git = "git://colm.net/ragel.git"
    url = "https://www.colm.net/files/ragel/ragel-6.10.tar.gz"

    version('6.10', sha256='5f156edb65d20b856d638dd9ee2dfb43285914d9aa2b6ec779dac0270cd56c3f')

    depends_on('colm', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/libmatheval/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libmatheval(AutotoolsPackage, GNUMirrorPackage):
    """GNU libmatheval is a library (callable from C and Fortran) to parse
    and evaluate symbolic expressions input as text. It supports expressions
    in any number of variables of arbitrary names, decimal and symbolic
    constants, basic unary and binary operators, and elementary mathematical
    functions. In addition to parsing and evaluation, libmatheval can also
    compute symbolic derivatives and output expressions to strings."""

    homepage = "https://www.gnu.org/software/libmatheval/"
    gnu_mirror_path = "libmatheval/libmatheval-1.1.11.tar.gz"

    version('1.1.11', sha256='474852d6715ddc3b6969e28de5e1a5fbaff9e8ece6aebb9dc1cc63e9e88e89ab')

    # Only needed for unit tests, but configure crashes without it
    depends_on('guile', type='build')
    depends_on('flex')

    # guile 2.0 provides a deprecated interface for the unit test using guile
    patch('guile-2.0.patch', when='^guile@2.0')
    # guile 2.2 does not support deprecated functions any longer;
    # the patch skips the unit tests
    patch('guile-2.2.patch', when='^guile@2.2:')
player1537-forks/spack
var/spack/repos/builtin/packages/py-nistats/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyNistats(PythonPackage):
    """Modeling and Statistical analysis of fMRI data in Python."""

    homepage = "https://github.com/nilearn/nistats"
    pypi = "nistats/nistats-0.0.1rc0.tar.gz"

    version('0.0.1rc0', sha256='dcc4c4e410f542fd72e02e12b3b6531851bae2680d08ad29658b272587ef2f98')
    version('0.0.1b2', sha256='a853149087bafbf1bed12664ed8889a63ff15dde1fb7a9d51e8a094afc8d695d')

    depends_on('python@2.7:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-numpy@1.11:', type=('build', 'run'))
    depends_on('py-scipy@0.17:', type=('build', 'run'))
    depends_on('py-scikit-learn@0.18:', type=('build', 'run'))
    depends_on('py-nibabel@2.0.2:', type=('build', 'run'))
    # needs +plotting to avoid ModuleNotFoundError:
    # 'nilearn.plotting.js_plotting_utils' when importing nistats.reporting
    # Functionality has been incorporated into py-nilearn@0.7:
    depends_on('py-nilearn+plotting@0.4:0.6', type=('build', 'run'))
    depends_on('py-pandas@0.18:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/tempestremap/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Tempestremap(AutotoolsPackage):
    """TempestRemap is a conservative, consistent and monotone remapping
    package for arbitrary grid geometry with support for finite volumes
    and finite elements. There is still quite a bit of work to be done,
    but any feedback on the software in its current form is appreciated."""

    homepage = "https://github.com/ClimateGlobalChange/tempestremap"
    url = "https://github.com/ClimateGlobalChange/tempestremap/archive/v2.0.5.tar.gz"

    maintainers = ['iulian787', 'vijaysm', 'paullric']

    version('2.0.5', sha256='8618f5cbde450922efa1d77e67b062c557788b0cf4304adca30237afe3ade887')
    version('2.0.4', sha256='8349eeb604e97b13d2ecde8626a69e579a7af70ad0e8a6925a8bb4306a4963a4')
    version('2.0.3', sha256='b4578c2cb101ba091a10dc914e15ac968257f5db27ca78bc9fb5dbd70bce191f')
    version('2.0.2', sha256='2347bf804d19d515cb630a76b87e6dc6edcc1a828ff8c0f2a8a28e77794bad13')
    version('2.0.1', sha256='a3f1bef8cc413a689d429ac56f2bcc2e1d282d99797c3375233de792a7448ece')
    version('2.0.0', sha256='5850e251a4ad04fc924452f49183e5e12c38725832a568e57fa424a844b8a000')

    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')

    depends_on('netcdf-c')
    depends_on('blas')
    depends_on('lapack')

    def configure_args(self):
        spec = self.spec
        options = []
        options.append('--with-netcdf=%s' % spec['netcdf-c'].prefix)
        options.append('--with-blas=%s' % spec['blas'].libs.ld_flags)
        options.append('--with-lapack=%s' % spec['lapack'].libs.ld_flags)
        return options
player1537-forks/spack
var/spack/repos/builtin/packages/coinutils/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Coinutils(AutotoolsPackage):
    """CoinUtils is an open-source collection of classes and helper
    functions that are generally useful to multiple COIN-OR projects."""

    homepage = "https://projects.coin-or.org/Coinutils"
    url = "https://github.com/coin-or/CoinUtils/archive/releases/2.11.4.tar.gz"

    version('2.11.4', sha256='d4effff4452e73356eed9f889efd9c44fe9cd68bd37b608a5ebb2c58bd45ef81')

    build_directory = 'spack-build'
player1537-forks/spack
var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyJupyterServerMathjax(PythonPackage):
    """MathJax resources as a Jupyter Server Extension."""

    homepage = "http://jupyter.org/"
    pypi = "jupyter_server_mathjax/jupyter_server_mathjax-0.2.3.tar.gz"

    version('0.2.3', sha256='564e8d1272019c6771208f577b5f9f2b3afb02b9e2bff3b34c042cef8ed84451')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-wheel', type='build')
    depends_on('py-jupyter-packaging', type='build')
    depends_on('py-jupyter-server@1.1:1', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/py-requests-ntlm/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyRequestsNtlm(PythonPackage):
    """This package allows for HTTP NTLM authentication using the requests
    library."""

    homepage = "https://github.com/requests/requests-ntlm"
    pypi = "requests_ntlm/requests_ntlm-1.1.0.tar.gz"

    version('1.1.0', sha256='9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71')

    depends_on('py-setuptools', type='build')
    depends_on('py-requests@2.0.0:', type=('build', 'run'))
    depends_on('py-ntlm-auth@1.0.2:', type=('build', 'run'))
    depends_on('py-cryptography@1.3:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/astra/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Astra(Package):
    """A Space Charge Tracking Algorithm."""

    homepage = "https://www.desy.de/~mpyflo/"

    version('2020-02-03',
            sha256='ca9ee7d3d369f9040fbd595f57f3153f712d789b66385fd2d2de88a69a774b83',
            expand=False,
            url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/Astra')

    # no longer available?
    # version('2016-11-30',
    #         sha256='50738bf924724e2dd15f1d924b290ffb0f7c703e5d5ae02ffee2db554338801e',
    #         expand=False,
    #         url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/Astra')

    variant('gui', default=False, description='Install plotting/gui tools')

    resource(name='generator',
             url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/generator',
             sha256='d31cf9fcfeb90ce0e729d8af628caf4a23f7e588a3d412d5b19241e8c684e531',
             expand=False,
             placement='generator')

    resource(name='postpro',
             url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/postpro',
             sha256='f47efb14748ce1da62bcd33c9411482bee89bcab75b28a678fc764db0c21ee8d',
             expand=False,
             when='+gui',
             placement='postpro')

    resource(name='fieldplot',
             url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/fieldplot',
             sha256='89df1da96bfd9f165fa148b84376af558e6633ab2dda837273706143ff863c96',
             expand=False,
             when='+gui',
             placement='fieldplot')

    resource(name='lineplot',
             url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/lineplot',
             sha256='d2d5702be9cb3d96391c6a0ca37366d580ced1f0f722fb33a6039ad7fd43b69a',
             expand=False,
             when='+gui',
             placement='lineplot')

    resource(name='pgxwin_server',
             url='https://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/pgxwin_server',
             sha256='d2d5702be9cb3d96391c6a0ca37366d580ced1f0f722fb33a6039ad7fd43b69a',
             expand=False,
             when='+gui',
             placement='pgxwin_server')

    depends_on('libxcb', when='+gui')
    depends_on('libx11', when='+gui')

    def install(self, spec, prefix):
        mkdir(prefix.bin)
        install('Astra', prefix.bin)
        install('generator/generator', prefix.bin)
        if spec.satisfies('+gui'):
            install('postpro/postpro', prefix.bin)
            install('fieldplot/fieldplot', prefix.bin)
            install('lineplot/lineplot', prefix.bin)
            install('pgxwin_server/pgxwin_server', prefix.bin)

        chmod = which('chmod')
        chmod('+x', join_path(prefix.bin, 'Astra'))
        chmod('+x', join_path(prefix.bin, 'generator'))
        if spec.satisfies('+gui'):
            chmod('+x', join_path(prefix.bin, 'postpro'))
            chmod('+x', join_path(prefix.bin, 'fieldplot'))
            chmod('+x', join_path(prefix.bin, 'lineplot'))
            chmod('+x', join_path(prefix.bin, 'pgxwin_server'))
player1537-forks/spack
lib/spack/spack/test/link_paths.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import pytest

import spack.paths
from spack.compiler import _parse_non_system_link_dirs

#: directory with sample compiler data
datadir = os.path.join(spack.paths.test_path, 'data',
                       'compiler_verbose_output')


@pytest.fixture(autouse=True)
def allow_nonexistent_paths(monkeypatch):
    # Allow nonexistent paths to be detected as part of the output
    # for testing purposes.
    monkeypatch.setattr(os.path, 'isdir', lambda x: True)


def check_link_paths(filename, paths):
    with open(os.path.join(datadir, filename)) as file:
        output = file.read()
    detected_paths = _parse_non_system_link_dirs(output)

    actual = detected_paths
    expected = paths

    missing_paths = list(x for x in expected if x not in actual)
    assert not missing_paths

    extra_paths = list(x for x in actual if x not in expected)
    assert not extra_paths

    assert actual == expected


def test_icc16_link_paths():
    check_link_paths('icc-16.0.3.txt', [
        '/usr/tce/packages/intel/intel-16.0.3/compilers_and_libraries_2016.3.210/linux/compiler/lib/intel64_lin',  # noqa
        '/usr/tce/packages/gcc/gcc-4.9.3/lib64/gcc/x86_64-unknown-linux-gnu/4.9.3',  # noqa
        '/usr/tce/packages/gcc/gcc-4.9.3/lib64'])


def test_pgi_link_paths():
    check_link_paths('pgcc-16.3.txt', [
        '/usr/tce/packages/pgi/pgi-16.3/linux86-64/16.3/lib'])


def test_gcc7_link_paths():
    check_link_paths('gcc-7.3.1.txt', [])


def test_clang4_link_paths():
    check_link_paths('clang-4.0.1.txt', [])


def test_xl_link_paths():
    check_link_paths('xl-13.1.5.txt', [
        '/opt/ibm/xlsmp/4.1.5/lib',
        '/opt/ibm/xlmass/8.1.5/lib',
        '/opt/ibm/xlC/13.1.5/lib'])


def test_cce_link_paths():
    check_link_paths('cce-8.6.5.txt', [
        '/opt/gcc/6.1.0/snos/lib64',
        '/opt/cray/dmapp/default/lib64',
        '/opt/cray/pe/mpt/7.7.0/gni/mpich-cray/8.6/lib',
        '/opt/cray/pe/libsci/17.12.1/CRAY/8.6/x86_64/lib',
        '/opt/cray/rca/2.2.16-6.0.5.0_15.34__g5e09e6d.ari/lib64',
        '/opt/cray/pe/pmi/5.0.13/lib64',
        '/opt/cray/xpmem/2.2.4-6.0.5.0_4.8__g35d5e73.ari/lib64',
        '/opt/cray/dmapp/7.1.1-6.0.5.0_49.8__g1125556.ari/lib64',
        '/opt/cray/ugni/6.0.14-6.0.5.0_16.9__g19583bb.ari/lib64',
        '/opt/cray/udreg/2.3.2-6.0.5.0_13.12__ga14955a.ari/lib64',
        '/opt/cray/alps/6.5.28-6.0.5.0_18.6__g13a91b6.ari/lib64',
        '/opt/cray/pe/atp/2.1.1/libApp',
        '/opt/cray/pe/cce/8.6.5/cce/x86_64/lib',
        '/opt/cray/wlm_detect/1.3.2-6.0.5.0_3.1__g388ccd5.ari/lib64',
        '/opt/gcc/6.1.0/snos/lib/gcc/x86_64-suse-linux/6.1.0',
        '/opt/cray/pe/cce/8.6.5/binutils/x86_64/x86_64-unknown-linux-gnu/lib'])


def test_clang_apple_ld_link_paths():
    check_link_paths('clang-9.0.0-apple-ld.txt', [
        '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/usr/lib'])  # noqa


def test_nag_mixed_gcc_gnu_ld_link_paths():
    # This is a test of a mixed NAG/GCC toolchain, i.e. 'cxx' is set to g++
    # and is used for the rpath detection. The reference compiler output is a
    # result of
    # '/path/to/gcc/bin/g++ -Wl,-v ./main.c'.
    check_link_paths('collect2-6.3.0-gnu-ld.txt', [
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0',  # noqa
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64',  # noqa
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib'])  # noqa


def test_nag_link_paths():
    # This is a test of a NAG-only toolchain, i.e. 'cc' and 'cxx' are empty,
    # and therefore 'fc' is used for the rpath detection). The reference
    # compiler output is a result of
    # 'nagfor -Wc=/path/to/gcc/bin/gcc -Wl,-v ./main.c'.
    check_link_paths('nag-6.2-gcc-6.5.0.txt', [
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0',  # noqa
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64',  # noqa
        '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib'])  # noqa


def test_obscure_parsing_rules():
    check_link_paths('obscure-parsing-rules.txt', [
        '/first/path',
        '/second/path',
        '/third/path'])
player1537-forks/spack
var/spack/repos/builtin/packages/qthreads/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


def is_integer(x):
    """Any integer value"""
    try:
        return float(x).is_integer()
    except ValueError:
        return False


class Qthreads(AutotoolsPackage):
    """The qthreads API is designed to make using large numbers of
    threads convenient and easy, and to allow portable access to
    threading constructs used in massively parallel shared memory
    environments. The API maps well to both MTA-style threading and
    PIM-style threading, and we provide an implementation of this
    interface in both a standard SMP context as well as the SST
    context. The qthreads API provides access to full/empty-bit (FEB)
    semantics, where every word of memory can be marked either full or
    empty, and a thread can wait for any word to attain either state."""

    homepage = "http://www.cs.sandia.gov/qthreads/"
    url = "https://github.com/Qthreads/qthreads/releases/download/1.10/qthread-1.10.tar.bz2"

    test_requires_compiler = True
    test_base_path = 'test/basics/'
    test_list = ['hello_world_multi', 'hello_world']

    tags = ['e4s']

    version('1.16', sha256='0a95e20b08cb486de6c33bff16590f41e444ca64ab738aee697ef982fbb021d8')
    version('1.15', sha256='3ac2dc24debff004a2998933de5724b1e14e1ae262fa9942acbb01f77819a23b')
    version("1.14", sha256="16f15e5b2e35b6329a857d24c283a1e43cd49921ee49a1446d4f31bf9c6f5cf9")
    version("1.12", sha256="2c13a5f6f45bc2f22038d272be2e748e027649d3343a9f824da9e86a88b594c9")
    version("1.11", sha256="dbde6c7cb7de7e89921e47363d09cecaebf775c9d090496c2be8350355055571")
    version("1.10", sha256="29fbc2e54bcbc814c1be13049790ee98c505f22f22ccee34b7c29a4295475656")

    patch("restrict.patch", when="@:1.10")
    patch("trap.patch", when="@:1.10")

    variant('hwloc', default=True, description='hwloc support')
    variant('spawn_cache', default=False,
            description='enables worker specific cache of spawns')
    variant('scheduler', default='nemesis',
            values=('nemesis', 'lifo', 'mutexfifo', 'mtsfifo',
                    'sherwood', 'distrib', 'nottingham'),
            multi=False,
            description='Specify which scheduler policy to use')
    variant('static', default=True, description='Build static library')
    variant('stack_size', default=4096,
            description='Specify number of bytes to use in a stack',
            values=is_integer)

    depends_on("hwloc@1.0:1", when="@:1.15 +hwloc")
    depends_on("hwloc@1.5:2", when="@1.16: +hwloc")

    def configure_args(self):
        spec = self.spec
        if "+hwloc" in self.spec:
            args = [
                "--enable-guard-pages",
                "--with-topology=hwloc",
                "--with-hwloc=%s" % spec["hwloc"].prefix]
        else:
            args = ["--with-topology=no"]

        if '+spawn_cache' in self.spec:
            args.append('--enable-spawn-cache')
        else:
            args.append('--disable-spawn-cache')

        if '+static' in self.spec:
            args.append('--enable-static=yes')
        else:
            args.append('--enable-static=no')

        args.append('--with-default-stack-size=%s'
                    % self.spec.variants['stack_size'].value)

        args.append('--with-scheduler=%s'
                    % self.spec.variants['scheduler'].value)
        return args

    @run_after('install')
    def setup_build_tests(self):
        """Copy the build test files after the package is installed to an
        install test subdirectory for use during `spack test run`."""
        tests = self.test_list
        relative_test_dir = self.test_base_path

        files_to_cpy = []
        header = 'test/argparsing.h'

        for test in tests:
            test_path = join_path(relative_test_dir, test + '.c')
            files_to_cpy.append(test_path)
        files_to_cpy.append(header)
        self.cache_extra_test_sources(files_to_cpy)

    def build_tests(self):
        """Build and run the added smoke (install) test."""
        tests = self.test_list
        relative_test_dir = self.test_base_path

        for test in tests:
            options = [
                '-I{0}'.format(self.prefix.include),
                '-I{0}'.format(self.install_test_root + '/test'),
                join_path(self.install_test_root, relative_test_dir,
                          test + '.c'),
                '-o', test,
                '-L{0}'.format(self.prefix.lib),
                '-lqthread',
                '{0}{1}'.format(self.compiler.cc_rpath_arg, self.prefix.lib)]
            reason = 'test:{0}: Checking ability to link to the library.'\
                .format(test)
            self.run_test('cc', options, [], installed=False, purpose=reason)

    def run_tests(self):
        tests = self.test_list

        # Now run the program
        for test in tests:
            reason = 'test:{0}: Checking ability to execute.'.format(test)
            self.run_test(test, [], purpose=reason)

    def test(self):
        # Build
        self.build_tests()

        # Run test programs pulled from the build
        self.run_tests()
player1537-forks/spack
var/spack/repos/builtin/packages/trilinos-catalyst-ioss-adapter/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class TrilinosCatalystIossAdapter(CMakePackage):
    """Adapter for Trilinos Seacas Ioss and Paraview Catalyst"""

    homepage = "https://trilinos.org/"
    git = "https://github.com/trilinos/Trilinos.git"

    version('develop', branch='develop')
    version('master', branch='master')

    depends_on('bison', type='build')
    depends_on('flex', type='build')
    depends_on('paraview+mpi+python+osmesa')
    depends_on('py-numpy', type=('build', 'run'))
    # Here we avoid paraview trying to use netcdf-c~parallel-netcdf
    # which is netcdf-c's default, even though paraview depends on 'netcdf-c'
    # without any variants. Concretizer bug?
    depends_on('netcdf-c+parallel-netcdf')

    root_cmakelists_dir = join_path('packages', 'seacas', 'libraries',
                                    'ioss', 'src', 'visualization',
                                    'ParaViewCatalystIossAdapter')

    def setup_run_environment(self, env):
        env.prepend_path('PYTHONPATH', self.prefix.python)

    def cmake_args(self):
        spec = self.spec
        options = []

        paraview_version = 'paraview-%s' % spec['paraview'].version.up_to(2)
        options.extend([
            '-DParaView_DIR:PATH=%s' % spec['paraview'].prefix +
            '/lib/cmake/' + paraview_version
        ])
        return options
player1537-forks/spack
var/spack/repos/builtin/packages/racket/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

from spack import *


class Racket(Package):
    """The Racket programming language."""

    homepage = "https://www.racket-lang.org"

    maintainers = ['arjunguha', 'elfprince13']

    version('8.3', '<KEY>')

    depends_on('libffi', type=('build', 'link', 'run'))
    depends_on('patchutils')
    depends_on('libtool', type=('build'))

    phases = ['configure', 'build', 'install']

    def url_for_version(self, version):
        return "https://mirror.racket-lang.org/installers/{0}/racket-minimal-{0}-src-builtpkgs.tgz".format(version)

    variant('cs', default=True,
            description='Build Racket CS (new ChezScheme VM)')
    variant('bc', default=False,
            description='Build Racket BC (old MZScheme VM)')
    variant('shared', default=True, description="Enable shared")
    variant('jit', default=True, description="Just-in-Time Compilation")

    parallel = False
    extendable = True

    def toggle(self, spec, variant):
        toggle_text = ("enable" if spec.variants[variant].value else "disable")
        return "--{0}-{1}".format(toggle_text, variant)

    def configure(self, spec, prefix):
        with working_dir('src'):
            configure = Executable("./configure")
            configure_args = [self.toggle(spec, 'cs'),
                              self.toggle(spec, 'bc'),
                              self.toggle(spec, 'jit')]
            toggle_shared = self.toggle(spec, 'shared')
            if sys.platform == 'darwin':
                configure_args += ["--enable-macprefix"]
                if "+xonx" in spec:
                    configure_args += ["--enable-xonx", toggle_shared]
            else:
                configure_args += [toggle_shared]
            configure_args += ["--prefix={0}".format(prefix)]
            configure(*configure_args)

    def build(self, spec, prefix):
        with working_dir('src'):
            if spec.variants["bc"].value:
                make("bc")
            if spec.variants["cs"].value:
                make("cs")

    def install(self, spec, prefix):
        with working_dir('src'):
            if spec.variants["bc"].value:
                make('install-bc')
            if spec.variants["cs"].value:
                make('install-cs')
player1537-forks/spack
var/spack/repos/builtin/packages/amdscalapack/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *
from spack.pkg.builtin.netlib_scalapack import ScalapackBase


class Amdscalapack(ScalapackBase):
    """ScaLAPACK is a library of high-performance linear algebra routines
    for parallel distributed memory machines. It depends on external
    libraries including BLAS and LAPACK for Linear Algebra computations.

    AMD's optimized version of ScaLAPACK enables using BLIS and LibFLAME
    library that have optimized dense matrix functions and solvers for
    AMD EPYC processor family CPUs.
    """

    _name = 'amdscalapack'
    homepage = "https://developer.amd.com/amd-aocl/scalapack/"
    git = "https://github.com/amd/scalapack.git"

    maintainers = ['amd-toolchain-support']

    version('3.1', sha256='4c2ee2c44644a0feec0c6fc1b1a413fa9028f14d7035d43a398f5afcfdbacb98')
    version('3.0', sha256='6e6f3578f44a8e64518d276e7580530599ecfa8729f568303ed2590688e7096f')
    version('2.2', sha256='2d64926864fc6d12157b86e3f88eb1a5205e7fc157bf67e7577d0f18b9a7484c')

    variant(
        'build_type',
        default='Release',
        description='CMake build type',
        values=('Release', 'RelWithDebInfo'))

    variant('ilp64', default=False, description='Build with ILP64 support')

    conflicts('+ilp64', when="@:3.0",
              msg="ILP64 is supported from 3.1 onwards")

    def url_for_version(self, version):
        if version == Version('3.1'):
            return "https://github.com/amd/aocl-scalapack/archive/3.1.tar.gz"
        elif version == Version('3.0'):
            return "https://github.com/amd/scalapack/archive/3.0.tar.gz"
        elif version == Version('2.2'):
            return "https://github.com/amd/scalapack/archive/2.2.tar.gz"

    def cmake_args(self):
        """cmake_args function"""
        args = super(Amdscalapack, self).cmake_args()
        spec = self.spec

        if spec.satisfies('%gcc@10:'):
            args.extend(['-DCMAKE_Fortran_FLAGS={0}'.format(
                "-fallow-argument-mismatch")])

        if spec.satisfies('@2.2'):
            args.extend(['-DUSE_DOTC_WRAPPER:BOOL=%s' % (
                'ON' if spec.satisfies('%aocc ^amdblis') else 'OFF')])

        # -DENABLE_ILP64:BOOL=ON
        args.extend([self.define_from_variant('ENABLE_ILP64', 'ilp64')])

        # -DUSE_F2C:BOOL=ON
        args.extend([self.define('USE_F2C', spec.satisfies('@:3.0'))])

        args.extend([
            '-DLAPACK_FOUND=true',
            '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
            '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
        ])

        return args
player1537-forks/spack
var/spack/repos/builtin/packages/py-functools32/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyFunctools32(PythonPackage):
    """Backport of the functools module from Python 3.2.3 for use on 2.7
    and PyPy."""

    homepage = "https://github.com/MiCHiLU/python-functools32"
    pypi = "functools32/functools32-3.2.3-2.tar.gz"

    version('3.2.3-2', sha256='f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d')

    # pip silently replaces distutils with setuptools
    depends_on('py-setuptools', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/r-sys/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RSys(RPackage):
    """Powerful and Reliable Tools for Running System Commands in R.

    Drop-in replacements for the base system2() function with fine control
    and consistent behavior across platforms. Supports clean interruption,
    timeout, background tasks, and streaming STDIN / STDOUT / STDERR over
    binary or text connections. Arguments on Windows automatically get
    encoded and quoted to work on different locales."""

    cran = "sys"

    version('3.4', sha256='17f88fbaf222f1f8fd07919461093dac0e7175ae3c3b3264b88470617afd0487')
    version('3.2', sha256='2819498461fe2ce83d319d1a47844e86bcea6d01d10861818dba289e7099bbcc')

    def flag_handler(self, name, flags):
        if name == 'cflags':
            flags.append(self.compiler.c99_flag)
        return (flags, None, None)
player1537-forks/spack
var/spack/repos/builtin/packages/py-jupyterlab-widgets/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyJupyterlabWidgets(PythonPackage):
    """A JupyterLab extension."""

    homepage = "https://github.com/jupyter-widgets/ipywidgets"
    # Source is also available, but I'm having issues getting it to build:
    # https://github.com/jupyter-widgets/ipywidgets/issues/3324
    url = "https://files.pythonhosted.org/packages/py3/j/jupyterlab_widgets/jupyterlab_widgets-1.0.2-py3-none-any.whl"

    version('1.0.2', sha256='f5d9efface8ec62941173ba1cffb2edd0ecddc801c11ae2931e30b50492eb8f7', expand=False)

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools@40.8.0:', type='build')
    # TODO: replace this after concretizer learns how to concretize separate
    # build deps
    depends_on('py-jupyter-packaging7', type='build')
    # depends_on('py-jupyter-packaging@0.7.9:0.7', type='build')
    depends_on('py-jupyterlab@3.0:3', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/py-louie/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyLouie(PythonPackage):
    """Louie provides Python programmers with a straightforward way to
    dispatch signals between objects in a wide variety of contexts. It is
    based on PyDispatcher, which in turn was based on a highly-rated recipe
    in the Python Cookbook."""

    homepage = "https://github.com/11craft/louie/"
    url = "https://github.com/11craft/louie/archive/2.0.tar.gz"

    version('2.0', sha256='ac274ef672511357fc15d784df841c238ae13d00964094571eebabb0b14c54b2')
    version('1.1', sha256='4bc227171fc546d1a527ee3059fa17df6d35a0acc10db1f942dd3da42ad96408')

    depends_on('py-setuptools', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/py-charm4py/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyCharm4py(PythonPackage):
    """Charm4py (Charm++ for Python) is a distributed computing and
    parallel programming framework for Python, for the productive
    development of fast, parallel and scalable applications. It is built
    on top of Charm++, a C++ adaptive runtime system that has seen
    extensive use in the scientific and high-performance computing (HPC)
    communities across many disciplines, and has been used to develop
    applications that run on a wide range of devices: from small
    multi-core devices up to the largest supercomputers."""

    homepage = "https://charmpy.readthedocs.io"
    pypi = "charm4py/charm4py-1.0.tar.gz"

    # Add a list of GitHub accounts to
    # notify when the package is updated.
    maintainers = ['payerle']

    version('1.0', sha256='8ddb9f021b7379fde94b28c31f4ab6a60ced2c2a207a2d75ce57cb91b6be92bc')

    variant('mpi', default=True,
            description='build Charm++ library with the MPI instead of TCP'
                        ' communication layer')

    # Builds its own charm++, so no charmpp dependency
    depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-cython', type='build')
    depends_on('py-cffi@1.7:', type='build')
    depends_on('py-numpy@1.10.0:', type=('build', 'run'))
    depends_on('py-greenlet', type=('build', 'run'))
    depends_on('cuda')
    depends_on('mpi', when='+mpi')

    # setup.py builds its own charm++, but libcharm.so
    # ends up with a cuda dependency causing unresolved symbol errors
    # when setup.py tries to load it to get version.  We need to explicitly
    # link libcudart when building the charm++ library.
    # To do this, the following patch:
    # 1) hacks setup.py to apply a patch to the charm++ Makefile
    #    causing the Makefile to include libcudart when building libcharm.so
    # 2) inserts the patchfile needed to do so.
    # This is convoluted, but best way I see since setup.py untars the
    # charm++ sources and we need to patch a file that is in the tarball.
    #
    # The patch to the Makefile adds SPACK_CHARM4PY_EXTRALIBS to the link
    # arguments.  This needs to be set in the environment to be effective.
    patch('py-charm4py.makefile.patch', when='@1.0')

    # This sets the SPACK_CHARM4PY_EXTRALIBS env var which the
    # py-charm4py.makefile.patch adds to the build/link command for
    # libcharm.so.
    def setup_build_environment(self, env):
        env.set('SPACK_CHARM4PY_EXTRALIBS', self.spec['cuda'].libs.ld_flags)

    def install_options(self, spec, prefix):
        args = []
        if '+mpi' in spec:
            args.append('--mpi')
        return args
player1537-forks/spack
var/spack/repos/builtin/packages/gdrcopy/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Gdrcopy(MakefilePackage):
    """A fast GPU memory copy library based on NVIDIA GPUDirect RDMA
    technology."""

    homepage = "https://github.com/NVIDIA/gdrcopy"
    url = "https://github.com/NVIDIA/gdrcopy/archive/v2.1.tar.gz"
    git = "https://github.com/NVIDIA/gdrcopy"

    version('master', branch='master')
    version('2.2', sha256='e4be119809391b18c735346d24b3b398dd9421cbff47ef12befbae40d61da45f')
    version('2.1', sha256='cecc7dcc071107f77396f5553c9109790b6d2298ae29eb2dbbdd52b2a213e4ea')
    version('2.0', sha256='98320e6e980a7134ebc4eedd6cf23647104f2b3c557f2eaf0d31a02609f5f2b0')
    version('1.3', sha256='f11cdfe389b685f6636b80b4a3312dc014a385ad7220179c1318c60e2e28af3a')

    def build(self, spec, prefix):
        make('lib')

    def install(self, spec, prefix):
        mkdir(prefix.include)
        mkdir(prefix.lib64)
        if spec.satisfies('@2.2:'):
            make('lib_install', 'prefix={0}'.format(self.prefix))
        else:
            make('lib_install', 'PREFIX={0}'.format(self.prefix))
player1537-forks/spack
var/spack/repos/builtin/packages/pythia8/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Pythia8(AutotoolsPackage):
    """The Pythia program is a standard tool for the generation of events in
    high-energy collisions, comprising a coherent set of physics models for
    the evolution from a few-body hard process to a complex multiparticle
    final state."""

    homepage = "http://home.thep.lu.se/Pythia/"
    url = "https://pythia.org/download/pythia83/pythia8306.tgz"

    tags = ['hep']

    maintainers = ['ChristianTackeGSI']

    version('8.306', sha256='734803b722b1c1b53c8cf2f0d3c30747c80fc2dde5e0ba141bc9397dad37a8f6')
    version('8.304', sha256='d3897018fb6d545eaf93bf43f32580c984a9bff49259d9dd29dff6edfbe9d9a1')
    version('8.303', sha256='cd7c2b102670dae74aa37053657b4f068396988ef7da58fd3c318c84dc37913e')
    version('8.302', sha256='7372e4cc6f48a074e6b7bc426b040f218ec4a64b0a55e89da6af56933b5f5085')
    version('8.301', sha256='51382768eb9aafb97870dca1909516422297b64ef6a6b94659259b3e4afa7f06')
    version('8.244', sha256='e34880f999daf19cdd893a187123927ba77d1bf851e30f6ea9ec89591f4c92ca', deprecated=True)
    version('8.240', sha256='d27495d8ca7707d846f8c026ab695123c7c78c7860f04e2c002e483080418d8d', deprecated=True)
    version('8.235', sha256='e82f0d6165a8250a92e6aa62fb53201044d8d853add2fdad6d3719b28f7e8e9d', deprecated=True)
    version('8.230', sha256='332fad0ed4f12e6e0cb5755df0ae175329bc16bfaa2ae472d00994ecc99cd78d', deprecated=True)
    version('8.212', sha256='f8fb4341c7e8a8be3347eb26b00329a388ccf925313cfbdba655a08d7fd5a70e', deprecated=True)

    variant('shared', default=True, description='Build shared library')
    variant('hepmc', default=True, description='Export PYTHIA events to the HEPMC format, version 2')
    variant('hepmc3', default=True, description='Export PYTHIA events to the HEPMC format, version 3')
    variant('evtgen', default=False, description='Particle decays with the EvtGen decay package')
    variant('root', default=False, description='Use ROOT trees and histograms with PYTHIA')
    variant('fastjet', default=False, description='Building of jets using the FastJet package, version 3')
    variant('lhapdf', default=False, description='Support the use of external PDF sets via LHAPDF')
    variant('rivet', default=False, description='Support use of RIVET through direct interface')
    variant('python', default=False, description='Interface to use PYTHIA in Python')
    variant('madgraph5amc', default=False, description='MadGraph matrix element plugins for parton showers')
    variant('openmpi', default=False, description='Multi-threading support via OpenMP')
    variant('mpich', default=False, description='Multi-threading support via MPICH')
    variant('hdf5', default=False, description='Support the use of HDF5 format')

    depends_on('rsync', type='build')
    depends_on('hepmc', when='+hepmc')
    depends_on('hepmc3', when='+hepmc3')
    depends_on('root', when='+root')
    depends_on('evtgen', when='+evtgen')
    depends_on('fastjet@3.0.0:', when='+fastjet')
    depends_on('lhapdf@6.2:', when='+lhapdf')
    depends_on('boost', when='+lhapdf @:8.213')
    depends_on('rivet', when='+rivet')
    depends_on('python', when='+python')
    depends_on('madgraph5amc', when='+madgraph5amc')
    depends_on('openmpi', when='+openmpi')
    depends_on('mpich', when='+mpich')
    depends_on('hdf5', when='+hdf5')
    depends_on('highfive@2.2', when='+hdf5')

    extends('python', when='+python')

    conflicts('^evtgen+pythia8', when='+evtgen',
              msg='Building pythia with evtgen bindings and '
                  'evtgen with pythia bindings results in a circular dependency '
                  'that cannot be resolved at the moment! '
                  'Use pythia8+evtgen^evtgen~pythia8')
    conflicts('+evtgen', when='~hepmc', msg='+evtgen requires +hepmc')
    conflicts('+mpich', when='@:8.304', msg='MPICH support was added in 8.304')
    conflicts('+hdf5', when='@:8.304', msg='HDF5 support was added in 8.304')
    conflicts('+hdf5', when='~mpich', msg='MPICH is required for reading HDF5 files')

    def configure_args(self):
        args = []

        if self.spec.satisfies('@:8.301 +shared'):
            # Removed in 8.301
            args.append('--enable-shared')

        if '+hepmc' in self.spec:
            args.append('--with-hepmc2=%s' % self.spec['hepmc'].prefix)
        else:
            args.append('--without-hepmc2')

        if '+lhapdf' in self.spec:
            args.append('--with-lhapdf6=%s' % self.spec['lhapdf'].prefix)
            if self.spec.satisfies('@:8.213'):
                args.append('--with-lhapdf6-plugin=LHAPDF6.h')
                args.append('--with-boost=' + self.spec['boost'].prefix)

        if '+madgraph5amc' in self.spec:
            args.append('--with-mg5mes=' + self.spec['madgraph5amc'].prefix)
        else:
            args.append('--without-mg5mes')

        args += self.with_or_without('hepmc3', activation_value='prefix')

        if '+fastjet' in self.spec:
            args.append('--with-fastjet3=' + self.spec['fastjet'].prefix)
        else:
            args.append('--without-fastjet3')

        args += self.with_or_without('evtgen', activation_value='prefix')
        args += self.with_or_without('root', activation_value='prefix')
        args += self.with_or_without('rivet', activation_value='prefix')
        if self.spec.satisfies('+rivet'):
            args.append('--with-yoda=' + self.spec['yoda'].prefix)

        args += self.with_or_without('python', activation_value='prefix')
        args += self.with_or_without('openmp', activation_value='prefix',
                                     variant='openmpi')
        args += self.with_or_without('mpich', activation_value='prefix')
        args += self.with_or_without('hdf5', activation_value='prefix')
        if self.spec.satisfies('+hdf5'):
            args.append('--with-highfive=' + self.spec['highfive'].prefix)

        return args

    def url_for_version(self, version):
        url = self.url.rsplit('/', 2)[0]
        dirname = 'pythia' + str(version.joined)[:2]
        fname = 'pythia' + str(version.joined) + '.tgz'
        return url + '/' + dirname + '/' + fname

    def setup_common_env(self, env):
        env.set('PYTHIA8', self.prefix)
        env.set('PYTHIA8DATA', self.prefix.share.Pythia8.xmldoc)

    def setup_dependent_run_environment(self, env, dependent_spec):
        self.setup_common_env(env)

    def setup_dependent_build_environment(self, env, dependent_spec):
        self.setup_common_env(env)
player1537-forks/spack
var/spack/repos/builtin/packages/py-discover/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyDiscover(PythonPackage):
    """Test discovery for unittest."""

    pypi = "discover/discover-0.4.0.tar.gz"

    version('0.4.0', sha256='05c3fa9199e57d4b16fb653e02d65713adc1f89ef55324fb0c252b1cf9070d79')

    # pip silently replaces distutils with setuptools
    depends_on('py-setuptools', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/py-vector-quantize-pytorch/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyVectorQuantizePytorch(PythonPackage):
    """A vector quantization library originally transcribed from Deepmind's
    tensorflow implementation, made conveniently into a package. It uses
    exponential moving averages to update the dictionary."""

    homepage = "https://github.com/lucidrains/vector-quantize-pytorch"
    pypi = "vector_quantize_pytorch/vector_quantize_pytorch-0.3.9.tar.gz"

    version('0.3.9', sha256='783ca76251299f0e3eb244062bc05c4416bb29157e57077e4a8969c5277f05ee')

    depends_on('py-setuptools', type='build')
    depends_on('py-einops', type=('build', 'run'))
    depends_on('py-torch', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-rcppdate/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRcppdate(RPackage):
    """'date' C++ Header Library for Date and Time Functionality.

    'date' is a C++ header library offering extensive date and time
    functionality for the C++11, C++14 and C++17 standards written by
    <NAME> and released under the MIT license. A slightly modified version
    has been accepted (along with 'tz.h') as part of C++20. This package
    regroups all header files from the upstream repository by <NAME> so
    that other R packages can use them in their C++ code. At present, few
    of the types have explicit 'Rcpp' wrapper though these may be added as
    needed."""

    cran = "RcppDate"

    version('0.0.3', sha256='9c5ee7cf76d63cd51e8faff831f5f865762868d7d705395960c0f22e9b238bdb')
    version('0.0.1', sha256='117721fc677dfb4209200a7ff894fbbb8ee1b652d01b3878b11c3253733b4a5f')
player1537-forks/spack
lib/spack/spack/cmd/tags.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys

import six

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack.environment
import spack.repo
import spack.store
import spack.tag
import spack.util.string

description = "Show package tags and associated packages"
section = "basic"
level = "long"


def report_tags(category, tags):
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    if isatty:
        num = len(tags)
        fmt = '{0} package tag'.format(category)
        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))

    if tags:
        colify.colify(tags, output=buffer, tty=isatty, indent=4)
    else:
        buffer.write("    None\n")
    print(buffer.getvalue())


def setup_parser(subparser):
    subparser.epilog = (
        "Tags from known packages will be used if no tags are provided on "
        "the command\nline. If tags are provided, packages with at least one "
        "will be reported.\n\nYou are not allowed to provide tags and use "
        "'--all' at the same time."
    )
    subparser.add_argument(
        '-i', '--installed', action='store_true', default=False,
        help="show information for installed packages only"
    )
    subparser.add_argument(
        '-a', '--all', action='store_true', default=False,
        help="show packages for all available tags"
    )
    subparser.add_argument(
        'tag', nargs='*', help="show packages with the specified tag"
    )


def tags(parser, args):
    # Disallow combining the --all option with (positional) tags to avoid
    # confusion
    if args.all and args.tag:
        tty.die("Use the '--all' option OR provide tag(s) on the command line")

    # Provide a nice, simple message if the database is empty
    if args.installed and not spack.environment.installed_specs():
        tty.msg("No installed packages")
        return

    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return

    show_packages = args.tag or args.all

    # Only report relevant, available tags if no packages are to be shown
    if not show_packages:
        if not args.installed:
            report_tags("available", available_tags)
        else:
            tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
            tags = tag_pkgs.keys() if tag_pkgs else []
            report_tags("installed", tags)
        return

    # Report packages associated with tags
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    tags = args.tag if args.tag else available_tags
    tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
    missing = 'No installed packages' if args.installed else 'None'
    for tag in sorted(tag_pkgs):
        # TODO: Remove the sorting once we're sure no one has an old
        # TODO: tag cache since it can accumulate duplicates.
        packages = sorted(list(set(tag_pkgs[tag])))
        if isatty:
            buffer.write("{0}:\n".format(tag))

        if packages:
            colify.colify(packages, output=buffer, tty=isatty, indent=4)
        else:
            buffer.write("    {0}\n".format(missing))
        buffer.write("\n")
    print(buffer.getvalue())
player1537-forks/spack
var/spack/repos/builtin/packages/perl-xml-twig/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlXmlTwig(PerlPackage):
    """This module provides a way to process XML documents. It is built on
    top of XML::Parser.

    The module offers a tree interface to the document, while allowing you
    to output the parts of it that have been completely processed.

    It allows minimal resource (CPU and memory) usage by building the tree
    only for the parts of the documents that need actual processing, through
    the use of the twig_roots and twig_print_outside_roots options. The
    finish and finish_print methods also help to increase performance.

    XML::Twig tries to make simple things easy, so it tries its best to take
    care of a lot of the (usually) annoying (but sometimes necessary)
    features that come with XML and XML::Parser."""

    homepage = "https://metacpan.org/pod/XML::Twig"
    url = "https://cpan.metacpan.org/authors/id/M/MI/MIROD/XML-Twig-3.52.tar.gz"

    version('3.52', sha256='fef75826c24f2b877d0a0d2645212fc4fb9756ed4d2711614ac15c497e8680ad')

    depends_on('perl-xml-parser', type=('build', 'run'))

    patch('non_interactive.patch')
player1537-forks/spack
var/spack/repos/builtin/packages/r-aod/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RAod(RPackage):
    """Analysis of Overdispersed Data.

    Provides a set of functions to analyse overdispersed counts or
    proportions. Most of the methods are already available elsewhere but are
    scattered in different packages. The proposed functions should be
    considered as complements to more sophisticated methods such as
    generalized estimating equations (GEE) or generalized linear mixed
    effect models (GLMM)."""

    cran = "aod"

    version('1.3.1', sha256='052d8802500fcfdb3b37a8e3e6f3fbd5c3a54e48c3f68122402d2ea3a15403bc')

    depends_on('r@2.10:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/log4cxx/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Log4cxx(CMakePackage):
    """A C++ port of Log4j"""

    homepage = "https://logging.apache.org/log4cxx/latest_stable/"
    url = "https://dlcdn.apache.org/logging/log4cxx/0.12.0/apache-log4cxx-0.12.0.tar.gz"

    maintainers = ['nicmcd']

    version('0.12.1', sha256='7bea5cb477f0e31c838f0e1f4f498cc3b30c2eae74703ddda923e7e8c2268d22')
    version('0.12.0', sha256='bd5b5009ca914c8fa7944b92ea6b4ca6fb7d146f65d526f21bf8b3c6a0520e44')

    variant('cxxstd', default='17', description='C++ standard',
            values=('11', '17'), multi=False)

    depends_on('cmake@3.13:', type='build')

    depends_on('apr-util')
    depends_on('apr')
    depends_on('boost+thread+system', when='cxxstd=11')

    depends_on('zlib')
    depends_on('zip')

    def cmake_args(self):
        return [
            self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
            self.define('BUILD_TESTING', 'off')]
player1537-forks/spack
var/spack/repos/builtin/packages/unifdef/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Unifdef(MakefilePackage):
    """The unifdef utility selectively processes conditional C preprocessor
    #if and #ifdef directives. It removes from a file both the directives
    and the additional text that they delimit, while otherwise leaving the
    file alone."""

    homepage = "https://dotat.at/prog/unifdef/"
    url = "https://dotat.at/prog/unifdef/unifdef-2.11.tar.xz"

    maintainers = ['matthiasdiener']

    version('2.11', sha256='828ffc270ac262b88fe011136acef2780c05b0dc3c5435d005651740788d4537')

    def edit(self, spec, prefix):
        makefile = FileFilter('Makefile')
        makefile.filter(r'\$\{HOME\}', prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/serialbox/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.error
from spack import *


class Serialbox(CMakePackage):
    """Serialbox is a serialization library and tools for C/C++, Python3
    and Fortran. Serialbox is used in several projects for building
    validation frameworks against reference runs."""

    homepage = "https://github.com/GridTools/serialbox"
    url = "https://github.com/GridTools/serialbox/archive/v2.6.1.tar.gz"

    maintainers = ['skosukhin']

    version('2.6.1', sha256='b795ce576e8c4fd137e48e502b07b136079c595c82c660cfa2e284b0ef873342')
    version('2.6.0', sha256='9199f8637afbd7f2b3c5ba932d1c63e9e14d553a0cafe6c29107df0e04ee9fae')
    version('2.5.4', sha256='f4aee8ef284f58e6847968fe4620e222ac7019d805bbbb26c199e4b6a5094fee')
    version('2.5.3', sha256='696499b3f43978238c3bcc8f9de50bce2630c07971c47c9e03af0324652b2d5d')

    variant('c', default=True, description='enable C interface')
    variant('python', default=False, description='enable Python interface')
    variant('fortran', default=False, description='enable Fortran interface')
    variant('ftg', default=False,
            description='enable FortranTestGenerator frontend')
    variant('sdb', default=False, description='enable stencil debugger')
    variant('shared', default=True, description='build shared libraries')
    variant('examples', default=False, description='build the examples')
    variant('logging', default=True,
            description='enable the logging infrastructure')
    variant('async-api', default=True,
            description='enable the asynchronous API')
    variant('netcdf', default=False,
            description='build the NetCDF archive backend')
    variant('std-filesystem', default=True,
            description='use std::experimental::filesystem (no dependency on '
                        'compiled boost libs)')

    depends_on('cmake@3.12:', type='build')
    # We might be provided with an external vanilla cmake, and we need one
    # with https://gitlab.kitware.com/cmake/cmake/-/merge_requests/5025
    depends_on('cmake@3.19:', when='%pgi', type='build')

    depends_on('boost@1.54:', type='build')
    depends_on('boost+filesystem+system', when='~std-filesystem',
               type=('build', 'link'))

    depends_on('netcdf-c', when='+netcdf')

    depends_on('python@3.4:', when='+python', type=('build', 'run'))
    depends_on('py-numpy', when='+python', type=('build', 'run'))

    # pp_ser fails to process source files containing Unicode character with
    # Python 3 (https://github.com/GridTools/serialbox/pull/249):
    patch('ppser_py3.patch', when='@2.2.1:')

    # NAG patches:
    patch('nag/interface.patch', when='@2.0.1:%nag+fortran')
    patch('nag/examples.patch', when='@2.3.1:%nag+fortran+examples')
    patch('nag/ftg.patch', when='@2.3.1:%nag+ftg')

    conflicts('+ftg', when='~fortran',
              msg='the FortranTestGenerator frontend requires the Fortran '
                  'interface')
    conflicts('+ftg', when='@:2.2.999',
              msg='the FortranTestGenerator frontend is supported only '
                  'starting version 2.3.0')
    conflicts('+sdb', when='~python',
              msg='the stencil debugger requires the Python interface')
    conflicts('+fortran', when='~c',
              msg='the Fortran interface requires the C interface')
    conflicts('+python', when='~c',
              msg='the Python interface requires the C interface')
    conflicts('+python', when='~shared',
              msg='the Python interface requires the shared libraries')

    def patch(self):
        # The following is implemented as a method to avoid having two sets
        # of almost identical patch files: one with the CR symbols (for
        # versions 2.5.x) and one without them (for versions 2.6.x).

        # Remove hard-coded -march=native
        # (see https://github.com/GridTools/serialbox/pull/233):
        if self.spec.satisfies('@2.0.1:2.6.0'):
            filter_file(
                r'^(\s*set\(CMAKE_CXX_FLAGS.*-march=native)', r'#\1',
                'CMakeLists.txt')

        # Do not fallback to boost::filesystem:
        if '+std-filesystem' in self.spec:
            filter_file(
                r'(message\()'
                r'STATUS( "std::experimental::filesystem not found).*("\))',
                r'\1FATAL_ERROR\2\3',
                'CMakeLists.txt')

    @property
    def libs(self):
        query_parameters = self.spec.last_query.extra_parameters

        shared = '+shared' in self.spec

        query2libraries = {
            tuple(): ['libSerialboxCore'],
            ('c', 'fortran'): [
                'libSerialboxFortran',
                'libSerialboxC',
                'libSerialboxCore',
            ],
            ('c',): [
                'libSerialboxC',
                'libSerialboxCore',
            ],
            ('fortran',): [
                'libSerialboxFortran',
                'libSerialboxC',
                'libSerialboxCore'
            ]
        }

        key = tuple(sorted(query_parameters))
        libraries = query2libraries[key]

        if self.spec.satisfies('@2.5.0:2.5'):
            libraries = [
                '{0}{1}'.format(name, 'Shared' if shared else 'Static')
                for name in libraries]

        libs = find_libraries(
            libraries, root=self.prefix, shared=shared, recursive=True
        )

        if libs:
            return libs

        msg = 'Unable to recursively locate {0} libraries in {1}'
        raise spack.error.NoLibrariesError(
            msg.format(self.spec.name, self.spec.prefix))

    def flag_handler(self, name, flags):
        cmake_flags = []

        if name == 'cxxflags':
            # Intel (at least up to version 19.0.1, version 19.0.4 works) and
            # PGI (at least up to version 19.9, version 20.1.0 works)
            # compilers have problems with C++11 name mangling. An attempt to
            # link to libSerialboxCore leads to:
            #     undefined reference to
            #     `std::experimental::filesystem::v1::__cxx11::path::
            #      _M_find_extension[abi:cxx11]() const'
            if any(self.spec.satisfies('{0}+std-filesystem'.format(x))
                   for x in ['%intel@:19.0.1', '%pgi@:19.9']):
                cmake_flags.append('-D_GLIBCXX_USE_CXX11_ABI=0')

        return flags, None, (cmake_flags or None)

    def cmake_args(self):
        args = [
            '-DBOOST_ROOT:PATH=%s' % self.spec['boost'].prefix,
            # https://cmake.org/cmake/help/v3.15/module/FindBoost.html#boost-cmake
            self.define('Boost_NO_BOOST_CMAKE', True),
            self.define_from_variant('SERIALBOX_ENABLE_C', 'c'),
            self.define_from_variant('SERIALBOX_ENABLE_PYTHON', 'python'),
            self.define_from_variant('SERIALBOX_ENABLE_FORTRAN', 'fortran'),
            self.define_from_variant('SERIALBOX_ENABLE_FTG', 'ftg'),
            self.define_from_variant('SERIALBOX_ENABLE_SDB', 'sdb'),
            self.define_from_variant('SERIALBOX_BUILD_SHARED', 'shared'),
            self.define_from_variant('SERIALBOX_EXAMPLES', 'examples'),
            self.define_from_variant('SERIALBOX_LOGGING', 'logging'),
            self.define_from_variant('SERIALBOX_ASYNC_API', 'async-api'),
            # CMake scripts of Serialbox (at least up to version 2.6.0) are
            # broken and do not instruct the compiler to link to the OpenSSL
            # libraries:
            self.define('SERIALBOX_USE_OPENSSL', False),
            self.define_from_variant(
                'SERIALBOX_ENABLE_EXPERIMENTAL_FILESYSTEM', 'std-filesystem'),
            self.define_from_variant('SERIALBOX_USE_NETCDF', 'netcdf'),
            self.define('SERIALBOX_TESTING', self.run_tests),
        ]

        if '+netcdf' in self.spec:
            args.append('-DNETCDF_ROOT:PATH=%s' % self.spec['netcdf-c'].prefix)

        return args
player1537-forks/spack
var/spack/repos/builtin/packages/r-iso/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RIso(RPackage):
    """Functions to Perform Isotonic Regression.

    Linear order and unimodal order (univariate) isotonic regression;
    bivariate isotonic regression with linear order on both variables."""

    cran = "Iso"

    version('0.0-18.1', sha256='2fa5f78a7603cbae94a5e38e791938596a053d48c609a7c120a19cbb7d93c66f')
    version('0.0-18', sha256='2d7e8c4452653364ee086d95cea620c50378e30acfcff129b7261e1756a99504')
    version('0.0-17', sha256='c007d6eaf6335a15c1912b0804276ff39abce27b7a61539a91b8fda653629252')

    depends_on('r@1.7.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/easi/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import shutil

from spack import *
from spack.cmd.pkg import GitExe


class Easi(CMakePackage):
    """easi is a library for the Easy Initialization of models in three
    (or less or more) dimensional domains.
    """

    homepage = "https://easyinit.readthedocs.io"
    git = "https://github.com/SeisSol/easi.git"

    maintainers = ['ThrudPrimrose', 'ravil-mobile', 'krenzland']

    version('develop', branch='master')
    version('1.1.2', tag='v1.1.2')

    variant('asagi', default=True, description='build with ASAGI support')
    variant('jit', default='impalajit', description='build with JIT support',
            values=('impalajit', 'impalajit-llvm', 'lua'), multi=False)

    depends_on('asagi +mpi +mpi3', when='+asagi')
    depends_on('yaml-cpp@0.6.2')
    depends_on('impalajit-llvm@1.0.0', when='jit=impalajit-llvm')
    depends_on('lua@5.3.2', when='jit=lua')
    depends_on('git', type='build', when='jit=impalajit')

    conflicts('jit=impalajit', when='target=aarch64:')
    conflicts('jit=impalajit', when='target=ppc64:')
    conflicts('jit=impalajit', when='target=ppc64le:')
    conflicts('jit=impalajit', when='target=riscv64:')

    def pre_build(self):
        spec = self.spec
        if "jit=impalajit" in spec:
            impalajit_src = join_path(self.stage.source_path, 'impalajit')
            if os.path.isdir(impalajit_src):
                shutil.rmtree(impalajit_src)

            git_exe = GitExe()
            git_exe('clone',
                    'https://github.com/uphoffc/ImpalaJIT.git',
                    impalajit_src)

            with working_dir(join_path(impalajit_src, 'build'), create=True):
                cmake('..',
                      '-DCMAKE_INSTALL_PREFIX={0}'.format(self.spec.prefix))
                make()
                make('install')

    def cmake_args(self):
        self.pre_build()

        args = []
        args.append(self.define_from_variant('ASAGI', 'asagi'))

        with_impala = 'jit=impalajit' in self.spec
        with_impala |= 'jit=impalajit-llvm' in self.spec
        if with_impala:
            args.append(self.define('IMPALAJIT', True))
            backend_type = 'llvm' if 'jit=impalajit-llvm' in self.spec else 'original'
            args.append(self.define('IMPALAJIT_BACKEND', backend_type))

        if 'jit=lua' in self.spec:
            args.append(self.define('IMPALAJIT', False))
            args.append(self.define('LUA', True))

        return args
player1537-forks/spack
var/spack/repos/builtin/packages/xf86dga/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Xf86dga(AutotoolsPackage, XorgPackage):
    """dga is a simple test client for the XFree86-DGA extension."""

    homepage = "https://cgit.freedesktop.org/xorg/app/xf86dga"
    xorg_mirror_path = "app/xf86dga-1.0.3.tar.gz"

    version('1.0.3', sha256='acbf89f60a99b18c161d2beb0e4145a0fdf6c516f7f45fa52e547d88491f75c9')

    depends_on('libx11')
    depends_on('libxxf86dga@1.1:')

    depends_on('pkgconfig', type='build')
    depends_on('util-macros', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/albert/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Albert(MakefilePackage):
    """Albert is an interactive program to assist the specialist in the
    study of nonassociative algebra."""

    homepage = "https://people.cs.clemson.edu/~dpj/albertstuff/albert.html"
    url = "https://github.com/kentavv/Albert/archive/v4.0a_opt4.tar.gz"

    version('4.0a_opt4', sha256='80b9ee774789c9cd123072523cfb693c443c3624708a58a5af177a51f36b2c79')
    version('4.0a', sha256='caf49e24fb9bf2a09053d9bf022c4737ffe61d62ce9c6bc32aa03dded2a14913')

    depends_on('readline')

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install('albert', join_path(prefix.bin))
player1537-forks/spack
var/spack/repos/builtin/packages/py-numcodecs/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyNumcodecs(PythonPackage):
    """Numcodecs is a Python package providing buffer compression and
    transformation codecs for use in data storage and communication
    applications.
    """

    homepage = "https://github.com/zarr-developers/numcodecs"
    pypi = "numcodecs/numcodecs-0.6.4.tar.gz"
    git = "https://github.com/zarr-developers/numcodecs.git"

    # 'numcodecs.tests' excluded from 'import_modules' because it requires
    # an additional dependency on 'pytest'
    import_modules = ['numcodecs']

    version('master', branch='master', submodules=True)
    version('0.7.3', sha256='022b12ad83eb623ec53f154859d49f6ec43b15c36052fa864eaf2d9ee786dd85')
    version('0.6.4', sha256='ef4843d5db4d074e607e9b85156835c10d006afc10e175bda62ff5412fca6e4d')

    variant('msgpack', default=False, description='Codec to encode data as msgpacked bytes.')

    depends_on('python@3.6:3', when='@0.7:', type=('build', 'link', 'run'))
    depends_on('python@2.7:2.8,3.5:', when='@:0.6', type=('build', 'link', 'run'))
    depends_on('py-setuptools@18.1:', type='build')
    depends_on('py-setuptools-scm@1.5.5:', type='build')
    depends_on('py-cython', type='build')
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    depends_on('py-msgpack', type=('build', 'run'), when='+msgpack')

    patch('apple-clang-12.patch', when='%apple-clang@12:')
player1537-forks/spack
var/spack/repos/builtin/packages/r-biasedurn/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RBiasedurn(RPackage):
    """Biased Urn Model Distributions.

    Statistical models of biased sampling in the form of univariate and
    multivariate noncentral hypergeometric distributions, including
    Wallenius' noncentral hypergeometric distribution and Fisher's
    noncentral hypergeometric distribution (also called extended
    hypergeometric distribution). See vignette("UrnTheory") for explanation
    of these distributions."""

    cran = "BiasedUrn"

    version('1.07', sha256='2377c2e59d68e758a566452d7e07e88663ae61a182b9ee455d8b4269dda3228e')
player1537-forks/spack
var/spack/repos/builtin/packages/r-ggjoy/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGgjoy(RPackage):
    """Joyplots in 'ggplot2'.

    Joyplots provide a convenient way of visualizing changes in
    distributions over time or space."""

    cran = "ggjoy"

    version('0.4.1', sha256='d2f778bc40203d7fbb7c81b40beed8614c36ea10448e911663cc6109aa685504')
    version('0.4.0', sha256='cb9ef790921ffcd3cfb6a55b409d17ccae9e8f5fdd2a28e55ea2ccfa8efd44e8')
    version('0.3.0', sha256='bb6d5172deda6cc54d2647644c1056944bc886d48fe1f11a23afd518eaf5cc97')
    version('0.2.0', sha256='27c28e9b3aa333ee6f518ee5c1cf6533fdaefa4e205396cd4636bcf0d193e6a2')

    depends_on('r@3.2:', type=('build', 'run'))
    depends_on('r-ggplot2', type=('build', 'run'))
    depends_on('r-ggridges@0.4.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/py-umi-tools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyUmiTools(PythonPackage):
    """Tools for handling Unique Molecular Identifiers in NGS data sets"""

    homepage = "https://github.com/CGATOxford/UMI-tools"
    url = "https://github.com/CGATOxford/UMI-tools/archive/0.5.3.tar.gz"

    version('1.0.0', sha256='7f73ef29120134123351c19089e9b7b7c03a8f241a19f8cb0e43d17f0e2f9fc7')
    version('0.5.5', sha256='9834a4388dd1ea0b971071009db7ccdbd447c6019796a3c061b0bb383c9ad992')
    version('0.5.4', sha256='a03e6babf188d0618a63f083b4da18120b9e8b4d473af71b585dba7de347e962')
    version('0.5.3', sha256='d599f15c48c96a96ba667db1f364ebfed4ba733dd30469f9656c1717282d2ecb')

    depends_on('python@2.7:')
    depends_on('py-setuptools@1.1:', type='build')
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    depends_on('py-pandas@0.12:', type=('build', 'run'))
    depends_on('py-pysam@0.8.4:', type=('build', 'run'))
    depends_on('py-future', type=('build', 'run'))
    depends_on('py-six', type=('build', 'run'))
    depends_on('py-regex', type=('build', 'run'))
    depends_on('py-scipy', type=('build', 'run'))
    depends_on('py-matplotlib', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/camx/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import glob
import os

from spack import *


class Camx(MakefilePackage):
    '''Comprehensive Air Quality Model with Extensions.'''

    homepage = 'https://www.camx.com'

    # Upstream obfuscates their download URL to get you to fill out their
    # registration form and accept their license agreement.
    version('6.50',
            url='http://www.camx.com/getmedia/caaf7983-616b-4207-bd10-c2b404bda78d/CAMx_v6-50-src-180430.tgz',
            sha256='4a53f78e0722d85a9c7d8ed6732aff55163a4ce06f69b6bbc9e00a3bf36a756c')
    resource(when='@6.50',
             name='user_manual',
             url='http://www.camx.com/files/camxusersguide_v6-50.pdf',
             sha256='b02d9826d59f22f9daa5955bb7b9fd3e0ca86eb73017c5845896d40391c64588',
             expand=False,
             placement='doc')
    resource(when='@6.50',
             name='input_data',
             url='http://www.camx.com/getmedia/77ad8028-9388-4f5d-bcab-a418e15dde68/v6-50-specific-inputs-180430.tgz',
             sha256='89b58283e37b8e2bd550a8ec62208f241be72c78dc26da9c42ad63c34f54ebfb',
             placement='data')

    variant('mpi', default=True, description='Enable MPI')
    variant(
        'threads',
        default='pthread',
        description='Multithreading support',
        values=('pthread', 'openmp'),
        multi=False
    )

    depends_on('mpi', when='+mpi')

    parallel = False

    def patch(self):
        # Relax values in parameter file to fix fun errors like this:
        #
        #   ERROR in STARTUP:
        #   A parameter in the camx.prm is not sufficiently large.
        #   Please change the value for parameter: MXCELLS
        #   It should be set to a value of at least: 396
        with working_dir('Includes'):
            duplicate = 'camx.prm'
            os.remove(duplicate)
            orig = 'camx.prm.v{0}'.format(self.spec.version)
            prm = FileFilter(orig)
            prm.filter(r'MXCELLS = [^)]+', 'MXCELLS = 400 ')
            prm.filter(r'MXPTSRC = [^)]+', 'MXPTSRC = 1355961 ')
            prm.filter(r'MXLAYER = [^)]+', 'MXLAYER = 40 ')
            prm.filter(r'MXPIG = [^)]+', 'MXPIG = 100000 ')

    def edit(self, spec, prefix):
        makefile = FileFilter('Makefile')

        if spec.target.family == 'aarch64':
            makefile.filter('-mcmodel=medium', '-mcmodel=large')
            # Use a separate handle for the parlib Makefile so that the later
            # edits below keep targeting the top-level Makefile.
            makefile_mpi_util = FileFilter('./MPI/util/Makefile')
            makefile_mpi_util.filter('-mcmodel=medium', '-mcmodel=large')

        # Support Intel MPI.
        if spec['mpi'].name == 'intel-mpi':
            makefile.filter(
                'else ifneq (, $(findstring $(MPI),openmpi openMPI OPENMPI))',
                '''else ifneq (, $(findstring $(MPI),intel-mpi intel impi))
MPI_ECHO = "* MPI will be built in using Intel MPI"
MPI_INC = $(MPI_INST)/include
MPI_LIBS = -L$(CAMX_MPI)/util -lutil -lparlib ''' + spec['mpi'].libs.ld_flags + '''
MPI_STRING = IntelMPI
FC = $(MPI_INST)/bin64/mpifort
CC = $(MPI_INST)/bin64/mpicc
else ifneq (, $(findstring $(MPI),openmpi openMPI OPENMPI))''',
                string=True)
            makefile.filter('OPENMPI MVAPICH', 'OPENMPI MVAPICH IntelMPI',
                            string=True)

        if '+mpi' in spec:
            # Substitute CC, FC.
            makefile.filter('CC = .*', 'CC = ' + spec['mpi'].mpicc)
            makefile.filter('FC = .*', 'FC = ' + spec['mpi'].mpifc)
            # Fix parlib not being compiled.
            makefile.filter('all: comp_$(COMPILER)',
                            # Yes, flake8, Makefile rules needs tabs!
                            '''all: parlib comp_$(COMPILER)
parlib :
	$(MAKE) -C $(CAMX_MPI)/util # noqa: E101,W191
''', string=True)  # noqa: E101
            makefile_parlib = FileFilter('MPI/util/Makefile')
            makefile_parlib.filter('CC = .*', 'CC = ' + spec['mpi'].mpicc)
            makefile_parlib.filter('LIBS = .*',
                                   'LIBS = ' + spec['mpi'].libs.ld_flags)
            makefile_parlib.filter('MPI_INST = .*',
                                   'MPI_INST = ' + spec['mpi'].prefix)
        else:
            # Substitute CC, FC.
            makefile.filter('CC = .*', 'CC = ' + env['CC'])
            makefile.filter('FC = .*', 'FC = ' + env['FC'])

    @property
    def build_targets(self):
        # Set compiler.
        omp = ['', 'omp'][self.spec.satisfies('threads=openmp')]
        compiler = os.path.basename(env['FC']) + omp
        args = ['COMPILER={0}'.format(compiler)]

        # Set MPI.
        if '+mpi' in self.spec:
            mpi = self.spec['mpi']
            args += [
                'MPI={0}'.format(mpi.name),
                'MPI_INST={0}'.format(mpi.prefix),
            ]

        return args

    def install(self, spec, prefix):
        exe = glob.glob('CAMx.*')[0]
        mkdir(prefix.bin)
        install(exe, prefix.bin.camx)
        mkdirp(prefix.share.doc)
        install_tree('doc', prefix.share.doc, symlinks=False)
        mkdir(prefix.share.data)
        install_tree('data', prefix.share.data)
player1537-forks/spack
var/spack/repos/builtin/packages/cachefilesd/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Cachefilesd(MakefilePackage):
    """The cachefilesd daemon manages the cache files and directory that
    are used by network file systems such as AFS and NFS to do persistent
    caching to the local disk."""

    homepage = "https://people.redhat.com/~dhowells/fscache"
    url = "https://people.redhat.com/~dhowells/fscache/cachefilesd-0.10.tar.bz2"

    version('0.10.10', sha256='0d0309851efabd02b7c849f73535b8ad3f831570e83e4f65e42354da18e11a02')
    version('0.10.9', sha256='c897ec6704615f26de3ddc20ff30a191ce995cb8973d2cde88b4b28c1a1e6bca')
    version('0.10.7', sha256='193cca5efb37ee460a4ed8e1ed4878e3718e432ebe690ec4fe02486ef3f2494e')
    version('0.10.6', sha256='aaaaea887a5850c6fa01d09c80946e987411f6b550261f83967c671c65af959d')
    version('0.10.5', sha256='125ea4f6aef4bf8e936a7cc747b59e074537a8aed74cd1bab3f05d7fbc47287f')

    @when('target=aarch64:')
    def edit(self, spec, prefix):
        makefile = FileFilter('Makefile')
        makefile.filter(r'-m64', '', string=True)

    def install(self, spec, prefix):
        make('DESTDIR={0}'.format(prefix), 'install')

    def setup_run_environment(self, env):
        env.prepend_path('PATH', self.prefix.sbin)
player1537-forks/spack
var/spack/repos/builtin/packages/py-opentuner/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyOpentuner(PythonPackage):
    """An extensible framework for program autotuning."""

    homepage = "http://opentuner.org/"
    git = "https://github.com/jansel/opentuner.git"

    maintainers = ['matthiasdiener']

    version('0.8.7', commit='<PASSWORD>')
    version('0.8.2', commit='<PASSWORD>')
    version('0.8.0', commit='4cb9135')

    depends_on('python@3:', type=('build', 'run'), when='@0.8.1:')
    depends_on('python@2.7:2.8', type=('build', 'run'), when='@:0.8.0')
    depends_on('py-argparse@1.2.1:', when='^python@:2.6,3.0:3.1',
               type=('build', 'run'))
    depends_on('py-fn-py@0.2.12:', type=('build', 'run'))
    depends_on('py-future', type=('build', 'run'))
    depends_on('py-numpy@1.8.0:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-sqlalchemy@0.8.2:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/spectrum-mpi/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

from spack import *


class SpectrumMpi(BundlePackage):
    """IBM MPI implementation from Spectrum MPI."""

    has_code = False

    homepage = "https://www-03.ibm.com/systems/spectrum-computing/products/mpi"

    # https://www.ibm.com/docs/en/smpi/10.4
    version('10.4')

    provides('mpi')

    executables = ['^ompi_info$']

    @classmethod
    def determine_version(cls, exe):
        output = Executable(exe)(output=str, error=str)
        match = re.search(r'Spectrum MPI: (\S+)', output)
        if not match:
            return None
        version = match.group(1)
        return version

    @classmethod
    def determine_variants(cls, exes, version):
        compiler_suites = {
            'xl': {'cc': 'mpixlc', 'cxx': 'mpixlC',
                   'f77': 'mpixlf', 'fc': 'mpixlf'},
            'pgi': {'cc': 'mpipgicc', 'cxx': 'mpipgic++',
                    'f77': 'mpipgifort', 'fc': 'mpipgifort'},
            'default': {'cc': 'mpicc', 'cxx': 'mpicxx',
                        'f77': 'mpif77', 'fc': 'mpif90'}}

        def get_host_compiler(exe):
            output = Executable(exe)("--showme", output=str, error=str)
            match = re.search(r'^(\S+)', output)
            return match.group(1) if match else None

        def get_spack_compiler_spec(compilers_found):
            # check using cc for now, as everyone should have that defined.
            path = os.path.dirname(compilers_found['cc'])
            spack_compilers = spack.compilers.find_compilers([path])
            actual_compiler = None
            # check if the compiler actually matches the one we want
            for spack_compiler in spack_compilers:
                if os.path.dirname(spack_compiler.cc) == path:
                    actual_compiler = spack_compiler
                    break
            return actual_compiler.spec if actual_compiler else None

        results = []
        for exe in exes:
            dirname = os.path.dirname(exe)
            siblings = os.listdir(dirname)
            compilers_found = {}
            for compiler_suite in compiler_suites.values():
                for (compiler_class, compiler_name) in compiler_suite.items():
                    if compiler_name in siblings:
                        # Get the real name of the compiler
                        full_exe = os.path.join(dirname, compiler_name)
                        host_exe = get_host_compiler(full_exe)
                        if host_exe:
                            compilers_found[compiler_class] = host_exe
                if compilers_found:
                    break
            if compilers_found:
                compiler_spec = get_spack_compiler_spec(compilers_found)
                if compiler_spec:
                    variant = "%" + str(compiler_spec)
                else:
                    variant = ''
                # Use this variant when you need to define the
                # compilers explicitly
                #
                # results.append((variant, {'compilers': compilers_found}))
                #
                # Otherwise, use this simpler attribute
                results.append(variant)
            else:
                results.append('')
        return results

    def setup_dependent_package(self, module, dependent_spec):
        # get the compiler names
        if '%xl' in dependent_spec or '%xl_r' in dependent_spec:
            self.spec.mpicc = os.path.join(self.prefix.bin, 'mpixlc')
            self.spec.mpicxx = os.path.join(self.prefix.bin, 'mpixlC')
            self.spec.mpif77 = os.path.join(self.prefix.bin, 'mpixlf')
            self.spec.mpifc = os.path.join(self.prefix.bin, 'mpixlf')
        elif '%pgi' in dependent_spec:
            self.spec.mpicc = os.path.join(self.prefix.bin, 'mpipgicc')
            self.spec.mpicxx = os.path.join(self.prefix.bin, 'mpipgic++')
            self.spec.mpif77 = os.path.join(self.prefix.bin, 'mpipgifort')
            self.spec.mpifc = os.path.join(self.prefix.bin, 'mpipgifort')
        else:
            self.spec.mpicc = os.path.join(self.prefix.bin, 'mpicc')
            self.spec.mpicxx = os.path.join(self.prefix.bin, 'mpicxx')
            self.spec.mpif77 = os.path.join(self.prefix.bin, 'mpif77')
            self.spec.mpifc = os.path.join(self.prefix.bin, 'mpif90')

    def setup_dependent_build_environment(self, env, dependent_spec):
        if '%xl' in dependent_spec or '%xl_r' in dependent_spec:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpixlc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpixlC'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpixlf'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpixlf'))
        elif '%pgi' in dependent_spec:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpipgicc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpipgic++'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpipgifort'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpipgifort'))
        else:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpicc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpic++'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpif77'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpif90'))

        env.set('OMPI_CC', spack_cc)
        env.set('OMPI_CXX', spack_cxx)
        env.set('OMPI_FC', spack_fc)
        env.set('OMPI_F77', spack_f77)

        env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib)

    def setup_run_environment(self, env):
        # Because MPI functions as a compiler we need to setup the compilers
        # in the run environment, like any compiler
        if '%xl' in self.spec or '%xl_r' in self.spec:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpixlc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpixlC'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpixlf'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpixlf'))
        elif '%pgi' in self.spec:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpipgicc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpipgic++'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpipgifort'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpipgifort'))
        else:
            env.set('MPICC', os.path.join(self.prefix.bin, 'mpicc'))
            env.set('MPICXX', os.path.join(self.prefix.bin, 'mpic++'))
            env.set('MPIF77', os.path.join(self.prefix.bin, 'mpif77'))
            env.set('MPIF90', os.path.join(self.prefix.bin, 'mpif90'))
player1537-forks/spack
var/spack/repos/builtin/packages/c3d/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class C3d(CMakePackage):
    """Image processing and conversion tool based on ITK."""

    homepage = "https://github.com/pyushkevich/c3d"
    git = "https://github.com/pyushkevich/c3d.git"
    url = "https://github.com/pyushkevich/c3d/archive/refs/tags/v1.3.0.tar.gz"

    version('1.3.0', sha256="bd45482247fa4ac5ab98b3a775b5438390671e3e224a42f73967904b3895050d")

    depends_on('itk')

    def cmake_args(self):
        return ["-DCONVERT3D_USE_ITK_REMOTE_MODULES=OFF"]
player1537-forks/spack
var/spack/repos/builtin/packages/cyrus-sasl/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class CyrusSasl(AutotoolsPackage):
    """This is the Cyrus SASL API implementation. It can be used on the
    client or server side to provide authentication and authorization
    services."""

    homepage = "https://github.com/cyrusimap/cyrus-sasl"
    url = "https://github.com/cyrusimap/cyrus-sasl/archive/cyrus-sasl-2.1.27.tar.gz"

    version('2.1.27', sha256='b564d773803dc4cff42d2bdc04c80f2b105897a724c247817d4e4a99dd6b9976')
    version('2.1.26', sha256='7c14d1b5bd1434adf2dd79f70538617e6aa2a7bde447454b90b84ac5c4d034ba')
    version('2.1.25', sha256='8bfd4fa4def54c760e5061f2a74c278384c3b9807f02c4b07dab68b5894cc7c1')
    version('2.1.24', sha256='1df15c492f7ecb90be49531a347b3df21b041c2e0325dcc4fc5a6e98384c40dd')
    version('2.1.23', sha256='b1ec43f62d68446a6a5879925c63d94e26089c5a46cd83e061dd685d014c7d1f')

    depends_on('m4', type='build')
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('groff', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/r-randomfieldsutils/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRandomfieldsutils(RPackage):
    """Utilities for the Simulation and Analysis of Random Fields and
    Genetic Data.

    Various utilities are provided that might be used in spatial statistics
    and elsewhere. It delivers a method for solving linear equations that
    checks the sparsity of the matrix before any algorithm is used."""

    cran = "RandomFieldsUtils"

    version('1.1.0', sha256='f472602fed449a505a2e5787ab8a6c8c1b764335980adaeeb7b1f24069124a9d')
    version('0.5.6', sha256='07f484443dffab53fb530e56f1e36e7a59e77768638555975587b6a1e619480b')
    version('0.5.3', sha256='ea823cba2e254a9f534efb4b772c0aeef2039ee9ef99744e077b969a87f8031d')
    version('0.5.1', sha256='a95aab4e2025c4247503ff513570a65aa3c8e63cb7ce2979c9317a2798dfaca2')

    depends_on('r@3.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/mathematica/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from spack import *


class Mathematica(Package):
    """Mathematica: high-powered computation with thousands of Wolfram
    Language functions, natural language input, real-world data, mobile
    support.

    Note: A manual download is required for Mathematica. Spack will search
    your current directory for the download file. Alternatively, add this
    file to a mirror so that Spack can find it. For instructions on how to
    set up a mirror, see
    https://spack.readthedocs.io/en/latest/mirrors.html"""

    homepage = "https://www.wolfram.com/mathematica/"
    url = 'file://{0}/Mathematica_12.0.0_LINUX.sh'.format(os.getcwd())
    manual_download = True

    version('12.2.0', sha256='3b6676a203c6adb7e9c418a5484b037974287b5be09c64e7dfea74ddc0e400d7', expand=False)
    version('12.1.1', sha256='ad47b886be4a9864d70f523f792615a051d4ebc987d9a0f654b645b4eb43b30a', expand=False)
    version('12.0.0', sha256='b9fb71e1afcc1d72c200196ffa434512d208fa2920e207878433f504e58ae9d7', expand=False)

    # Licensing
    license_required = True
    license_comment = '#'
    license_files = ['Configuration/Licensing/mathpass']
    license_url = 'https://reference.wolfram.com/language/tutorial/RegistrationAndPasswords.html#857035062'

    def install(self, spec, prefix):
        # Backup .spack because Mathematica moves it but never restores it
        copy_tree(join_path(prefix, '.spack'),
                  join_path(self.stage.path, '.spack'))

        sh = which('sh')
        sh(self.stage.archive_file, '--', '-auto', '-verbose',
           '-targetdir={0}'.format(prefix),
           '-execdir={0}'.format(prefix.bin),
           '-selinux=y')

        # This is what most people would use on a cluster but the installer
        # does not symlink it
        ws_link_path = os.path.join(prefix.bin, 'wolframscript')
        if not os.path.exists(ws_link_path):
            ln = which('ln')
            ws_path = os.path.join(prefix, 'Executables', 'wolframscript')
            ln('-s', ws_path, ws_link_path)

        # Move back .spack where it belongs
        copy_tree(join_path(self.stage.path, '.spack'),
                  join_path(prefix, '.spack'))
player1537-forks/spack
var/spack/repos/builtin/packages/qtgraph/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from spack import *


class Qtgraph(QMakePackage):
    """The baseline library used in the CUDA-centric Open|SpeedShop Graphical
    User Interface (GUI) which allows Graphviz DOT formatted data to be
    imported into a Qt application by wrapping the Graphviz libcgraph and
    libgvc within the Qt Graphics View Framework."""

    homepage = "https://github.com/OpenSpeedShop/QtGraph"
    git = "https://github.com/OpenSpeedShop/QtGraph.git"

    version('develop', branch='master')
    version('1.0.0.0', branch='1.0.0.0')

    # qtgraph depends on these packages
    depends_on('qt@5.10.0:', when='@1.0.0.0:')

    depends_on("graphviz@2.40.1:", when='@develop')
    depends_on("graphviz@2.40.1", when='@1.0.0.0:')

    def setup_build_environment(self, env):
        env.set('GRAPHVIZ_ROOT', self.spec['graphviz'].prefix)
        env.set('INSTALL_ROOT', self.prefix)

    def setup_run_environment(self, env):
        # What library suffix should be used based on library existence
        if os.path.isdir(self.prefix.lib64):
            lib_dir = self.prefix.lib64
        else:
            lib_dir = self.prefix.lib

        # The implementor has set up the library and include paths in
        # a non-conventional way. We reflect that here.
        env.prepend_path('LD_LIBRARY_PATH', join_path(
            lib_dir, '{0}'.format(self.spec['qt'].version.up_to(3))))

        env.prepend_path('CPATH', self.prefix.include.QtGraph)
player1537-forks/spack
var/spack/repos/builtin/packages/py-google-crc32c/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyGoogleCrc32c(PythonPackage):
    """This package wraps the google/crc32c hardware-based implementation
    of the CRC32C hashing algorithm."""

    homepage = "https://github.com/googleapis/python-crc32c"
    pypi = "google-crc32c/google-crc32c-1.3.0.tar.gz"

    maintainers = ['marcusboden']

    version('1.3.0', sha256='276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df')

    depends_on('py-setuptools', type='build')
    depends_on('google-crc32c', type=('build', 'run'))

    def setup_build_environment(self, env):
        env.set('CRC32C_INSTALL_PREFIX', self.spec['google-crc32c'].prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/rocketmq/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Rocketmq(Package):
    """Apache RocketMQ is a distributed messaging and streaming platform
    with low latency, high performance and reliability, trillion-level
    capacity and flexible scalability."""

    homepage = "https://rocketmq.apache.org/"
    url = "https://archive.apache.org/dist/rocketmq/4.5.2/rocketmq-all-4.5.2-bin-release.zip"

    version('4.6.0', sha256='584910d50639297808dd0b86fcdfaf431efd9607009a44c6258d9a0e227748fe')
    version('4.5.2', sha256='f7711ef9c203d7133e70e0e1e887025d7dd80d29f6d5283ca6022b12576b8aba')
    version('4.5.1', sha256='0c46e4b652b007d07e9c456eb2e275126b9210c27cd56bee518809f33c8ed437')
    version('4.5.0', sha256='d75dc26291b47413f7c565bc65499501e3499f01beb713246586f72844e31042')
    version('4.4.0', sha256='8a948e240e8d2ebbf4c40c180105d088a937f82a594cd1f2ae527b20349f1d34')
    version('4.3.2', sha256='e31210a86266ee218eb6ff4f8ca6e211439895459c3bdad162067b573d9e3415')

    depends_on('java@8:', type='run')

    def install(self, spec, prefix):
        install_tree('.', prefix)