repo_name
stringlengths
6
97
path
stringlengths
3
341
text
stringlengths
8
1.02M
player1537-forks/spack
var/spack/repos/builtin/packages/pygmo/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Pygmo(CMakePackage):
    """Parallel Global Multiobjective Optimizer (and its Python alter ego
    PyGMO) is a C++ / Python platform to perform parallel computations of
    optimisation tasks (global and local) via the asynchronous generalized
    island model."""

    homepage = "https://esa.github.io/pygmo2/"
    url = "https://github.com/esa/pygmo2/archive/v2.18.0.tar.gz"
    git = "https://github.com/esa/pygmo2.git"

    version('master', branch='master')
    version('2.18.0', sha256='9f081cc973297894af09f713f889870ac452bfb32b471f9f7ba08a5e0bb9a125')

    variant('shared', default=True, description='Build shared libraries')

    # Run-time dependencies:
    # https://github.com/esa/pygmo2/blob/master/doc/install.rst#dependencies
    extends('python@3.4:')
    depends_on('pagmo2@2.18:')
    depends_on('boost@1.60:')
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-cloudpickle', type=('build', 'run'))

    # Build-time dependencies:
    # https://github.com/esa/pygmo2/blob/master/doc/install.rst#installation-from-source
    depends_on('py-pybind11@2.6:', type='build')
    depends_on('cmake@3.17:', type='build')

    def cmake_args(self):
        """Point the install at Python's platlib and honor the 'shared' variant."""
        args = [
            self.define('PYGMO_INSTALL_PATH', python_platlib),
            self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
        ]
        return args
player1537-forks/spack
var/spack/repos/builtin/packages/grnboost/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Grnboost(Package):
    """GRNBoost is a library built on top of Apache Spark that implements a
    scalable strategy for gene regulatory network (GRN) inference. See
    https://github.com/aertslab/GRNBoost/blob/master/docs/user_guide.md for
    the user guide. The location of xgboost4j-<version>.jar and GRNBoost.jar
    are set to $XGBOOST_JAR and $GRNBOOST_JAR. Path to xgboost4j-<version>.jar
    is also added to CLASSPATH."""

    homepage = "https://github.com/aertslab/GRNBoost"

    # NOTE(review): the commit hash was redacted ('<PASSWORD>') in this copy
    # of the recipe -- restore the real hash before this package can fetch.
    version('2017-10-9', git='https://github.com/aertslab/GRNBoost.git',
            commit='<PASSWORD>')

    depends_on('sbt', type='build')
    depends_on('java', type=('build', 'run'))
    depends_on('xgboost', type='run')
    depends_on('spark+hadoop', type='run')

    def setup_run_environment(self, env):
        """Expose the GRNBoost and xgboost4j jars through the environment."""
        grnboost_jar = join_path(self.prefix, 'target', 'scala-2.11',
                                 'GRNBoost.jar')
        xgboost_version = self.spec['xgboost'].version.string
        xgboost_jar = join_path(self.spec['xgboost'].prefix,
                                'xgboost4j-{0}.jar'.format(xgboost_version))

        env.set('GRNBOOST_JAR', grnboost_jar)
        env.set('JAVA_HOME', self.spec['java'].prefix)
        env.set('CLASSPATH', xgboost_jar)
        env.set('XGBOOST_JAR', xgboost_jar)

    def install(self, spec, prefix):
        """Build the assembly jar with sbt and install the target tree."""
        sbt = which('sbt')
        sbt('assembly')
        install_tree('target', prefix.target)
player1537-forks/spack
var/spack/repos/builtin/packages/tiptop/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Tiptop(AutotoolsPackage):
    """Tiptop is a performance monitoring tool for Linux."""

    homepage = "https://github.com/FeCastle/tiptop"
    git = "https://github.com/FeCastle/tiptop.git"

    # NOTE(review): the commit hash was redacted ('<PASSWORD>') in this copy
    # of the recipe -- restore the real hash before this package can fetch.
    version('master', commit='<PASSWORD>')

    depends_on('papi')
    depends_on('byacc', type='build')
    depends_on('flex', type='build')

    # Fix the perf_event_open syscall number on aarch64 targets.
    patch('NR_perf_counter_open_aarch64.patch', when='target=aarch64:')
player1537-forks/spack
var/spack/repos/builtin/packages/apr/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Apr(AutotoolsPackage):
    """Apache portable runtime."""

    homepage = 'https://apr.apache.org/'
    url = 'https://archive.apache.org/dist/apr/apr-1.7.0.tar.gz'

    version('1.7.0', sha256='48e9dbf45ae3fdc7b491259ffb6ccf7d63049ffacbc1c0977cced095e4c2d5a2')
    version('1.6.2', sha256='4fc24506c968c5faf57614f5d0aebe0e9d0b90afa47a883e1a1ca94f15f4a42e')
    version('1.5.2', sha256='1af06e1720a58851d90694a984af18355b65bb0d047be03ec7d659c746d6dbdb')

    patch('missing_includes.patch', when='@1.7.0')

    depends_on('uuid', type='link')

    @property
    def libs(self):
        """Locate the installed library, e.g. libapr-1, under the prefix."""
        # The soname carries only the major version (version.up_to(1)).
        libname = 'libapr-{0}'.format(self.version.up_to(1))
        return find_libraries([libname], root=self.prefix, recursive=True)
player1537-forks/spack
lib/spack/spack/cmd/style.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import argparse
import os
import re
import sys

import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir

import spack.bootstrap
import spack.paths
from spack.util.executable import which

# Python 2/3 compatibility shim for zip_longest.
if sys.version_info < (3, 0):
    from itertools import izip_longest  # novm

    zip_longest = izip_longest
else:
    from itertools import zip_longest  # novm

description = "runs source code style checks on spack"
section = "developer"
level = "long"


def grouper(iterable, n, fillvalue=None):
    "Collect data into fixed-length chunks or blocks"
    # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
    args = [iter(iterable)] * n
    for group in zip_longest(*args, fillvalue=fillvalue):
        yield filter(None, group)


#: List of directories to exclude from checks -- relative to spack root
exclude_directories = [
    os.path.relpath(spack.paths.external_path, spack.paths.prefix),
]

#: Order in which tools should be run. flake8 is last so that it can
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a spack spec needed to install it.
tool_order = [
    ("isort", spack.bootstrap.ensure_isort_in_path_or_raise),
    ("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise),
    ("black", spack.bootstrap.ensure_black_in_path_or_raise),
    ("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise),
]

#: tools we run in spack style
tools = {}


def is_package(f):
    """Whether flake8 should consider a file as a core file or a package.

    We run flake8 with different exceptions for the core and for
    packages, since we allow `from spack import *` and poking globals
    into packages.
    """
    return f.startswith("var/spack/repos/")


#: decorator for adding tools to the list
class tool(object):
    def __init__(self, name, required=False):
        self.name = name
        self.required = required

    def __call__(self, fun):
        # Register the decorated function under its tool name.
        tools[self.name] = (fun, self.required)
        return fun


def changed_files(base="develop", untracked=True, all_files=False, root=None):
    """Get list of changed files in the Spack repository.

    Arguments:
        base (str): name of base branch to evaluate differences with.
        untracked (bool): include untracked files in the list.
        all_files (bool): list all files in the repository.
        root (str): use this directory instead of the Spack prefix.
    """
    if root is None:
        root = spack.paths.prefix

    git = which("git", required=True)

    # ensure base is in the repo
    git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
        fail_on_error=False)
    if git.returncode != 0:
        tty.die(
            "This repository does not have a '%s' branch." % base,
            "spack style needs this branch to determine which files changed.",
            "Ensure that '%s' exists, or specify files to check explicitly." % base
        )

    range = "{0}...".format(base)

    git_args = [
        # Add changed files committed since branching off of develop
        ["diff", "--name-only", "--diff-filter=ACMR", range],
        # Add changed files that have been staged but not yet committed
        ["diff", "--name-only", "--diff-filter=ACMR", "--cached"],
        # Add changed files that are unstaged
        ["diff", "--name-only", "--diff-filter=ACMR"],
    ]

    # Add new files that are untracked
    if untracked:
        git_args.append(["ls-files", "--exclude-standard", "--other"])

    # add everything if the user asked for it
    if all_files:
        git_args.append(["ls-files", "--exclude-standard"])

    excludes = [
        os.path.realpath(os.path.join(root, f)) for f in exclude_directories
    ]
    changed = set()

    for arg_list in git_args:
        files = git(*arg_list, output=str).split("\n")

        for f in files:
            # Ignore non-Python files
            if not (f.endswith(".py") or f == "bin/spack"):
                continue

            # Ignore files in the exclude locations
            if any(os.path.realpath(f).startswith(e) for e in excludes):
                continue

            changed.add(f)

    return sorted(changed)


def setup_parser(subparser):
    """Declare the command-line options of ``spack style``."""
    subparser.add_argument(
        "-b", "--base", action="store", default="develop",
        help="branch to compare against to determine changed files (default: develop)",
    )
    subparser.add_argument(
        "-a", "--all", action="store_true",
        help="check all files, not just changed files",
    )
    subparser.add_argument(
        "-r", "--root-relative", action="store_true", default=False,
        help="print root-relative paths (default: cwd-relative)",
    )
    subparser.add_argument(
        "-U", "--no-untracked", dest="untracked", action="store_false",
        default=True,
        help="exclude untracked files from checks",
    )
    subparser.add_argument(
        "-f", "--fix", action="store_true", default=False,
        help="format automatically if possible (e.g., with isort, black)",
    )
    subparser.add_argument(
        "--no-isort", dest="isort", action="store_false",
        help="do not run isort (default: run isort if available)",
    )
    subparser.add_argument(
        "--no-flake8", dest="flake8", action="store_false",
        help="do not run flake8 (default: run flake8 or fail)",
    )
    subparser.add_argument(
        "--no-mypy", dest="mypy", action="store_false",
        help="do not run mypy (default: run mypy if available)",
    )
    subparser.add_argument(
        "--black", dest="black", action="store_true",
        help="run black if available (default: skip black)",
    )
    subparser.add_argument(
        "--root", action="store", default=None,
        help="style check a different spack instance",
    )
    subparser.add_argument(
        "files", nargs=argparse.REMAINDER, help="specific files to check"
    )


def cwd_relative(path, args):
    """Translate prefix-relative path to current working directory-relative."""
    return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)


def rewrite_and_print_output(
    output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
):
    """rewrite output with <file>:<line>: format to respect path args"""
    # print results relative to current working directory
    def translate(match):
        return replacement.format(
            cwd_relative(match.group(1), args), *list(match.groups()[1:])
        )

    for line in output.split("\n"):
        if not line:
            continue
        if not args.root_relative and re_obj:
            line = re_obj.sub(translate, line)
        print(" " + line)


def print_style_header(file_list, args):
    """Announce which tools are selected and which files will be checked."""
    tools = [tool for tool, _ in tool_order if getattr(args, tool)]
    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))

    # translate modified paths to cwd_relative if needed
    paths = [filename.strip() for filename in file_list]
    if not args.root_relative:
        paths = [cwd_relative(filename, args) for filename in paths]

    tty.msg("Modified files", *paths)
    sys.stdout.flush()


def print_tool_header(tool):
    sys.stdout.flush()
    tty.msg("Running %s checks" % tool)
    sys.stdout.flush()


def print_tool_result(tool, returncode):
    if returncode == 0:
        color.cprint(" @g{%s checks were clean}" % tool)
    else:
        color.cprint(" @r{%s found errors}" % tool)


@tool("flake8", required=True)
def run_flake8(flake8_cmd, file_list, args):
    """Run flake8 over the given files and report the combined status."""
    returncode = 0
    output = ""
    # run in chunks of 100 at a time to avoid line length limit
    # filename parameter in config *does not work* for this reliably
    for chunk in grouper(file_list, 100):
        output = flake8_cmd(
            # always run with config from running spack prefix
            "--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
            *chunk, fail_on_error=False, output=str
        )
        returncode |= flake8_cmd.returncode

        rewrite_and_print_output(output, args)

    print_tool_result("flake8", returncode)
    return returncode


@tool("mypy")
def run_mypy(mypy_cmd, file_list, args):
    """Type-check the spack and llnl packages with mypy."""
    # always run with config from running spack prefix
    mypy_args = [
        "--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
        "--package", "spack",
        "--package", "llnl",
        "--show-error-codes",
    ]
    # not yet, need other updates to enable this
    # if any([is_package(f) for f in file_list]):
    #     mypy_args.extend(["--package", "packages"])

    output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
    returncode = mypy_cmd.returncode

    rewrite_and_print_output(output, args)

    print_tool_result("mypy", returncode)
    return returncode


@tool("isort")
def run_isort(isort_cmd, file_list, args):
    """Check (or, with --fix, rewrite) import ordering with isort."""
    # always run with config from running spack prefix
    isort_args = ("--settings-path", os.path.join(spack.paths.prefix, "pyproject.toml"))
    if not args.fix:
        isort_args += ("--check", "--diff")

    pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
    replacement = "ERROR: {0} Imports are incorrectly sorted"
    returncode = 0
    for chunk in grouper(file_list, 100):
        packed_args = isort_args + tuple(chunk)
        output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
        returncode |= isort_cmd.returncode

        rewrite_and_print_output(output, args, pat, replacement)

    print_tool_result("isort", returncode)
    return returncode


@tool("black")
def run_black(black_cmd, file_list, args):
    """Check (or, with --fix, rewrite) formatting with black."""
    # always run with config from running spack prefix
    black_args = ("--config", os.path.join(spack.paths.prefix, "pyproject.toml"))
    if not args.fix:
        black_args += ("--check", "--diff")
        if color.get_color_when():  # only show color when spack would
            black_args += ("--color",)

    pat = re.compile("would reformat +(.*)")
    replacement = "would reformat {0}"
    returncode = 0
    output = ""
    # run in chunks of 100 at a time to avoid line length limit
    # filename parameter in config *does not work* for this reliably
    for chunk in grouper(file_list, 100):
        packed_args = black_args + tuple(chunk)
        output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
        returncode |= black_cmd.returncode

        rewrite_and_print_output(output, args, pat, replacement)

    print_tool_result("black", returncode)
    return returncode


def style(parser, args):
    """Entry point for ``spack style``: run each requested tool in order."""
    # ensure python version is new enough
    if sys.version_info < (3, 6):
        tty.die("spack style requires Python 3.6 or later.")

    # save initial working directory for relativizing paths later
    args.initial_working_dir = os.getcwd()

    # ensure that the config files we need actually exist in the spack prefix.
    # assertions b/c users should not ever see these errors -- they're checked in CI.
    assert os.path.isfile(os.path.join(spack.paths.prefix, "pyproject.toml"))
    assert os.path.isfile(os.path.join(spack.paths.prefix, ".flake8"))

    # validate spack root if the user provided one
    args.root = os.path.realpath(args.root) if args.root else spack.paths.prefix
    spack_script = os.path.join(args.root, "bin", "spack")
    if not os.path.exists(spack_script):
        tty.die(
            "This does not look like a valid spack root.",
            "No such file: '%s'" % spack_script
        )

    file_list = args.files
    if file_list:

        def prefix_relative(path):
            return os.path.relpath(os.path.abspath(os.path.realpath(path)), args.root)

        file_list = [prefix_relative(p) for p in file_list]

    return_code = 0
    with working_dir(args.root):
        if not file_list:
            file_list = changed_files(args.base, args.untracked, args.all)

        print_style_header(file_list, args)

        # First bootstrap the executables for every requested tool...
        commands = {}
        with spack.bootstrap.ensure_bootstrap_configuration():
            for tool_name, bootstrap_fn in tool_order:
                # Skip the tool if it was not requested
                if not getattr(args, tool_name):
                    continue

                commands[tool_name] = bootstrap_fn()

        # ...then run them in order, OR-ing the return codes together.
        for tool_name, bootstrap_fn in tool_order:
            # Skip the tool if it was not requested
            if not getattr(args, tool_name):
                continue

            run_function, required = tools[tool_name]
            print_tool_header(tool_name)
            return_code |= run_function(commands[tool_name], file_list, args)

    if return_code == 0:
        tty.msg(color.colorize("@*{spack style checks were clean}"))
    else:
        tty.error(color.colorize("@*{spack style found errors}"))

    return return_code
player1537-forks/spack
var/spack/repos/builtin/packages/r-minqa/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RMinqa(RPackage):
    """Derivative-free optimization algorithms by quadratic approximation.

    Derivative-free optimization by quadratic approximation based on an
    interface to Fortran implementations by <NAME>."""

    cran = "minqa"

    version('1.2.4', sha256='cfa193a4a9c55cb08f3faf4ab09c11b70412523767f19894e4eafc6e94cccd0c')

    depends_on('r-rcpp@0.9.10:', type=('build', 'run'))
    depends_on('gmake', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/x264/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.directives import depends_on, version


class X264(AutotoolsPackage):
    """Software library and application for encoding video streams"""

    homepage = "https://www.videolan.org/developers/x264.html"
    git = "https://code.videolan.org/videolan/x264.git"

    version("20210613", commit="5db6aa6cab1b146e07b60cc1736a01f21da01154")

    depends_on("nasm")

    def configure_args(self):
        """Build the shared library with position-independent code."""
        return ["--enable-shared", "--enable-pic"]
player1537-forks/spack
var/spack/repos/builtin/packages/openmc/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Openmc(CMakePackage):
    """OpenMC is a community-developed Monte Carlo neutron and photon
    transport simulation code. It is capable of performing fixed source,
    k-eigenvalue, and subcritical multiplication calculations on models
    built using either a constructive solid geometry or CAD representation.
    OpenMC supports both continuous-energy and multigroup transport. The
    continuous-energy particle interaction data is based on a native HDF5
    format that can be generated from ACE files produced by NJOY.
    Parallelism is enabled via a hybrid MPI and OpenMP programming model."""

    homepage = "https://docs.openmc.org/"
    url = "https://github.com/openmc-dev/openmc/tarball/v0.13.0"
    git = "https://github.com/openmc-dev/openmc.git"

    version('develop', branch='develop', submodules=True)
    version('master', branch='master', submodules=True)
    version('0.13.0', commit='cff247e35785e7236d67ccf64a3401f0fc50a469', submodules=True)
    # NOTE(review): the 0.12.x commit hashes were redacted ('<KEY>') in this
    # copy of the recipe -- restore the real hashes before these can fetch.
    version('0.12.2', commit='<KEY>', submodules=True)
    version('0.12.1', commit='<KEY>', submodules=True)
    version('0.12.0', commit='<KEY>', submodules=True)
    version('0.11.0', sha256='19a9d8e9c3b581e9060fbd96d30f1098312d217cb5c925eb6372a5786d9175af')
    version('0.10.0', sha256='47650cb45e2c326ae439208d6f137d75ad3e5c657055912d989592c6e216178f')

    variant('mpi', default=False, description='Enable MPI support')
    variant('openmp', default=True, description='Enable OpenMP support')
    variant('optimize', default=False, description='Enable optimization flags')
    variant('debug', default=False, description='Enable debug flags')

    depends_on('git', type='build')
    depends_on('hdf5+hl~mpi', when='~mpi')
    depends_on('mpi', when='+mpi')
    depends_on('hdf5+hl+mpi', when='+mpi')

    def cmake_args(self):
        """Assemble CMake options from the selected variants."""
        spec = self.spec

        # forcing bc sometimes goes to lib64
        options = ['-DCMAKE_INSTALL_LIBDIR=lib']

        if '+mpi' in spec:
            options.append('-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc)
            options.append('-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx)

        options.append(self.define_from_variant('openmp'))
        options.append(self.define_from_variant('optimize'))
        options.append(self.define_from_variant('debug'))

        # Map the optimize/debug variants onto the CMake build type.
        if '+optimize' in spec:
            spec.variants['build_type'].value = 'Release'
        if '+debug' in spec:
            spec.variants['build_type'].value = 'Debug'

        return options
player1537-forks/spack
var/spack/repos/builtin/packages/r-affycontam/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RAffycontam(RPackage):
    """structured corruption of affymetrix cel file data.

    structured corruption of cel file data to demonstrate QA
    effectiveness"""

    bioc = "affyContam"

    version('1.52.0', commit='47c1d86da330f157d3ece0e26b0657d66a5ca0c9')
    # NOTE(review): the commit hashes below were redacted
    # ('<PASSWORD>'/'<KEY>') in this copy of the recipe -- restore the real
    # hashes before these versions can fetch.
    version('1.48.0', commit='<PASSWORD>')
    version('1.42.0', commit='<PASSWORD>')
    version('1.40.0', commit='<PASSWORD>')
    version('1.38.0', commit='<PASSWORD>')
    version('1.36.0', commit='<PASSWORD>')
    version('1.34.0', commit='<KEY>')

    depends_on('r@2.7.0:', type=('build', 'run'))
    depends_on('r-biobase', type=('build', 'run'))
    depends_on('r-affy', type=('build', 'run'))
    depends_on('r-affydata', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/popt/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Popt(AutotoolsPackage):
    """The popt library parses command line options."""

    homepage = "https://launchpad.net/popt"
    url = "https://launchpad.net/popt/head/1.16/+download/popt-1.16.tar.gz"

    version('1.16', sha256='e728ed296fe9f069a0e005003c3d6b2dde3d9cad453422a10d6558616d304cc8')

    depends_on('iconv')

    def patch(self):
        """Drop '-W' from configure, which older NVIDIA compilers reject."""
        # Remove flags not recognized by the NVIDIA compilers
        if self.spec.satisfies('%nvhpc@:20.11'):
            filter_file('CFLAGS="$CFLAGS -Wall -W"',
                        'CFLAGS="$CFLAGS -Wall"',
                        'configure', string=True)
player1537-forks/spack
var/spack/repos/builtin/packages/git/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import re from spack import * class Git(AutotoolsPackage): """Git is a free and open source distributed version control system designed to handle everything from small to very large projects with speed and efficiency. """ homepage = "http://git-scm.com" url = "https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.12.0.tar.gz" tags = ['build-tools'] executables = ['^git$'] # In order to add new versions here, add a new list entry with: # * version: {version} # * sha256: the sha256sum of the git-{version}.tar.gz # * sha256_manpages: the sha256sum of the corresponding manpage from # https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # You can find the source here: https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc releases = [ { 'version': '2.35.1', 'sha256': '9845a37dd01f9faaa7d8aa2078399d3aea91b43819a5efea6e2877b0af09bd43', 'sha256_manpages': 'd90da8b28fe0088519e0dc3c9f4bc85e429c7d6ccbaadcfe94aed47fb9c95504', }, { 'version': '2.35.0', 'sha256': 'c1d0adc777a457a3d9b2759024f173b34e61be96f7480ac5bc44216617834412', 'sha256_manpages': 'c0408a1c944c8e481d7f507bd90a7ee43c34617a1a7af2d76a1898dcf44fa430', }, { 'version': '2.34.1', 'sha256': 'fc4eb5ecb9299db91cdd156c06cdeb41833f53adc5631ddf8c0cb13eaa2911c1', 'sha256_manpages': '220f1ed68582caeddf79c4db15e4eaa4808ec01fd11889e19232f0a74d7f31b0', }, { 'version': '2.34.0', 'sha256': '0ce6222bfd31938b29360150286b51c77c643fa97740b1d35b6d1ceef8b0ecd7', 'sha256_manpages': 'fe66a69244def488306c3e05c1362ea53d8626d2a7e57cd7311df2dab1ef8356', }, { 'version': '2.33.1', 'sha256': '02047f8dc8934d57ff5e02aadd8a2fe8e0bcf94a7158da375e48086cc46fce1d', 'sha256_manpages': '292b08ca1b79422ff478a6221980099c5e3c0a38aba39d952063eedb68e27d93', }, { 'version': '2.33.0', 'sha256': 
'02d909d0bba560d3a1008bd00dd577621ffb57401b09175fab2bf6da0e9704ae', 'sha256_manpages': 'ba9cd0f29a3632a3b78f8ed2389f0780aa6e8fcbe258259d7c584920d19ed1f7', }, { 'version': '2.32.0', 'sha256': '6038f06d396ba9dab2eee541c7db6e7f9f847f181ec62f3d8441893f8c469398', 'sha256_manpages': 'b5533c40ea1688231c0e2df51cc0d1c0272e17fe78a45ba6e60cb8f61fa4a53c', }, { 'version': '2.31.1', 'sha256': '46d37c229e9d786510e0c53b60065704ce92d5aedc16f2c5111e3ed35093bfa7', 'sha256_manpages': 'd330498aaaea6928b0abbbbb896f6f605efd8d35f23cbbb2de38c87a737d4543' }, { 'version': '2.31.0', 'sha256': 'bc6168777883562569144d536e8a855b12d25d46870d95188a3064260d7784ee', 'sha256_manpages': 'a51b760c36be19113756839a9110b328a09abfff0d57f1c93ddac3974ccbc238' }, { 'version': '2.30.1', 'sha256': '23a3e53f0d2dd3e62a8147b24a1a91d6ffe95b92123ef4dbae04e9a6205e71c0', 'sha256_manpages': 'db323e1b242e9d0337363b1e538c8b879e4c46eedbf94d3bee9e65dab6d49138' }, { 'version': '2.30.0', 'sha256': 'd24c4fa2a658318c2e66e25ab67cc30038a35696d2d39e6b12ceccf024de1e5e', 'sha256_manpages': 'e23035ae232c9a5eda57db258bc3b7f1c1060cfd66920f92c7d388b6439773a6' }, { 'version': '2.29.2', 'sha256': '869a121e1d75e4c28213df03d204156a17f02fce2dc77be9795b327830f54195', 'sha256_manpages': '68b258e6d590cb78e02c0df741bbaeab94cbbac6d25de9da4fb3882ee098307b' }, { 'version': '2.29.0', 'sha256': 'fa08dc8424ef80c0f9bf307877f9e2e49f1a6049e873530d6747c2be770742ff', 'sha256_manpages': '8f3bf70ddb515674ce2e19572920a39b1be96af12032b77f1dd57898981fb151' }, { 'version': '2.28.0', 'sha256': 'f914c60a874d466c1e18467c864a910dd4ea22281ba6d4d58077cb0c3f115170', 'sha256_manpages': '3cfca28a88d5b8112ea42322b797a500a14d0acddea391aed0462aff1ab11bf7' }, { 'version': '2.27.0', 'sha256': '77ded85cbe42b1ffdc2578b460a1ef5d23bcbc6683eabcafbb0d394dffe2e787', 'sha256_manpages': '414e4b17133e54d846f6bfa2479f9757c50e16c013eb76167a492ae5409b8947' }, { 'version': '2.26.0', 'sha256': 'aa168c2318e7187cd295a645f7370cc6d71a324aafc932f80f00c780b6a26bed', 'sha256_manpages': 
'c1ffaf0b4cd1e80a0eb0d4039e208c9d411ef94d5da44e38363804e1a7961218' }, { 'version': '2.25.0', 'sha256': 'a98c9b96d91544b130f13bf846ff080dda2867e77fe08700b793ab14ba5346f6', 'sha256_manpages': '22b2380842ef75e9006c0358de250ead449e1376d7e5138070b9a3073ef61d44' }, { 'version': '2.23.0', 'sha256': 'e3396c90888111a01bf607346db09b0fbf49a95bc83faf9506b61195936f0cfe', 'sha256_manpages': 'a5b0998f95c2290386d191d34780d145ea67e527fac98541e0350749bf76be75' }, { 'version': '2.22.0', 'sha256': 'a4b7e4365bee43caa12a38d646d2c93743d755d1cea5eab448ffb40906c9da0b', 'sha256_manpages': 'f6a5750dfc4a0aa5ec0c0cc495d4995d1f36ed47591c3941be9756c1c3a1aa0a' }, { 'version': '2.21.0', 'sha256': '85eca51c7404da75e353eba587f87fea9481ba41e162206a6f70ad8118147bee', 'sha256_manpages': '14c76ebb4e31f9e55cf5338a04fd3a13bced0323cd51794ccf45fc74bd0c1080' }, { 'version': '2.20.1', 'sha256': 'edc3bc1495b69179ba4e272e97eff93334a20decb1d8db6ec3c19c16417738fd', 'sha256_manpages': 'e9c123463abd05e142defe44a8060ce6e9853dfd8c83b2542e38b7deac4e6d4c' }, { 'version': '2.19.2', 'sha256': 'db893ad69c9ac9498b09677c5839787eba2eb3b7ef2bc30bfba7e62e77cf7850', 'sha256_manpages': '60334ecd59ee10319af4a7815174d10991d1afabacd3b3129d589f038bf25542' }, { 'version': '2.19.1', 'sha256': 'ec4dc96456612c65bf6d944cee9ac640145ec7245376832b781cb03e97cbb796', 'sha256_manpages': 'bd27f58dc90a661e3080b97365eb7322bfa185de95634fc59d98311925a7d894' }, { 'version': '2.18.0', 'sha256': '94faf2c0b02a7920b0b46f4961d8e9cad08e81418614102898a55f980fa3e7e4', 'sha256_manpages': '6cf38ab3ad43ccdcd6a73ffdcf2a016d56ab6b4b240a574b0bb96f520a04ff55' }, { 'version': '2.17.1', 'sha256': 'ec6452f0c8d5c1f3bcceabd7070b8a8a5eea11d4e2a04955c139b5065fd7d09a', 'sha256_manpages': '9732053c1a618d2576c1751d0249e43702f632a571f84511331882beb360677d' }, { 'version': '2.17.0', 'sha256': '7a0cff35dbb14b77dca6924c33ac9fe510b9de35d5267172490af548ec5ee1b8', 'sha256_manpages': '41b58c68e90e4c95265c75955ddd5b68f6491f4d57b2f17c6d68e60bbb07ba6a' }, { 'version': '2.15.1', 
'sha256': '85fca8781a83c96ba6db384cc1aa6a5ee1e344746bafac1cbe1f0fe6d1109c84', 'sha256_manpages': '472454c494c9a7f50ad38060c3eec372f617de654b20f3eb3be59fc17a683fa1', }, { 'version': '2.14.1', 'sha256': '01925349b9683940e53a621ee48dd9d9ac3f9e59c079806b58321c2cf85a4464', 'sha256_manpages': '8c5810ce65d44cd333327d3a115c5b462712a2f81225d142e07bd889ad8dc0e0', }, { 'version': '2.13.0', 'sha256': '9f2fa8040ebafc0c2caae4a9e2cb385c6f16c0525bcb0fbd84938bc796372e80', 'sha256_manpages': 'e764721796cad175a4cf9a4afe7fb4c4fc57582f6f9a6e214239498e0835355b', }, { 'version': '2.12.2', 'sha256': 'd9c6d787a24670d7e5100db2367c250ad9756ef8084fb153a46b82f1d186f8d8', 'sha256_manpages': '6e7ed503f1190734e57c9427df356b42020f125fa36ab0478777960a682adf50', }, { 'version': '2.12.1', 'sha256': '65d62d10caf317fc1daf2ca9975bdb09dbff874c92d24f9529d29a7784486b43', 'sha256_manpages': '35e46b8acd529ea671d94035232b1795919be8f3c3a363ea9698f1fd08d7d061', }, { 'version': '2.12.0', 'sha256': '882f298daf582a07c597737eb4bbafb82c6208fe0e73c047defc12169c221a92', 'sha256_manpages': '1f7733a44c59f9ae8dd321d68a033499a76c82046025cc2a6792299178138d65', }, { 'version': '2.11.1', 'sha256': 'a1cdd7c820f92c44abb5003b36dc8cb7201ba38e8744802399f59c97285ca043', 'sha256_manpages': 'ee567e7b0f95333816793714bb31c54e288cf8041f77a0092b85e62c9c2974f9', }, { 'version': '2.11.0', 'sha256': 'd3be9961c799562565f158ce5b836e2b90f38502d3992a115dfb653d7825fd7e', 'sha256_manpages': '437a0128acd707edce24e1a310ab2f09f9a09ee42de58a8e7641362012dcfe22', }, { 'version': '2.9.3', 'sha256': 'a252b6636b12d5ba57732c8469701544c26c2b1689933bd1b425e603cbb247c0', 'sha256_manpages': '8ea1a55b048fafbf0c0c6fcbca4b5b0f5e9917893221fc7345c09051d65832ce', }, { 'version': '2.9.2', 'sha256': '3cb09a3917c2d8150fc1708f3019cf99a8f0feee6cd61bba3797e3b2a85be9dc', 'sha256_manpages': 'ac5c600153d1e4a1c6494e250cd27ca288e7667ad8d4ea2f2386f60ba1b78eec', }, { 'version': '2.9.1', 'sha256': 'c2230873bf77f93736473e6a06501bf93eed807d011107de6983dc015424b097', 
'sha256_manpages': '324f5f173f2bd50b0102b66e474b81146ccc078d621efeb86d7f75e3c1de33e6', }, { 'version': '2.9.0', 'sha256': 'bff7560f5602fcd8e37669e0f65ef08c6edc996e4f324e4ed6bb8a84765e30bd', 'sha256_manpages': '35ba69a8560529aa837e395a6d6c8d42f4d29b40a3c1cc6e3dc69bb1faadb332', }, { 'version': '2.8.4', 'sha256': '626e319f8a24fc0866167ea5f6bf3e2f38f69d6cb2e59e150f13709ca3ebf301', 'sha256_manpages': '953a8eadaf4ae96dbad2c3ec12384c677416843917ef83d94b98367ffd55afc0', }, { 'version': '2.8.3', 'sha256': '2dad50c758339d6f5235309db620e51249e0000ff34aa2f2acbcb84c2123ed09', 'sha256_manpages': '2dad50c758339d6f5235309db620e51249e0000ff34aa2f2acbcb84c2123ed09', }, { 'version': '2.8.2', 'sha256': 'a029c37ee2e0bb1efea5c4af827ff5afdb3356ec42fc19c1d40216d99e97e148', 'sha256_manpages': '82d322211aade626d1eb3bcf3b76730bfdd2fcc9c189950fb0a8bdd69c383e2f', }, { 'version': '2.8.1', 'sha256': 'cfc66324179b9ed62ee02833f29d39935f4ab66874125a3ab9d5bb9055c0cb67', 'sha256_manpages': 'df46de0c172049f935cc3736361b263c5ff289b77077c73053e63ae83fcf43f4', }, { 'version': '2.8.0', 'sha256': '2c6eee5506237e0886df9973fd7938a1b2611ec93d07f64ed3447493ebac90d1', 'sha256_manpages': '2c48902a69df3bec3b8b8f0350a65fd1b662d2f436f0e64d475ecd1c780767b6', }, { 'version': '2.7.3', 'sha256': '30d067499b61caddedaf1a407b4947244f14d10842d100f7c7c6ea1c288280cd', 'sha256_manpages': '84b487c9071857ab0f15f11c4a102a583d59b524831cda0dc0954bd3ab73920b', }, { 'version': '2.7.1', 'sha256': 'b4ab42798b7fb038eaefabb0c32ce9dbde2919103e5e2a35adc35dd46258a66f', 'sha256_manpages': '0313cf4d283336088883d8416692fb6c547512233e11dbf06e5b925b7e762d61', }, ] for release in releases: version(release['version'], sha256=release['sha256']) resource( name='git-manpages', url="https://www.kernel.org/pub/software/scm/git/git-manpages-{0}.tar.gz".format( release['version']), sha256=release['sha256_manpages'], placement='git-manpages', when='@{0} +man'.format(release['version'])) variant('tcltk', default=False, description='Gitk: provide Tcl/Tk in 
the run environment') variant('svn', default=False, description='Provide SVN Perl dependency in run environment') variant('perl', default=True, description='Do not use Perl scripts or libraries at all') variant('nls', default=True, description='Enable native language support') variant('man', default=True, description='Install manual pages') depends_on('curl') depends_on('expat') depends_on('gettext', when='+nls') depends_on('iconv') depends_on('libidn2') depends_on('openssl') depends_on('pcre', when='@:2.13') depends_on('pcre2', when='@2.14:') depends_on('perl', when='+perl') depends_on('zlib') depends_on('openssh', type='run') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') depends_on('tk', type=('build', 'link'), when='+tcltk') depends_on('perl-alien-svn', type='run', when='+svn') conflicts('+svn', when='~perl') @classmethod def determine_version(cls, exe): output = Executable(exe)('--version', output=str, error=str) match = re.search( spack.fetch_strategy.GitFetchStrategy.git_version_re, output) return match.group(1) if match else None @classmethod def determine_variants(cls, exes, version_str): prefix = os.path.dirname(exes[0]) variants = '' if 'gitk' in os.listdir(prefix): variants += '+tcltk' else: variants += '~tcltk' return variants # See the comment in setup_build_environment re EXTLIBS. def patch(self): filter_file(r'^EXTLIBS =$', '#EXTLIBS =', 'Makefile') def setup_build_environment(self, env): # We use EXTLIBS rather than LDFLAGS so that git's Makefile # inserts the information into the proper place in the link commands # (alongside the # other libraries/paths that configure discovers). # LDFLAGS is inserted *before* libgit.a, which requires libintl. # EXTFLAGS is inserted *after* libgit.a. # This depends on the patch method above, which keeps the Makefile # from stepping on the value that we pass in via the environment. 
# # The test avoids failures when git is an external package. # In that case the node in the DAG gets truncated and git DOES NOT # have a gettext dependency. if '+nls' in self.spec: if 'intl' in self.spec['gettext'].libs.names: env.append_flags('EXTLIBS', '-L{0} -lintl'.format( self.spec['gettext'].prefix.lib)) env.append_flags('CFLAGS', '-I{0}'.format( self.spec['gettext'].prefix.include)) if '~perl' in self.spec: env.append_flags('NO_PERL', '1') def configure_args(self): spec = self.spec configure_args = [ '--with-curl={0}'.format(spec['curl'].prefix), '--with-expat={0}'.format(spec['expat'].prefix), '--with-iconv={0}'.format(spec['iconv'].prefix), '--with-openssl={0}'.format(spec['openssl'].prefix), '--with-zlib={0}'.format(spec['zlib'].prefix), ] if '+perl' in self.spec: configure_args.append('--with-perl={0}'.format(spec['perl'].command.path)) if '^pcre' in self.spec: configure_args.append('--with-libpcre={0}'.format( spec['pcre'].prefix)) if '^pcre2' in self.spec: configure_args.append('--with-libpcre2={0}'.format( spec['pcre2'].prefix)) if '+tcltk' in self.spec: configure_args.append('--with-tcltk={0}'.format( self.spec['tk'].prefix.bin.wish)) else: configure_args.append('--without-tcltk') return configure_args @run_after('configure') def filter_rt(self): if self.spec.satisfies('platform=darwin'): # Don't link with -lrt; the system has no (and needs no) librt filter_file(r' -lrt$', '', 'Makefile') def check(self): make('test') def build(self, spec, prefix): args = [] if '~nls' in self.spec: args.append('NO_GETTEXT=1') make(*args) if spec.satisfies('platform=darwin'): with working_dir('contrib/credential/osxkeychain'): make() def install(self, spec, prefix): args = ["install"] if '~nls' in self.spec: args.append('NO_GETTEXT=1') make(*args) if spec.satisfies('platform=darwin'): install('contrib/credential/osxkeychain/git-credential-osxkeychain', join_path(prefix, "libexec", "git-core")) @run_after('install') def install_completions(self): 
install_tree('contrib/completion', self.prefix.share) @run_after('install') def install_manpages(self): if '~man' in self.spec: return prefix = self.prefix with working_dir('git-manpages'): install_tree('man1', prefix.share.man.man1) install_tree('man5', prefix.share.man.man5) install_tree('man7', prefix.share.man.man7) def setup_run_environment(self, env): # Setup run environment if using SVN extension # Libs from perl-alien-svn and apr-util are required in # LD_LIBRARY_PATH # TODO: extend to other platforms if "+svn platform=linux" in self.spec: perl_svn = self.spec['perl-alien-svn'] env.prepend_path('LD_LIBRARY_PATH', join_path( perl_svn.prefix, 'lib', 'perl5', 'x86_64-linux-thread-multi', 'Alien', 'SVN'))
player1537-forks/spack
var/spack/repos/builtin/packages/orc/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Orc(CMakePackage):
    """the smallest, fastest columnar storage for Hadoop workloads."""

    homepage = "https://orc.apache.org/"
    url = "https://github.com/apache/orc/archive/rel/release-1.6.5.tar.gz"

    version('1.6.5', sha256='df5885db8fa2e4435db8d486c6c7fc4e2c565d6197eee27729cf9cbdf36353c0')

    depends_on('maven')
    depends_on('openssl')
    depends_on('zlib@1.2.11:')
    depends_on('pcre')
    depends_on('protobuf@3.5.1:')
    depends_on('zstd@1.4.5:')
    depends_on('googletest@1.8.0:')
    depends_on('snappy@1.1.7:')
    depends_on('lz4@1.7.5:')

    patch('thirdparty.patch')

    def cmake_args(self):
        """Assemble the CMake options for a PIC, library-only build."""
        # Compile everything position-independent and disable the parts of
        # the build (vendored libs, libhdfspp, tools, tests) we do not ship.
        args = [
            '-DCMAKE_CXX_FLAGS=' + self.compiler.cxx_pic_flag,
            '-DCMAKE_C_FLAGS=' + self.compiler.cc_pic_flag,
            '-DINSTALL_VENDORED_LIBS:BOOL=OFF',
            '-DBUILD_LIBHDFSPP:BOOL=OFF',
            '-DBUILD_TOOLS:BOOL=OFF',
            '-DBUILD_CPP_TESTS:BOOL=OFF',
        ]
        # Point each bundled third-party dependency at the Spack prefix
        # instead of letting the build vendor its own copy.
        args.extend(
            '-D{0}_HOME={1}'.format(dep.upper(), self.spec[dep].prefix)
            for dep in ('snappy', 'zlib', 'zstd', 'lz4', 'protobuf'))
        return args
player1537-forks/spack
var/spack/repos/builtin/packages/mira/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Mira(AutotoolsPackage):
    """MIRA is a multi-pass DNA sequence data assembler/mapper for whole
    genome and EST/RNASeq projects."""

    homepage = "https://sourceforge.net/projects/mira-assembler/"
    url = "https://downloads.sourceforge.net/project/mira-assembler/MIRA/stable/mira-4.0.2.tar.bz2"

    version('4.0.2', sha256='a32cb2b21e0968a5536446287c895fe9e03d11d78957554e355c1080b7b92a80')

    depends_on('boost@1.46:')
    depends_on('expat@2.0.1:')
    depends_on('gperftools')

    conflicts('%gcc@6:', when='@:4.0.2')

    def patch(self):
        # quirks.C uses boost::filesystem but is missing the <iostream>
        # include that newer toolchains require; inject it right after the
        # boost header.
        filter_file('#include <boost/filesystem.hpp>',
                    '#include <boost/filesystem.hpp>\n#include <iostream>',
                    join_path('src', 'progs', 'quirks.C'))

    def configure_args(self):
        """Point configure at Spack's boost and expat installs."""
        spec = self.spec
        return [
            '--with-boost=%s' % spec['boost'].prefix,
            '--with-expat=%s' % spec['expat'].prefix,
        ]
player1537-forks/spack
lib/spack/spack/compilers/oneapi.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from os.path import dirname

from spack.compiler import Compiler


class Oneapi(Compiler):
    """Compiler definition for Intel's oneAPI compilers (icx/icpx/ifx)."""

    # Subclasses use possible names of C compiler
    cc_names = ['icx']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['icpx']

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ['ifx']

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ['ifx']

    # Named wrapper links within build_env_path
    link_paths = {'cc': 'oneapi/icx',
                  'cxx': 'oneapi/icpx',
                  'f77': 'oneapi/ifx',
                  'fc': 'oneapi/ifx'}

    # Cray programming-environment module names for this compiler family.
    PrgEnv = 'PrgEnv-oneapi'
    PrgEnv_compiler = 'oneapi'

    version_argument = '--version'
    # Matches the version token in either the C/C++ driver banner
    # ("oneAPI DPC++/C++ Compiler <ver>") or the Fortran banner
    # ("... (IFORT) <ver>").
    version_regex = r'(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))) (\S+)'

    @property
    def verbose_flag(self):
        return "-v"

    # Runtime libraries an executable built with this compiler may link;
    # used by Spack to validate compiler installs.
    required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng',
                     'libsvml', 'libintlc', 'libimf',
                     'libsycl', 'libOpenCL']

    @property
    def debug_flags(self):
        return ['-debug', '-g', '-g0', '-g1', '-g2', '-g3']

    @property
    def opt_flags(self):
        return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os']

    @property
    def openmp_flag(self):
        return "-fiopenmp"
    # There may be some additional options here for offload, e.g. :
    #  -fopenmp-simd          Emit OpenMP code only for SIMD-based constructs.
    #  -fopenmp-targets=<value>
    #  -fopenmp-version=<value>
    #  -fopenmp               Parse OpenMP pragmas and generate parallel code.
    #  -qno-openmp            Disable OpenMP support
    #  -qopenmp-link=<value>  Choose whether to link with the static or
    #                         dynamic OpenMP libraries. Default is dynamic.
    #  -qopenmp-simd          Emit OpenMP code only for SIMD-based constructs.
    #  -qopenmp-stubs         enables the user to compile OpenMP programs in
    #                         sequential mode. The OpenMP directives are
    #                         ignored and a stub OpenMP library is linked.
    #  -qopenmp-threadprivate=<value>
    #  -qopenmp               Parse OpenMP pragmas and generate parallel code.
    #  -static-openmp         Use the static host OpenMP runtime while
    #                         linking.
    #  -Xopenmp-target=<triple> <arg>
    #  -Xopenmp-target <arg>  Pass <arg> to the target offloading toolchain.
    # Source: icx --help output

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c1x"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    @property
    def stdcxx_libs(self):
        return ('-cxxlib', )

    def setup_custom_environment(self, pkg, env):
        # workaround bug in icpx driver where it requires sycl-post-link is
        # on the PATH
        # It is located in the same directory as the driver. Error message:
        #   clang++: error: unable to execute command:
        #   Executable "sycl-post-link" doesn't exist!
        if self.cxx:
            env.prepend_path('PATH', dirname(self.cxx))
player1537-forks/spack
var/spack/repos/builtin/packages/cdecimal/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Cdecimal(AutotoolsPackage):
    """Fast drop-in replacement for the ``decimal`` module in Python's
    standard library, implemented in C."""

    homepage = "https://www.bytereef.org/mpdecimal/"
    url = "https://www.bytereef.org/software/mpdecimal/releases/cdecimal-2.3.tar.gz"

    version("2.3", sha256="d737cbe43ed1f6ad9874fb86c3db1e9bbe20c0c750868fde5be3f379ade83d8b")

    # Adjust the shared library's install_name on macOS so it resolves from
    # its installed location.
    patch("darwin_install_name.patch", when="platform=darwin")
player1537-forks/spack
var/spack/repos/builtin/packages/h5cpp/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class H5cpp(CMakePackage):
    """Easy to use HDF5 C++ templates for Serial and Parallel HDF5"""

    homepage = "http://h5cpp.org"
    url = "https://github.com/steven-varga/h5cpp/archive/v1.10.4-5.tar.gz"
    git = "https://github.com/steven-varga/h5cpp.git"

    maintainers = ['eschnett']

    version('master', branch='master')
    version('1.10.4-6', sha256='4fbc8e777dc78a37ec2fe8c7b6a47114080ffe587f083e83a2046b5e794aef93')
    version('1.10.4-5', sha256='661ccc4d76e081afc73df71ef11d027837d92dd1089185f3650afcaec9d418ec')

    variant('mpi', default=True, description='Include MPI support')

    depends_on('cmake @3.10:', type='build')
    depends_on('hdf5 @1.10.4:')
    depends_on('hdf5 +mpi', when='+mpi')
    depends_on('mpi', when='+mpi')

    def cmake_args(self):
        """Point the build at Spack's HDF5 and disable the test suite."""
        hdf5 = self.spec['hdf5']
        args = [
            '-DHDF5_INCLUDE_DIRS={0}'.format(hdf5.headers.directories[0]),
            '-DHDF5_LIBRARIES={0}'.format(hdf5.libs.directories[0]),
            '-DH5CPP_BUILD_TESTS=OFF',
        ]
        return args
player1537-forks/spack
var/spack/repos/builtin/packages/r-ggbeeswarm/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RGgbeeswarm(RPackage): """Categorical Scatter (Violin Point) Plots. Provides two methods of plotting categorical scatter plots such that the arrangement of points within a category reflects the density of data at that region, and avoids over-plotting.""" cran = "ggbeeswarm" version('0.6.0', sha256='bbac8552f67ff1945180fbcda83f7f1c47908f27ba4e84921a39c45d6e123333') depends_on('r@3.0.0:', type=('build', 'run')) depends_on('r-beeswarm', type=('build', 'run')) depends_on('r-ggplot2@2.0:', type=('build', 'run')) depends_on('r-vipor', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-miniui/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RMiniui(RPackage): """Shiny UI Widgets for Small Screens. Provides UI widget and layout functions for writing Shiny apps that work well on small screens.""" cran = "miniUI" version('0.1.1.1', sha256='452b41133289f630d8026507263744e385908ca025e9a7976925c1539816b0c0') depends_on('r-shiny@0.13:', type=('build', 'run')) depends_on('r-htmltools@0.3:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/bref3/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os.path

from spack import *


class Bref3(Package):
    """Bref3: Converts from VCF format to bref3 format."""

    homepage = "https://faculty.washington.edu/browning/beagle/beagle.html"

    version('2019-11-25', sha256='969c0881050c4a48d19be9ea64bf49fa68c1403b69f9f739bbfd865dda639b2d',
            expand=False,
            url='https://faculty.washington.edu/browning/beagle/bref3.25Nov19.28d.jar')
    version('2019-07-12', sha256='8a9c3b6c38e36ef4c05a61108f083005fd985026c67d75a8173088f88816a202',
            expand=False,
            url='https://faculty.washington.edu/browning/beagle/bref3.12Jul19.0df.jar')
    version('2018-01-27', sha256='4d32f0b6d536c88d5332d961309466c8c3dd9572907a3755450d26d7ba841083',
            expand=False,
            url='https://faculty.washington.edu/browning/beagle/bref.27Jan18.7e1.jar')

    depends_on('java@8', type='run')

    def install(self, spec, prefix):
        """Copy the downloaded jar into place and wrap it in a launcher."""
        jar_name = os.path.basename(self.stage.archive_file)
        mkdirp(prefix.bin)
        install(jar_name, prefix.bin)

        # The tool was renamed from "bref" to "bref3" with the 2019
        # releases, so the launcher script follows suit.
        launcher = prefix.bin.bref3 if self.version >= Version('2019') \
            else prefix.bin.bref

        # Install the helper script that ships next to this recipe, then
        # hard-code the absolute paths to java and the jar into it.
        install(join_path(os.path.dirname(__file__), "bref.sh"), launcher)
        set_executable(launcher)

        java = self.spec['java'].prefix.bin.java
        filter_file('^java', java, launcher,
                    ignore_absent=False, backup=False, string=False)
        filter_file('bref.jar', join_path(prefix.bin, jar_name), launcher,
                    ignore_absent=False, backup=False, string=False)
player1537-forks/spack
var/spack/repos/builtin/packages/py-pynn/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPynn(PythonPackage):
    """A Python package for simulator-independent specification of neuronal
    network models """

    homepage = "https://neuralensemble.org/PyNN/"
    pypi = "PyNN/PyNN-0.8.3.tar.gz"
    git = "https://github.com/NeuralEnsemble/PyNN.git"

    version('0.9.1', sha256='bbc60fea3235427191feb2daa0e2fa07eb1c3946104c068ac8a2a0501263b0b1')
    version('0.8.3', sha256='9d59e6cffa4714f0c892ec6b32d1f5f8f75ba3a20d8635bac50c047aa6f2537e')
    # 0.8beta has no release tarball; it is pinned to a git commit.
    version('0.8beta', commit='ffb0cb1661f2b0f2778db8f71865978fe7a7a6a4')
    version('0.8.1', sha256='ce94246284588414d1570c1d5d697805f781384e771816727c830b01ee30fe39')
    version('0.7.5', sha256='15f75f422f3b71c6129ecef23f29d8baeb3ed6502e7a321b8a2596c78ef7e03c')

    depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))

    # pip silently replaces distutils with setuptools
    depends_on('py-setuptools', type='build')

    depends_on('py-jinja2@2.7:', type=('build', 'run'))
    depends_on('py-docutils@0.10:', type=('build', 'run'))
    depends_on('py-numpy@1.5:', type=('build', 'run'))
    depends_on('py-quantities@0.10:', type=('build', 'run'))
    depends_on('py-lazyarray@0.2.9:', type=('build', 'run'))

    # The required neo series depends on the PyNN release: releases up to
    # 0.8.3 use the old neo 0.3-0.4 API, 0.9.0+ require neo 0.5+.
    depends_on('py-neo@0.3:0.4.1', type=('build', 'run'), when="@:0.8.3")
    depends_on('py-neo@0.5.0:', type=('build', 'run'), when="@0.9.0:")
player1537-forks/spack
var/spack/repos/builtin/packages/variorum/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Variorum(CMakePackage):
    """Variorum is a library providing vendor-neutral interfaces for
       monitoring and controlling underlying hardware features.
    """

    homepage = "https://variorum.readthedocs.io"
    git = "https://github.com/llnl/variorum.git"
    url = "https://github.com/llnl/variorum/archive/v0.1.0.tar.gz"

    maintainers = ["slabasan", "rountree"]

    version('0.4.1', sha256='be7407b856bc2239ecaa27d3df80aee2f541bb721fbfa183612bd9c0ce061f28')
    version('0.4.0', sha256='70ff1c5a3ae15d0bd07d409ab6f3c128e69528703a829cb18ecb4a50adeaea34')
    version('0.3.0', sha256='f79563f09b8fe796283c879b05f7730c36d79ca0346c12995b7bccc823653f42')
    version('0.2.0', sha256='b8c010b26aad8acc75d146c4461532cf5d9d3d24d6fc30ee68f6330a68e65744')
    version("0.1.0", tag="v0.1.0")

    # Variants
    variant("shared", default=True, description="Build Variorum as shared lib")
    variant("docs", default=False,
            description="Build Variorum's documentation")
    variant("log", default=False, description="Enable Variorum's logs")
    variant("build_type", default="Release",
            description="CMake build type",
            values=("Debug", "Release"))

    # Package dependencies
    depends_on("cmake@2.8:", type="build")
    depends_on("hwloc")
    depends_on("jansson", type="link")

    # Documentation related
    depends_on("py-sphinx", when="+docs", type="build")

    # The top-level CMakeLists.txt lives in src/, not the repo root.
    root_cmakelists_dir = "src"

    def cmake_args(self):
        """Translate the Spack spec into Variorum's CMake options."""
        spec = self.spec

        def onoff(predicate):
            # CMake boolean options take the literal strings ON / OFF.
            return "ON" if predicate else "OFF"

        cmake_args = [
            "-DJANSSON_DIR={0}".format(spec['jansson'].prefix),
            "-DBUILD_SHARED_LIBS=" + onoff("+shared" in spec),
        ]

        if "+docs" in spec:
            cmake_args.append("-DBUILD_DOCS=ON")
            # sphinx-build lives in its own prefix; tell CMake where.
            cmake_args.append("-DSPHINX_EXECUTABLE=" + join_path(
                spec["py-sphinx"].prefix.bin, "sphinx-build"))
        else:
            cmake_args.append("-DBUILD_DOCS=OFF")

        cmake_args.append(
            "-DVARIORUM_DEBUG=" + onoff('build_type=Debug' in spec))
        cmake_args.append("-DVARIORUM_LOG=" + onoff("+log" in spec))
        cmake_args.append("-DBUILD_TESTS=" + onoff(self.run_tests))

        return cmake_args
player1537-forks/spack
var/spack/repos/builtin/packages/h5bench/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class H5bench(CMakePackage):
    """A benchmark suite for measuring HDF5 performance."""

    homepage = 'https://github.com/hpc-io/h5bench'
    git = 'https://github.com/hpc-io/h5bench.git'
    url = 'https://github.com/hpc-io/h5bench/archive/refs/tags/1.1.tar.gz'

    maintainers = ['jeanbez', 'sbyna']

    version('master', branch='master')
    version('develop', branch='develop')
    version('1.1', sha256='69f40e9bb0547235efe2114ab807825a513288b21ecfd9ce568443fe43bd9389')
    version('1.0', sha256='c9151d0c138990f7fc684501f7a7e99d8727317b5169809ddbb63d8e84c9fa3f')

    depends_on('cmake@3.10:', type='build')
    depends_on('mpi')
    depends_on('hdf5+mpi@1.12.0:1,develop-1.12:')

    @run_after('install')
    def install_config(self):
        # Ship the sample benchmark configurations alongside the install.
        install_tree('h5bench_patterns/sample_config',
                     self.prefix.share.patterns)
        install('metadata_stress/hdf5_iotest.ini', self.prefix.share)

    def setup_build_environment(self, env):
        # The build locates HDF5 through this environment variable.
        env.set('HDF5_HOME', self.spec['hdf5'].prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/py-pycbc/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class PyPycbc(PythonPackage): """PyCBC is a software package used to explore astrophysical sources of gravitational waves. It contains algorithms to analyze gravitational-wave data from the LIGO and Virgo detectors, detect coalescing compact binaries, and measure the astrophysical parameters of detected sources. PyCBC was used in the first direct detection of gravitational waves and is used in the flagship analysis of LIGO and Virgo data.""" homepage = "https://pycbc.org/" pypi = "PyCBC/PyCBC-1.14.1.tar.gz" version('1.14.1', sha256='4b0a309cb6209837aaebbd691413a286dd7200ccf4b977ffed1462a65ac35dc0') depends_on('py-setuptools', type='build') depends_on('py-numpy@1.13.0:', type=('build', 'run')) depends_on('py-mako@1.0.1:', type=('build', 'run')) depends_on('py-cython', type='build') depends_on('py-decorator@3.4.2:', type=('build', 'run')) depends_on('py-scipy@0.16.0:', type=('build', 'run')) depends_on('py-matplotlib@1.5.1:', type=('build', 'run')) depends_on('pil', type=('build', 'run')) depends_on('py-h5py@2.5:', type=('build', 'run')) depends_on('py-jinja2', type=('build', 'run')) depends_on('py-astropy@2.0.3:', type=('build', 'run')) depends_on('py-mpld3@0.3:', type=('build', 'run')) depends_on('py-lscsoft-glue@1.59.3:', type=('build', 'run')) depends_on('py-emcee@2.2.1', type=('build', 'run')) depends_on('py-requests@1.2.1:', type=('build', 'run')) depends_on('py-beautifulsoup4@4.6.0:', type=('build', 'run')) depends_on('py-six@1.10.0:', type=('build', 'run')) depends_on('py-ligo-segments', type=('build', 'run')) depends_on('py-weave@0.16.0:', when='^python@:2', type=('build', 'run')) patch('for_aarch64.patch', when='@:1.14.1 target=aarch64:')
player1537-forks/spack
var/spack/repos/builtin/packages/r-rematch2/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRematch2(RPackage):
    """Tidy Output from Regular Expression Matching.

    Wrappers on 'regexpr' and 'gregexpr' to return the match results in tidy
    data frames."""

    cran = "rematch2"

    version('2.1.2', sha256='fe9cbfe99dd7731a0a2a310900d999f80e7486775b67f3f8f388c30737faf7bb')
    version('2.1.1', sha256='d0423a418e8b46ac3a4819af7a7d19c39ca7c8c862c1e9a1c1294aa19152518f')
    version('2.1.0', sha256='78677071bd44b40e562df1da6f0c6bdeae44caf973f97ff8286b8c994db59f01')
    version('2.0.1', sha256='0612bb904334bd022ba6d1e69925b1e85f8e86b15ec65476777828776e89609a')

    # Fix: the dependency type was missing, which gives Spack's default
    # ('build', 'link'). An R package dependency must also be present at run
    # time, and every other R recipe in this repository uses
    # type=('build', 'run').
    depends_on('r-tibble', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/graphite2/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Graphite2(CMakePackage): """Graphite is a system that can be used to create "smart fonts" capable of displaying writing systems with various complex behaviors. A smart font contains not only letter shapes but also additional instructions indicating how to combine and position the letters in complex ways.""" homepage = "https://scripts.sil.org/cms/scripts/page.php?site_id=projects&item_id=graphite_home" url = "https://github.com/silnrsi/graphite/releases/download/1.3.13/graphite2-1.3.13.tgz" version('1.3.13', sha256='dd63e169b0d3cf954b397c122551ab9343e0696fb2045e1b326db0202d875f06') depends_on('python@3.6:', type='test') patch('regparm.patch')
player1537-forks/spack
var/spack/repos/builtin/packages/py-kosh/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyKosh(PythonPackage):
    """ Kosh allows codes to store, query, share data via an easy-to-use
    Python API. Kosh lies on top of Sina and can use any database backend
    supported by Sina. In adition Kosh aims to make data access and sharing
    as simple as possible. """

    homepage = "https://github.com/LLNL/kosh"
    url = "https://github.com/LLNL/kosh/archive/refs/tags/v2.0.tar.gz"

    # notify when the package is updated.
    maintainers = ['doutriaux1']

    version('2.0', sha256='059e431e3d3219b53956cb464d9e10933ca141dc89662f55d9c633e35c8b3a1e')

    depends_on('py-setuptools', type='build')
    # NOTE(review): sina is pinned to the exact 1.11 series while the other
    # deps are unconstrained — presumably an API-compatibility pin; confirm
    # against upstream requirements.
    depends_on("py-llnl-sina@1.11", type=("build", "run"))
    depends_on("py-networkx", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/py-azure-cli-telemetry/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Fix: PythonPackage, version() and depends_on() were used without being
# imported; every sibling recipe brings them in via the spack wildcard
# import, and without it this module fails to load.
from spack import *


class PyAzureCliTelemetry(PythonPackage):
    """Microsoft Azure CLI Telemetry Package."""

    homepage = "https://github.com/Azure/azure-cli"
    pypi = "azure-cli-telemetry/azure-cli-telemetry-1.0.4.tar.gz"

    version('1.0.4', sha256='1f239d544d309c29e827982cc20113eb57037dba16db6cdd2e0283e437e0e577')

    depends_on('py-setuptools', type='build')
    # Bounded ranges presumably mirror the upstream setup.py pins — confirm
    # against the sdist.
    depends_on('py-applicationinsights@0.11.1:0.11', type=('build', 'run'))
    depends_on('py-portalocker@1.2:1', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-glmnet/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGlmnet(RPackage):
    """Lasso and Elastic-Net Regularized Generalized Linear Models.

    Extremely efficient procedures for fitting the entire lasso or
    elastic-net regularization path for linear regression, logistic and
    multinomial regression models, Poisson regression and the Cox model. Two
    recent additions are the multiple-response Gaussian, and the grouped
    multinomial. The algorithm uses cyclical coordinate descent in a
    path-wise fashion, as described in the paper linked to via the URL
    below."""

    cran = "glmnet"

    version('4.1-3', sha256='64bc35aa40b6e580cfb8a21e649eb103e996e8747a10c476b8bb9545c846325a')
    version('4.1', sha256='8f0af50919f488789ecf261f6e0907f367d89fca812baa2f814054fb2d0e40cb')
    version('2.0-18', sha256='e8dce9d7b8105f9cc18ba981d420de64a53b09abee219660d3612915d554256b')
    version('2.0-13', sha256='f3288dcaddb2f7014d42b755bede6563f73c17bc87f8292c2ef7776cb9b9b8fd')
    version('2.0-5', sha256='2ca95352c8fbd93aa7800f3d972ee6c1a5fcfeabc6be8c10deee0cb457fd77b1')

    # Dependency sets differ by release series, expressed via when=:
    # 4.1+ added shape/survival and a newer R; 4.1-3 switched to Rcpp.
    depends_on('r@3.6.0:', type=('build', 'run'), when='@4.1:')
    depends_on('r-matrix@1.0-6:', type=('build', 'run'))
    depends_on('r-foreach', type=('build', 'run'))
    depends_on('r-shape', type=('build', 'run'), when='@4.1:')
    depends_on('r-survival', type=('build', 'run'), when='@4.1:')
    depends_on('r-rcpp', type=('build', 'run'), when='@4.1-3:')
    depends_on('r-rcppeigen', type=('build', 'run'), when='@4.1-3:')
player1537-forks/spack
var/spack/repos/builtin.mock/packages/external-common-python/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class ExternalCommonPython(Package): homepage = "http://www.python.org" url = "http://www.python.org/ftp/python/3.8.7/Python-3.8.7.tgz" version('3.8.7', 'be78e48cdfc1a7ad90efff146dce6cfe') depends_on('external-common-openssl') depends_on('external-common-gdbm')
player1537-forks/spack
var/spack/repos/builtin/packages/py-extras/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class PyExtras(PythonPackage): """Useful extra bits for Python - things that shold be in the standard library.""" homepage = "https://github.com/testing-cabal/extras" pypi = "extras/extras-1.0.0.tar.gz" version('1.0.0', sha256='132e36de10b9c91d5d4cc620160a476e0468a88f16c9431817a6729611a81b4e') depends_on('py-setuptools', type='build')
player1537-forks/spack
var/spack/repos/builtin/packages/r-spatstat-core/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RSpatstatCore(RPackage):
    """Core Functionality of the 'spatstat' Family.

    Functionality for data analysis and modelling of spatial data, mainly
    spatial point patterns, in the 'spatstat' family of packages. (Excludes
    analysis of spatial data on a linear network, which is covered by the
    separate package 'spatstat.linnet'.) Exploratory methods include quadrat
    counts, K-functions and their simulation envelopes, nearest neighbour
    distance and empty space statistics, Fry plots, pair correlation
    function, kernel smoothed intensity, relative risk estimation with
    cross-validated bandwidth selection, mark correlation functions,
    segregation indices, mark dependence diagnostics, and kernel estimates
    of covariate effects. Formal hypothesis tests of random pattern
    (chi-squared, Kolmogorov-Smirnov, Monte Carlo,
    Diggle-Cressie-Loosmore-Ford, Dao-Genton, two-stage Monte Carlo) and
    tests for covariate effects (Cox-Berman-Waller-Lawson,
    Kolmogorov-Smirnov, ANOVA) are also supported. Parametric models can be
    fitted to point pattern data using the functions ppm(), kppm(), slrm(),
    dppm() similar to glm(). Types of models include Poisson, Gibbs and Cox
    point processes, Neyman-Scott cluster processes, and determinantal point
    processes. Models may involve dependence on covariates, inter-point
    interaction, cluster formation and dependence on marks. Models are
    fitted by maximum likelihood, logistic regression, minimum contrast, and
    composite likelihood methods. A model can be fitted to a list of point
    patterns (replicated point pattern data) using the function mppm(). The
    model can include random effects and fixed effects depending on the
    experimental design, in addition to all the features listed above.
    Fitted point process models can be simulated, automatically. Formal
    hypothesis tests of a fitted model are supported (likelihood ratio test,
    analysis of deviance, Monte Carlo tests) along with basic tools for
    model selection (stepwise(), AIC()) and variable selection (sdr). Tools
    for validating the fitted model include simulation envelopes, residuals,
    residual plots and Q-Q plots, leverage and influence diagnostics,
    partial residuals, and added variable plots."""

    cran = "spatstat.core"

    version('2.3-2', sha256='7f4d6d997f9187eda71097a53917e7cbe03f8dcfb4e758d86a90fbe42c92f63c')

    # R dependencies are needed both to install and to load the package.
    depends_on('r@3.5.0:', type=('build', 'run'))
    depends_on('r-spatstat-data@2.1-0:', type=('build', 'run'))
    depends_on('r-spatstat-geom@2.3-0:', type=('build', 'run'))
    depends_on('r-nlme', type=('build', 'run'))
    depends_on('r-rpart', type=('build', 'run'))
    depends_on('r-spatstat-utils@2.2-0:', type=('build', 'run'))
    depends_on('r-spatstat-sparse@2.0-0:', type=('build', 'run'))
    depends_on('r-mgcv', type=('build', 'run'))
    depends_on('r-matrix', type=('build', 'run'))
    depends_on('r-abind', type=('build', 'run'))
    depends_on('r-tensor', type=('build', 'run'))
    depends_on('r-goftest@1.2-2:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/jackcess/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Jackcess(Package):
    """Jackcess is a pure Java library for reading from and writing to MS
    Access databases (currently supporting versions 2000-2016)."""

    homepage = "http://jackcess.sourceforge.net/"
    url = "https://sourceforge.net/projects/jackcess/files/jackcess/2.1.12/jackcess-2.1.12.jar"

    # The artifacts are plain jars, so they must not be unpacked
    # (expand=False); older releases live under a different URL layout.
    version('2.1.12', sha256='347e666d8f6abf382a0e1a7597421911423f20cf71237225f9eb53626f377f22',
            expand=False)
    version('1.2.14.3', sha256='a6fab0c4b5daf23dcf7fd309ee4ffc6df12ff982510c094e45442adf88712787',
            expand=False,
            url='https://sourceforge.net/projects/jackcess/files/jackcess/Older%20Releases/1.2.14.3/jackcess-1.2.14.3.jar')

    # NOTE(review): extends('jdk') while the run dependency is 'java' --
    # confirm which Java provider this repo intends to extend.
    extends('jdk')
    depends_on('java', type='run')
    # Each jackcess release is pinned to the commons versions it shipped
    # against.
    depends_on('commons-lang@2.6', when='@2.1.12', type='run')
    depends_on('commons-lang@2.4', when='@1.2.14.3', type='run')
    depends_on('commons-logging@1.1.3', when='@2.1.12', type='run')
    depends_on('commons-logging@1.1.1', when='@1.2.14.3', type='run')

    def install(self, spec, prefix):
        # The download is a single jar; copy it directly into the prefix.
        install('jackcess-{0}.jar'.format(self.version), prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/ike-scan/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class IkeScan(AutotoolsPackage):
    """Discover and fingerprint IKE hosts (IPsec VPN Servers)."""

    homepage = "https://github.com/royhills/ike-scan"
    url = "https://github.com/royhills/ike-scan/releases/download/1.9/ike-scan-1.9.tar.gz"

    version('1.9', sha256='05d15c7172034935d1e46b01dacf1101a293ae0d06c0e14025a4507656f1a7b6')
player1537-forks/spack
var/spack/repos/builtin/packages/mpi/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

# Fix: bring Package, join_path, etc. into scope.  This file referenced
# them without the `from spack import *` that every sibling package file
# in this repository uses.
from spack import *


class Mpi(Package):
    """Virtual package for the Message Passing Interface."""

    homepage = 'https://www.mpi-forum.org/'
    virtual = True

    def test(self):
        """Compile and run a hello-world MPI program in C and Fortran.

        For each language, the matching wrapper compiler is taken from the
        environment (MPICC / MPIF90), the program from the test data
        directory is built, and -- only if compilation succeeded -- it is
        launched on a single rank via the provider's mpirun, checking the
        expected greeting in the output.
        """
        for lang in ('c', 'f'):
            filename = self.test_suite.current_test_data_dir.join(
                'mpi_hello.' + lang)

            compiler_var = 'MPICC' if lang == 'c' else 'MPIF90'
            compiler = os.environ[compiler_var]

            exe_name = 'mpi_hello_%s' % lang
            mpirun = join_path(self.prefix.bin, 'mpirun')

            compiled = self.run_test(compiler,
                                     options=['-o', exe_name, filename])
            if compiled:
                # Rank/size fields may be space-padded, hence the \s* in
                # the expected pattern.
                self.run_test(mpirun,
                              options=['-np', '1', exe_name],
                              expected=[r'Hello world! From rank \s*0 of \s*1']
                              )
player1537-forks/spack
var/spack/repos/builtin/packages/py-pspamm/package.py
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPspamm(PythonPackage):
    """Code Generator for Small Sparse Matrix Multiplication"""

    homepage = "https://github.com/SeisSol/PSpaMM/blob/master/README.md"
    git = "https://github.com/SeisSol/PSpaMM.git"

    maintainers = ['ravil-mobile']

    version('develop', branch='master')

    variant('numpy', default=False, description="installs numpy")
    variant('scipy', default=False, description="installs scipy")

    # Fix: python run-time dependencies need type=('build', 'run');
    # the default (build+link) would leave numpy/scipy unavailable when
    # the generator is actually executed.  This matches how every other
    # Python package in this repository declares py-* dependencies.
    depends_on('py-numpy', when='+numpy', type=('build', 'run'))
    depends_on('py-scipy', when='+scipy', type=('build', 'run'))

    def install(self, spec, prefix):
        # The project has no setup.py; it is used straight from its
        # source tree, so just copy everything into the prefix.
        install_tree('.', prefix)

    def setup_run_environment(self, env):
        # Expose the scripts and modules from the prefix root, where
        # install() placed them.
        env.prepend_path('PATH', self.spec.prefix)
        env.prepend_path('PYTHONPATH', self.spec.prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/py-pycompadre/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class PyPycompadre(PythonPackage): """The Compadre Toolkit provides a performance portable solution for the parallel evaluation of computationally dense kernels. The toolkit specifically targets the Generalized Moving Least Squares (GMLS) approach, which requires the inversion of small dense matrices. The result is a set of weights that provide the information needed for remap or entries that constitute the rows of some globally sparse matrix. """ homepage = 'https://github.com/SNLComputation/compadre' git = 'https://github.com/SNLComputation/compadre.git' url = 'https://github.com/SNLComputation/compadre/archive/v1.3.0.tar.gz' maintainers = ['kuberry'] version('master', branch='master', preferred=True) variant('trilinos', default=False, description='Use Kokkos from Trilinos') variant('debug', default='0', values=['0', '1', '2'], multi=False, description='Debugging level 0) release 1) debug 2) extreme debugging') depends_on('cmake@3.10.0:', type='build') depends_on('python@3.4:', type=('build', 'link', 'run')) depends_on('py-pip', type=('build', 'link', 'run')) depends_on('py-setuptools', type='build') depends_on('py-cython@0.23:', type='build') depends_on('trilinos@13.2:', when='+trilinos') @run_before('install') def set_cmake_from_variants(self): spec = self.spec with open('cmake_opts.txt', 'w') as f: if '+trilinos' in spec: f.write('Trilinos_PREFIX:PATH=%s\n' % spec['trilinos'].prefix) if spec.variants['debug'].value == '0': f.write('CMAKE_CXX_FLAGS:STRING=%s\n' % "' -Ofast -funroll-loops -march=native -mtune=native '") f.write('Compadre_DEBUG:BOOL=OFF\n') else: f.write('CMAKE_CXX_FLAGS:STRING=%s\n' % "'-g -O0'") f.write('CMAKE_BUILD_TYPE:STRING=%s\n' % "DEBUG") f.write('Compadre_DEBUG:BOOL=ON\n') if spec.variants['debug'].value == '2': 
f.write('Compadre_EXTREME_DEBUG:BOOL=ON\n')
player1537-forks/spack
var/spack/repos/builtin/packages/entt/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Fix: this file referenced CMakePackage, version, depends_on and
# conflicts without importing them; every sibling package file imports
# the Spack package DSL explicitly.
from spack import *


class Entt(CMakePackage):
    """EnTT is a header-only, tiny and easy to use library for game
    programming and much more written in modern C++, mainly known for its
    innovative entity-component-system (ECS) model.
    """

    homepage = "https://entt.docsforge.com"
    url = "https://github.com/skypjack/entt/archive/v3.5.2.tar.gz"

    version('3.5.2', sha256='f9271293c44518386c402c9a2188627819748f66302df48af4f6d08e30661036')

    depends_on('cmake@3.7.0:', type='build')
    # Doxygen is needed because the package always builds the docs
    # (see cmake_args below).
    depends_on('doxygen@1.8.0:', type='build')

    # TODO: This list is not comprehensive, we might want to extend it later
    compiler_warning = 'EnTT requires a compiler with support for C++17'
    conflicts('%apple-clang@:10.1', msg=compiler_warning)
    conflicts('%clang@:6', msg=compiler_warning)
    conflicts('%gcc@:7.1', msg=compiler_warning)

    def cmake_args(self):
        return [
            '-DBUILD_DOCS=ON'
        ]
player1537-forks/spack
var/spack/repos/builtin/packages/r-futile-logger/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RFutileLogger(RPackage): """A Logging Utility for R. Provides a simple yet powerful logging utility. Based loosely on log4j, futile.logger takes advantage of R idioms to make logging a convenient and easy to use replacement for cat and print statements.""" cran = "futile.logger" version('1.4.3', sha256='5e8b32d65f77a86d17d90fd8690fc085aa0612df8018e4d6d6c1a60fa65776e4') depends_on('r@3.0.0:', type=('build', 'run')) depends_on('r-lambda-r@1.1.0:', type=('build', 'run')) depends_on('r-futile-options', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/perl-xml-filter-buffertext/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class PerlXmlFilterBuffertext(PerlPackage): """This is a very simple filter. One common cause of grief (and programmer error) is that XML parsers aren't required to provide character events in one chunk. They can, but are not forced to, and most don't. This filter does the trivial but oft-repeated task of putting all characters into a single event.""" homepage = "https://metacpan.org/pod/XML::Filter::BufferText" url = "https://cpan.metacpan.org/authors/id/R/RB/RBERJON/XML-Filter-BufferText-1.01.tar.gz" version('1.01', sha256='8fd2126d3beec554df852919f4739e689202cbba6a17506e9b66ea165841a75c')
player1537-forks/spack
var/spack/repos/builtin/packages/r-gdalutilities/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGdalutilities(RPackage):
    """Wrappers for 'GDAL' Utilities Executables.

    R's 'sf' package ships with self-contained 'GDAL' executables, including
    a bare bones interface to several 'GDAL'-related utility programs
    collectively known as the 'GDAL utilities'. For each of those utilities,
    this package provides an R wrapper whose formal arguments closely mirror
    those of the 'GDAL' command line interface. The utilities operate on
    data stored in files and typically write their output to other files.
    Therefore, to process data stored in any of R's more common spatial
    formats (i.e. those supported by the 'sp', 'sf', and 'raster' packages),
    first write them to disk, then process them with the package's wrapper
    functions before reading the outputted results back into R. GDAL
    function arguments introduced in GDAL version 3.2.1 or earlier are
    supported."""

    cran = "gdalUtilities"

    version('1.2.0', sha256='ead446f7f77f952b72b9ed80c5e415cb9d8d30cfb2439c8d1a8156fa55e2b65b')

    # The GDAL binaries come bundled with r-sf, so that is the only
    # dependency needed.
    depends_on('r-sf', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/examinimd/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Examinimd(MakefilePackage):
    """ExaMiniMD is a proxy application and research vehicle for particle
    codes, in particular Molecular Dynamics (MD). Compared to previous MD
    proxy apps (MiniMD, COMD), its design is significantly more modular in
    order to allow independent investigation of different aspects. To
    achieve that the main components such as force calculation,
    communication, neighbor list construction and binning are derived
    classes whose main functionality is accessed via virtual functions.
    This allows a developer to write a new derived class and drop it into
    the code without touching much of the rest of the application."""

    tags = ['proxy-app', 'ecp-proxy-app']

    homepage = "https://github.com/ECP-copa/ExaMiniMD"
    url = "https://github.com/ECP-copa/ExaMiniMD/archive/1.0.zip"
    git = "https://github.com/ECP-copa/ExaMiniMD.git"

    version('develop', branch='master')
    version('1.0', sha256='d5f884ecc3a5f9723cc57a4c188da926b392605650606c1c8c34f2d1953f2534')

    variant('mpi', default=True, description='Build with MPI support')
    variant('openmp', default=False, description='Build with OpenMP support')
    variant('pthreads', default=False,
            description='Build with POSIX Threads support')
    # TODO: Set up cuda variant when test machine available

    conflicts('+openmp', when='+pthreads')

    depends_on('kokkos-legacy')
    depends_on('mpi', when='+mpi')

    @property
    def build_targets(self):
        """Makefile variables derived from the spec.

        Returns the list of VAR=value strings passed to make: the Kokkos
        source path, the Kokkos device backend, and the MPI toggle plus
        the matching C++ compiler.
        """
        targets = []
        # Append Kokkos
        targets.append('KOKKOS_PATH={0}'.format(
            self.spec['kokkos-legacy'].prefix))
        # Set kokkos device.
        # Fix: variant tests must use '+name' -- a bare 'openmp' in spec
        # asks whether a *dependency* called openmp exists, so the OpenMP
        # and Pthread backends could never be selected.  The '+mpi' check
        # below already used the correct form.
        if '+openmp' in self.spec:
            targets.append('KOKKOS_DEVICES=OpenMP')
        elif '+pthreads' in self.spec:
            targets.append('KOKKOS_DEVICES=Pthread')
        else:
            targets.append('KOKKOS_DEVICES=Serial')
        # Set MPI as needed
        if '+mpi' in self.spec:
            targets.append('MPI=1')
            targets.append('CXX = {0}'.format(self.spec['mpi'].mpicxx))
        else:
            targets.append('MPI=0')
            targets.append('CXX = {0}'.format(spack_cxx))
        return targets

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install('src/ExaMiniMD', prefix.bin)
        install_tree('input', prefix.input)
        mkdirp(prefix.docs)
        install('README.md', prefix.docs)
        install('LICENSE', prefix.docs)
player1537-forks/spack
lib/spack/llnl/util/filesystem.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections
import errno
import glob
import grp
import hashlib
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager

import six

from llnl.util import tty
from llnl.util.compat import Sequence
from llnl.util.lang import dedupe, memoized

from spack.util.executable import Executable

__all__ = [
    'FileFilter',
    'FileList',
    'HeaderList',
    'LibraryList',
    'ancestor',
    'can_access',
    'change_sed_delimiter',
    'copy_mode',
    'filter_file',
    'find',
    'find_headers',
    'find_all_headers',
    'find_libraries',
    'find_system_libraries',
    'fix_darwin_install_name',
    'force_remove',
    'force_symlink',
    'chgrp',
    'chmod_x',
    'copy',
    'install',
    'copy_tree',
    'install_tree',
    'is_exe',
    'join_path',
    'last_modification_time_recursive',
    'mkdirp',
    'partition_path',
    'prefixes',
    'remove_dead_links',
    'remove_directory_contents',
    'remove_if_dead_link',
    'remove_linked_tree',
    'set_executable',
    'set_install_permissions',
    'touch',
    'touchp',
    'traverse_tree',
    'unset_executable_mode',
    'working_dir',
    'keep_modification_time'
]


def path_contains_subdirectory(path, root):
    """True if ``path`` lies inside the directory tree rooted at ``root``."""
    # Normalize both with a trailing separator so that e.g. '/foo' does not
    # spuriously match '/foobar'.
    norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
    norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
    return norm_path.startswith(norm_root)


def possible_library_filenames(library_names):
    """Given a collection of library names like 'libfoo', generate the set of
    library filenames that may be found on the system (e.g. libfoo.so). This
    generates the library filenames that may appear on any OS.
    """
    lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
    return set(
        '.'.join((lib, extension)) for lib, extension in
        itertools.product(library_names, lib_extensions))


def paths_containing_libs(paths, library_names):
    """Given a collection of filesystem paths, return the list of paths that
    which include one or more of the specified libraries.
    """
    required_lib_fnames = possible_library_filenames(library_names)

    rpaths_to_include = []
    for path in paths:
        fnames = set(os.listdir(path))
        if fnames & required_lib_fnames:
            rpaths_to_include.append(path)

    return rpaths_to_include


def same_path(path1, path2):
    """True if the two paths are equal after normalization."""
    norm1 = os.path.abspath(path1).rstrip(os.path.sep)
    norm2 = os.path.abspath(path2).rstrip(os.path.sep)
    return norm1 == norm2


def filter_file(regex, repl, *filenames, **kwargs):
    r"""Like sed, but uses python regular expressions.

    Filters every line of each file through regex and replaces the file
    with a filtered version.  Preserves mode of filtered files.

    As with re.sub, ``repl`` can be either a string or a callable.
    If it is a callable, it is passed the match object and should
    return a suitable replacement string.  If it is a string, it
    can contain ``\1``, ``\2``, etc. to represent back-substitution
    as sed would allow.

    Parameters:
        regex (str): The regular expression to search for
        repl (str): The string to replace matches with
        *filenames: One or more files to search and replace

    Keyword Arguments:
        string (bool): Treat regex as a plain string. Default it False
        backup (bool): Make backup file(s) suffixed with ``~``. Default is
            True
        ignore_absent (bool): Ignore any files that don't exist.
            Default is False
        stop_at (str): Marker used to stop scanning the file further. If a
            text line matches this marker filtering is stopped and the rest
            of the file is copied verbatim. Default is to filter until the
            end of the file.
    """
    string = kwargs.get('string', False)
    backup = kwargs.get('backup', False)
    ignore_absent = kwargs.get('ignore_absent', False)
    stop_at = kwargs.get('stop_at', None)

    # Allow strings to use \1, \2, etc. for replacement, like sed
    if not callable(repl):
        unescaped = repl.replace(r'\\', '\\')

        def replace_groups_with_groupid(m):
            def groupid_to_group(x):
                return m.group(int(x.group(1)))
            return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
        repl = replace_groups_with_groupid

    if string:
        regex = re.escape(regex)

    for filename in filenames:
        msg = 'FILTER FILE: {0} [replacing "{1}"]'
        tty.debug(msg.format(filename, regex))

        backup_filename = filename + "~"
        tmp_filename = filename + ".spack~"

        if ignore_absent and not os.path.exists(filename):
            msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
            tty.debug(msg.format(filename))
            continue

        # Create backup file. Don't overwrite an existing backup
        # file in case this file is being filtered multiple times.
        if not os.path.exists(backup_filename):
            shutil.copy(filename, backup_filename)

        # Create a temporary file to read from. We cannot use backup_filename
        # in case filter_file is invoked multiple times on the same file.
        shutil.copy(filename, tmp_filename)

        try:
            extra_kwargs = {}
            if sys.version_info > (3, 0):
                # Keep undecodable bytes round-trippable on Python 3.
                extra_kwargs = {'errors': 'surrogateescape'}

            # Open as a text file and filter until the end of the file is
            # reached or we found a marker in the line if it was specified
            with open(tmp_filename, mode='r', **extra_kwargs) as input_file:
                with open(filename, mode='w', **extra_kwargs) as output_file:
                    # Using iter and readline is a workaround needed not to
                    # disable input_file.tell(), which will happen if we call
                    # input_file.next() implicitly via the for loop
                    for line in iter(input_file.readline, ''):
                        if stop_at is not None:
                            current_position = input_file.tell()
                            if stop_at == line.strip():
                                output_file.write(line)
                                break
                        filtered_line = re.sub(regex, repl, line)
                        output_file.write(filtered_line)
                    else:
                        current_position = None

            # If we stopped filtering at some point, reopen the file in
            # binary mode and copy verbatim the remaining part
            if current_position and stop_at:
                with open(tmp_filename, mode='rb') as input_file:
                    input_file.seek(current_position)
                    with open(filename, mode='ab') as output_file:
                        output_file.writelines(input_file.readlines())

        except BaseException:
            # clean up the original file on failure.
            shutil.move(backup_filename, filename)
            raise

        finally:
            os.remove(tmp_filename)

        if not backup and os.path.exists(backup_filename):
            os.remove(backup_filename)


class FileFilter(object):
    """Convenience class for calling ``filter_file`` a lot."""

    def __init__(self, *filenames):
        self.filenames = filenames

    def filter(self, regex, repl, **kwargs):
        return filter_file(regex, repl, *self.filenames, **kwargs)


def change_sed_delimiter(old_delim, new_delim, *filenames):
    """Find all sed search/replace commands and change the delimiter.

    e.g., if the file contains seds that look like ``'s///'``, you can
    call ``change_sed_delimiter('/', '@', file)`` to change the
    delimiter to ``'@'``.

    Note that this routine will fail if the delimiter is ``'`` or ``"``.
    Handling those is left for future work.

    Parameters:
        old_delim (str): The delimiter to search for
        new_delim (str): The delimiter to replace with
        *filenames: One or more files to search and replace
    """
    assert(len(old_delim) == 1)
    assert(len(new_delim) == 1)

    # TODO: handle these cases one day?
    assert(old_delim != '"')
    assert(old_delim != "'")
    assert(new_delim != '"')
    assert(new_delim != "'")

    whole_lines = "^s@([^@]*)@(.*)@[gIp]$"
    whole_lines = whole_lines.replace('@', old_delim)

    single_quoted = r"'s@((?:\\'|[^@'])*)@((?:\\'|[^'])*)@[gIp]?'"
    single_quoted = single_quoted.replace('@', old_delim)

    double_quoted = r'"s@((?:\\"|[^@"])*)@((?:\\"|[^"])*)@[gIp]?"'
    double_quoted = double_quoted.replace('@', old_delim)

    repl = r's@\1@\2@g'
    repl = repl.replace('@', new_delim)

    for f in filenames:
        filter_file(whole_lines, repl, f)
        filter_file(single_quoted, "'%s'" % repl, f)
        filter_file(double_quoted, '"%s"' % repl, f)


def set_install_permissions(path):
    """Set appropriate permissions on the installed file."""
    # If this points to a file maintained in a Spack prefix, it is assumed that
    # this function will be invoked on the target. If the file is outside a
    # Spack-maintained prefix, the permissions should not be modified.
    if os.path.islink(path):
        return
    if os.path.isdir(path):
        os.chmod(path, 0o755)
    else:
        os.chmod(path, 0o644)


def group_ids(uid=None):
    """Get group ids that a uid is a member of.

    Arguments:
        uid (int): id of user, or None for current user

    Returns:
        (list of int): gids of groups the user is a member of
    """
    if uid is None:
        uid = os.getuid()
    user = pwd.getpwuid(uid).pw_name
    return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]


def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if isinstance(group, six.string_types):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    # -1 leaves the owning user untouched.
    os.chown(path, -1, gid)


def chmod_x(entry, perms):
    """Implements chmod, treating all executable bits as set using the chmod
    utility's `+X` option.
    """
    mode = os.stat(entry).st_mode
    if os.path.isfile(entry):
        # Only grant the execute bits to files that already had one set.
        if not mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
            perms &= ~stat.S_IXUSR
            perms &= ~stat.S_IXGRP
            perms &= ~stat.S_IXOTH
    os.chmod(entry, perms)


def copy_mode(src, dest):
    """Set the mode of dest to that of src unless it is a link.
    """
    if os.path.islink(dest):
        return
    src_mode = os.stat(src).st_mode
    dest_mode = os.stat(dest).st_mode
    if src_mode & stat.S_IXUSR:
        dest_mode |= stat.S_IXUSR
    if src_mode & stat.S_IXGRP:
        dest_mode |= stat.S_IXGRP
    if src_mode & stat.S_IXOTH:
        dest_mode |= stat.S_IXOTH
    os.chmod(dest, dest_mode)


def unset_executable_mode(path):
    """Clear all execute bits on ``path``."""
    mode = os.stat(path).st_mode
    mode &= ~stat.S_IXUSR
    mode &= ~stat.S_IXGRP
    mode &= ~stat.S_IXOTH
    os.chmod(path, mode)


def copy(src, dest, _permissions=False):
    """Copy the file(s) *src* to the file or directory *dest*.

    If *dest* specifies a directory, the file will be copied into *dest*
    using the base filename from *src*.

    *src* may contain glob characters.

    Parameters:
        src (str): the file(s) to copy
        dest (str): the destination file or directory
        _permissions (bool): for internal use only

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is not
            a directory
    """
    if _permissions:
        tty.debug('Installing {0} to {1}'.format(src, dest))
    else:
        tty.debug('Copying {0} to {1}'.format(src, dest))

    files = glob.glob(src)
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))
    if len(files) > 1 and not os.path.isdir(dest):
        raise ValueError(
            "'{0}' matches multiple files but '{1}' is not a directory".format(
                src, dest))

    for src in files:
        # Expand dest to its eventual full path if it is a directory.
        dst = dest
        if os.path.isdir(dest):
            dst = join_path(dest, os.path.basename(src))

        shutil.copy(src, dst)

        if _permissions:
            set_install_permissions(dst)
            copy_mode(src, dst)


def install(src, dest):
    """Install the file(s) *src* to the file or directory *dest*.

    Same as :py:func:`copy` with the addition of setting proper
    permissions on the installed file.

    Parameters:
        src (str): the file(s) to install
        dest (str): the destination file or directory

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is not
            a directory
    """
    copy(src, dest, _permissions=True)


def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)


def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
    """Recursively copy an entire directory tree rooted at *src*.

    If the destination directory *dest* does not already exist, it will
    be created as well as missing parent directories.

    *src* may contain glob characters.

    If *symlinks* is true, symbolic links in the source tree are represented
    as symbolic links in the new tree and the metadata of the original links
    will be copied as far as the platform allows; if false, the contents and
    metadata of the linked files are copied to the new tree.

    If *ignore* is set, then each path relative to *src* will be passed to
    this function; the function returns whether that path should be skipped.

    Parameters:
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if _permissions:
        tty.debug('Installing {0} to {1}'.format(src, dest))
    else:
        tty.debug('Copying {0} to {1}'.format(src, dest))

    abs_dest = os.path.abspath(dest)
    if not abs_dest.endswith(os.path.sep):
        abs_dest += os.path.sep

    files = glob.glob(src)
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))

    for src in files:
        abs_src = os.path.abspath(src)
        if not abs_src.endswith(os.path.sep):
            abs_src += os.path.sep

        # Stop early to avoid unnecessary recursion if being asked to copy
        # from a parent directory.
        if abs_dest.startswith(abs_src):
            raise ValueError('Cannot copy ancestor directory {0} into {1}'.
                             format(abs_src, abs_dest))

        mkdirp(abs_dest)

        for s, d in traverse_tree(abs_src, abs_dest, order='pre',
                                  follow_symlinks=not symlinks,
                                  ignore=ignore,
                                  follow_nonexisting=True):
            if os.path.islink(s):
                link_target = resolve_link_target_relative_to_the_link(s)
                if symlinks:
                    target = os.readlink(s)
                    if os.path.isabs(target):
                        # Rewrite absolute link targets that point inside
                        # the copied tree so they stay inside the copy.
                        new_target = re.sub(abs_src, abs_dest, target)
                        if new_target != target:
                            tty.debug("Redirecting link {0} to {1}"
                                      .format(target, new_target))
                            target = new_target

                    os.symlink(target, d)
                elif os.path.isdir(link_target):
                    mkdirp(d)
                else:
                    shutil.copyfile(s, d)
            else:
                if os.path.isdir(s):
                    mkdirp(d)
                else:
                    shutil.copy2(s, d)

            if _permissions:
                set_install_permissions(d)
                copy_mode(s, d)


def install_tree(src, dest, symlinks=True, ignore=None):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper
    permissions on the installed files and directories.

    Parameters:
        src (str): the directory to install
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)


def is_exe(path):
    """True if path is an executable file."""
    return os.path.isfile(path) and os.access(path, os.X_OK)


def get_filetype(path_name):
    """
    Return the output of file path_name as a string to identify file type.
    """
    file = Executable('file')
    file.add_default_env('LC_ALL', 'C')
    output = file('-b', '-h', '%s' % path_name,
                  output=str, error=str)
    return output.strip()


def chgrp_if_not_world_writable(path, group):
    """chgrp path to group if path is not world writable"""
    mode = os.stat(path).st_mode
    if not mode & stat.S_IWOTH:
        chgrp(path, group)


def mkdirp(*paths, **kwargs):
    """Creates a directory, as well as parent directories if needed.

    Arguments:
        paths (str): paths to create with mkdirp

    Keyword Aguments:
        mode (permission bits or None): optional permissions to set
            on the created directory -- use OS default if not provided
        group (group name or None): optional group for permissions of
            final created directory -- use OS default if not provided. Only
            used if world write permissions are not set
        default_perms (str or None): one of 'parents' or 'args'. The default
            permissions that are set for directories that are not themselves
            an argument for mkdirp. 'parents' means intermediate directories
            get the permissions of their direct parent directory, 'args'
            means intermediate get the same permissions specified in the
            arguments to mkdirp -- default value is 'args'
    """
    mode = kwargs.get('mode', None)
    group = kwargs.get('group', None)
    default_perms = kwargs.get('default_perms', 'args')

    for path in paths:
        if not os.path.exists(path):
            try:
                # detect missing intermediate folders
                intermediate_folders = []
                last_parent = ''

                intermediate_path = os.path.dirname(path)

                while intermediate_path:
                    if os.path.exists(intermediate_path):
                        last_parent = intermediate_path
                        break

                    intermediate_folders.append(intermediate_path)
                    intermediate_path = os.path.dirname(intermediate_path)

                # create folders
                os.makedirs(path)

                # leaf folder permissions
                if mode is not None:
                    os.chmod(path, mode)
                if group:
                    chgrp_if_not_world_writable(path, group)
                    if mode is not None:
                        os.chmod(path, mode)   # reset sticky grp bit post chgrp

                # for intermediate folders, change mode just for newly created
                # ones and if mode_intermediate has been specified, otherwise
                # intermediate folders list is not populated at all and default
                # OS mode will be used
                if default_perms == 'args':
                    intermediate_mode = mode
                    intermediate_group = group
                elif default_perms == 'parents':
                    stat_info = os.stat(last_parent)
                    intermediate_mode = stat_info.st_mode
                    intermediate_group = stat_info.st_gid
                else:
                    msg = "Invalid value: '%s'. " % default_perms
                    msg += "Choose from 'args' or 'parents'."
                    raise ValueError(msg)

                for intermediate_path in reversed(intermediate_folders):
                    if intermediate_mode is not None:
                        os.chmod(intermediate_path, intermediate_mode)
                    if intermediate_group is not None:
                        chgrp_if_not_world_writable(intermediate_path,
                                                    intermediate_group)
                        # NOTE(review): re-chmods even when intermediate_mode
                        # is None in the 'args' case -- confirm intended.
                        os.chmod(intermediate_path,
                                 intermediate_mode)   # reset sticky bit after

            except OSError as e:
                if e.errno != errno.EEXIST or not os.path.isdir(path):
                    raise e
        elif not os.path.isdir(path):
            raise OSError(errno.EEXIST, "File already exists", path)


def force_remove(*paths):
    """Remove files without printing errors.  Like ``rm -f``, does NOT
       remove directories."""
    for path in paths:
        try:
            os.remove(path)
        except OSError:
            pass


@contextmanager
def working_dir(dirname, **kwargs):
    """Temporarily change the current working directory to ``dirname``,
    optionally creating it first (``create=True``)."""
    if kwargs.get('create', False):
        mkdirp(dirname)

    orig_dir = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(orig_dir)


class CouldNotRestoreDirectoryBackup(RuntimeError):
    """Raised when both the guarded operation and the attempt to restore
    the directory backup failed; carries both exceptions."""
    def __init__(self, inner_exception, outer_exception):
        self.inner_exception = inner_exception
        self.outer_exception = outer_exception


@contextmanager
def replace_directory_transaction(directory_name, tmp_root=None):
    """Moves a directory to a temporary space. If the operations executed
    within the context manager don't raise an exception, the directory is
    deleted. If there is an exception, the move is undone.

    Args:
        directory_name (path): absolute path of the directory name
        tmp_root (path): absolute path of the parent directory where to create
            the temporary

    Returns:
        temporary directory where ``directory_name`` has been moved
    """
    # Check the input is indeed a directory with absolute path.
    # Raise before anything is done to avoid moving the wrong directory
    assert os.path.isdir(directory_name), \
        'Invalid directory: ' + directory_name
    assert os.path.isabs(directory_name), \
        '"directory_name" must contain an absolute path: ' + directory_name

    directory_basename = os.path.basename(directory_name)

    if tmp_root is not None:
        assert os.path.isabs(tmp_root)

    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))

    shutil.move(src=directory_name, dst=tmp_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name,
                                                           tmp_dir))

    try:
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
        # Try to recover the original directory, if this fails, raise a
        # composite exception.
        try:
            # Delete what was there, before copying back the original content
            if os.path.exists(directory_name):
                shutil.rmtree(directory_name)
            shutil.move(
                src=os.path.join(tmp_dir, directory_basename),
                dst=os.path.dirname(directory_name)
            )
        except Exception as outer_exception:
            raise CouldNotRestoreDirectoryBackup(inner_exception,
                                                 outer_exception)

        tty.debug('Directory recovered [{0}]'.format(directory_name))
        raise
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmp_dir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))


def hash_directory(directory, ignore=[]):
    """Hashes recursively the content of a directory.

    Args:
        directory (path): path to a directory to be hashed

    Returns:
        hash of the directory content
    """
    # NOTE(review): mutable default for 'ignore' -- safe here only because
    # it is never mutated (membership test below).
    assert os.path.isdir(directory), '"directory" must be a directory!'

    md5_hash = hashlib.md5()

    # Adapted from https://stackoverflow.com/a/3431835/771663
    for root, dirs, files in os.walk(directory):
        for name in sorted(files):
            filename = os.path.join(root, name)
            if filename not in ignore:
                # TODO: if caching big files becomes an issue, convert this to
                # TODO: read in chunks. Currently it's used only for testing
                # TODO: purposes.
                with open(filename, 'rb') as f:
                    md5_hash.update(f.read())

    return md5_hash.hexdigest()


@contextmanager
def write_tmp_and_move(filename):
    """Write to a temporary file, then move into place."""
    dirname = os.path.dirname(filename)
    basename = os.path.basename(filename)
    tmp = os.path.join(dirname, '.%s.tmp' % basename)
    with open(tmp, 'w') as f:
        yield f
    shutil.move(tmp, filename)


@contextmanager
def open_if_filename(str_or_file, mode='r'):
    """Takes either a path or a file object, and opens it if it is a path.

    If it's a file object, just yields the file object.
    """
    if isinstance(str_or_file, six.string_types):
        with open(str_or_file, mode) as f:
            yield f
    else:
        yield str_or_file


def touch(path):
    """Creates an empty file at the specified path."""
    perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
    fd = None
    try:
        fd = os.open(path, perms)
        os.utime(path, None)
    finally:
        if fd is not None:
            os.close(fd)


def touchp(path):
    """Like ``touch``, but creates any parent directories needed for the file.
    """
    mkdirp(os.path.dirname(path))
    touch(path)


def force_symlink(src, dest):
    """Create a symlink, replacing any existing file at ``dest``."""
    try:
        os.symlink(src, dest)
    except OSError:
        os.remove(dest)
        os.symlink(src, dest)


def join_path(prefix, *args):
    """os.path.join that stringifies every component first."""
    path = str(prefix)
    for elt in args:
        path = os.path.join(path, str(elt))
    return path


def ancestor(dir, n=1):
    """Get the nth ancestor of a directory."""
    parent = os.path.abspath(dir)
    for i in range(n):
        parent = os.path.dirname(parent)
    return parent


def get_single_file(directory):
    """Return the name of the only file in ``directory``; raise ValueError
    if there is not exactly one."""
    fnames = os.listdir(directory)
    if len(fnames) != 1:
        raise ValueError("Expected exactly 1 file, got {0}"
                         .format(str(len(fnames))))
    return fnames[0]


@contextmanager
def temp_cwd():
    """Work inside a fresh temporary directory, removed on exit."""
    tmp_dir = tempfile.mkdtemp()
    try:
        with working_dir(tmp_dir):
            yield tmp_dir
    finally:
        shutil.rmtree(tmp_dir)


@contextmanager
def temp_rename(orig_path, temp_path):
    """Temporarily move ``orig_path`` to ``temp_path``, restoring on exit.
    A no-op when both names resolve to the same file."""
    same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
    if not same_path:
        shutil.move(orig_path, temp_path)
    try:
        yield
    finally:
        if not same_path:
            shutil.move(temp_path, orig_path)


def can_access(file_name):
    """True if we have read/write access to the file."""
    return os.access(file_name, os.R_OK | os.W_OK)


def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    """Traverse two filesystem trees simultaneously.

    Walks the LinkTree directory in pre or post order.  Yields each
    file in the source directory with a matching path from the dest
    directory, along with whether the file is a directory.
    e.g., for this tree::

        root/
            a/
                file1
                file2
            b/
                file3

    When called on dest, this yields::

        ('root',         'dest')
        ('root/a',       'dest/a')
        ('root/a/file1', 'dest/a/file1')
        ('root/a/file2', 'dest/a/file2')
        ('root/b',       'dest/b')
        ('root/b/file3', 'dest/b/file3')

    Keyword Arguments:
        order (str): Whether to do pre- or post-order traversal. Accepted
            values are 'pre' and 'post'
        ignore (typing.Callable): function indicating which files to ignore
        follow_nonexisting (bool): Whether to descend into directories in
            ``src`` that do not exit in ``dest``.
    """
    # NOTE(review): the implementation of this function lies beyond the end
    # of this chunk and is intentionally not reproduced here.
Default is True follow_links (bool): Whether to descend into symlinks in ``src`` """ follow_nonexisting = kwargs.get('follow_nonexisting', True) follow_links = kwargs.get('follow_link', False) # Yield in pre or post order? order = kwargs.get('order', 'pre') if order not in ('pre', 'post'): raise ValueError("Order must be 'pre' or 'post'.") # List of relative paths to ignore under the src root. ignore = kwargs.get('ignore', None) or (lambda filename: False) # Don't descend into ignored directories if ignore(rel_path): return source_path = os.path.join(source_root, rel_path) dest_path = os.path.join(dest_root, rel_path) # preorder yields directories before children if order == 'pre': yield (source_path, dest_path) for f in os.listdir(source_path): source_child = os.path.join(source_path, f) dest_child = os.path.join(dest_path, f) rel_child = os.path.join(rel_path, f) # Treat as a directory # TODO: for symlinks, os.path.isdir looks for the link target. If the # target is relative to the link, then that may not resolve properly # relative to our cwd - see resolve_link_target_relative_to_the_link if os.path.isdir(source_child) and ( follow_links or not os.path.islink(source_child)): # When follow_nonexisting isn't set, don't descend into dirs # in source that do not exist in dest if follow_nonexisting or os.path.exists(dest_child): tuples = traverse_tree( source_root, dest_root, rel_child, **kwargs) for t in tuples: yield t # Treat as a file. 
elif not ignore(os.path.join(rel_path, f)): yield (source_child, dest_child) if order == 'post': yield (source_path, dest_path) def set_executable(path): mode = os.stat(path).st_mode if mode & stat.S_IRUSR: mode |= stat.S_IXUSR if mode & stat.S_IRGRP: mode |= stat.S_IXGRP if mode & stat.S_IROTH: mode |= stat.S_IXOTH os.chmod(path, mode) def last_modification_time_recursive(path): path = os.path.abspath(path) times = [os.stat(path).st_mtime] times.extend(os.stat(os.path.join(root, name)).st_mtime for root, dirs, files in os.walk(path) for name in dirs + files) return max(times) def remove_empty_directories(root): """Ascend up from the leaves accessible from `root` and remove empty directories. Parameters: root (str): path where to search for empty directories """ for dirpath, subdirs, files in os.walk(root, topdown=False): for sd in subdirs: sdp = os.path.join(dirpath, sd) try: os.rmdir(sdp) except OSError: pass def remove_dead_links(root): """Recursively removes any dead link that is present in root. Parameters: root (str): path where to search for dead links """ for dirpath, subdirs, files in os.walk(root, topdown=False): for f in files: path = join_path(dirpath, f) remove_if_dead_link(path) def remove_if_dead_link(path): """Removes the argument if it is a dead link. Parameters: path (str): The potential dead link """ if os.path.islink(path) and not os.path.exists(path): os.unlink(path) def remove_linked_tree(path): """Removes a directory and its contents. If the directory is a symlink, follows the link and removes the real directory before removing the link. Parameters: path (str): Directory to be removed """ if os.path.exists(path): if os.path.islink(path): shutil.rmtree(os.path.realpath(path), True) os.unlink(path) else: shutil.rmtree(path, True) @contextmanager def safe_remove(*files_or_dirs): """Context manager to remove the files passed as input, but restore them in case any exception is raised in the context block. 
    Args:
        *files_or_dirs: glob expressions for files or directories
            to be removed

    Returns:
        Dictionary that maps deleted files to their temporary copy
        within the context block.
    """
    # Find all the files or directories that match
    glob_matches = [glob.glob(x) for x in files_or_dirs]
    # Sort them so that shorter paths like "/foo/bar" come before
    # nested paths like "/foo/bar/baz.yaml". This simplifies the
    # handling of temporary copies below
    sorted_matches = sorted([
        os.path.abspath(x) for x in itertools.chain(*glob_matches)
    ], key=len)

    # Copy files and directories in a temporary location
    removed, dst_root = {}, tempfile.mkdtemp()
    try:
        for id, file_or_dir in enumerate(sorted_matches):
            # The glob expression at the top ensures that the file/dir exists
            # at the time we enter the loop. Double check here since it might
            # happen that a previous iteration of the loop already removed it.
            # This is the case, for instance, if we remove the directory
            # "/foo/bar" before the file "/foo/bar/baz.yaml".
            if not os.path.exists(file_or_dir):
                continue
            # The monotonic ID is a simple way to make the filename
            # or directory name unique in the temporary folder
            basename = os.path.basename(file_or_dir) + '-{0}'.format(id)
            temporary_path = os.path.join(dst_root, basename)
            shutil.move(file_or_dir, temporary_path)
            removed[file_or_dir] = temporary_path
        yield removed
    except BaseException:
        # Restore the files that were removed
        for original_path, temporary_path in removed.items():
            shutil.move(temporary_path, original_path)
        raise


def fix_darwin_install_name(path):
    """Fix install name of dynamic libraries on Darwin to have full path.

    There are two parts of this task:

    1. Use ``install_name('-id', ...)`` to change install name of a single lib

    2. Use ``install_name('-change', ...)`` to change the cross linking
       between libs. The function assumes that all libraries are in one
       folder and currently won't follow subfolders.

    Parameters:
        path (str): directory in which .dylib files are located
    """
    libs = glob.glob(join_path(path, "*.dylib"))
    for lib in libs:
        # fix install name first:
        install_name_tool = Executable('install_name_tool')
        install_name_tool('-id', lib, lib)
        otool = Executable('otool')
        long_deps = otool('-L', lib, output=str).split('\n')
        # Strip the leading tab and everything after the first space in
        # each dependency line; skip the header lines and trailing blank.
        deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
        # fix all dependencies:
        for dep in deps:
            for loc in libs:
                # We really want to check for either
                #     dep == os.path.basename(loc)   or
                #     dep == join_path(builddir, os.path.basename(loc)),
                # but we don't know builddir (nor how symbolic links look
                # in builddir). We thus only compare the basenames.
                if os.path.basename(dep) == os.path.basename(loc):
                    install_name_tool('-change', dep, loc, lib)
                    break


def find(root, files, recursive=True):
    """Search for ``files`` starting from the ``root`` directory.

    Like GNU/BSD find but written entirely in Python.

    Examples:

    .. code-block:: console

       $ find /usr -name python

    is equivalent to:

    >>> find('/usr', 'python')

    .. code-block:: console

       $ find /usr/local/bin -maxdepth 1 -name python

    is equivalent to:

    >>> find('/usr/local/bin', 'python', recursive=False)

    Accepts any glob characters accepted by fnmatch:

    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches everything
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================

    Parameters:
        root (str): The root directory to start searching from
        files (str or Sequence): Library name(s) to search for
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to True.

    Returns:
        list: The files that have been found
    """
    if isinstance(files, six.string_types):
        files = [files]

    if recursive:
        return _find_recursive(root, files)
    else:
        return _find_non_recursive(root, files)


def _find_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict. The idea is that
    # we want to poke the filesystem as little as possible, but still
    # maintain stability in the order of the answer. Thus we are recording
    # each library found in a key, and reconstructing the stable order later.
    found_files = collections.defaultdict(list)

    # Make the path absolute to have os.walk also return an absolute path
    root = os.path.abspath(root)
    for path, _, list_files in os.walk(root):
        for search_file in search_files:
            matches = glob.glob(os.path.join(path, search_file))
            matches = [os.path.join(path, x) for x in matches]
            found_files[search_file].extend(matches)

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])

    return answer


def _find_non_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict as os.list_dir
    # can return files in any order (does not preserve stability)
    found_files = collections.defaultdict(list)

    # Make the path absolute to have absolute path returned
    root = os.path.abspath(root)

    for search_file in search_files:
        matches = glob.glob(os.path.join(root, search_file))
        matches = [os.path.join(root, x) for x in matches]
        found_files[search_file].extend(matches)

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])

    return answer


# Utilities for libraries and headers


class FileList(Sequence):
    """Sequence of absolute paths to files.

    Provides a few convenience methods to manipulate file paths.
    """

    def __init__(self, files):
        # Accept a single path as a convenience.
        if isinstance(files, six.string_types):
            files = [files]

        self.files = list(dedupe(files))

    @property
    def directories(self):
        """Stable de-duplication of the directories where the files reside.
        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
        >>> l.directories
        ['/dir1', '/dir2']
        >>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
        >>> h.directories
        ['/dir1', '/dir2']

        Returns:
            list: A list of directories
        """
        return list(dedupe(
            os.path.dirname(x) for x in self.files if os.path.dirname(x)
        ))

    @property
    def basenames(self):
        """Stable de-duplication of the base-names in the list

        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
        >>> l.basenames
        ['liba.a', 'libb.a']
        >>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h'])
        >>> h.basenames
        ['a.h', 'b.h']

        Returns:
            list: A list of base-names
        """
        return list(dedupe(os.path.basename(x) for x in self.files))

    def __getitem__(self, item):
        # Integral index returns a single path; a slice returns a new
        # instance of the same (sub)class.
        cls = type(self)
        if isinstance(item, numbers.Integral):
            return self.files[item]
        return cls(self.files[item])

    def __add__(self, other):
        return self.__class__(dedupe(self.files + list(other)))

    def __radd__(self, other):
        return self.__add__(other)

    def __eq__(self, other):
        # NOTE(review): assumes ``other`` has a ``files`` attribute;
        # comparing against a plain list raises AttributeError — confirm
        # callers only compare FileList instances.
        return self.files == other.files

    def __len__(self):
        return len(self.files)

    def joined(self, separator=' '):
        return separator.join(self.files)

    def __repr__(self):
        return self.__class__.__name__ + '(' + repr(self.files) + ')'

    def __str__(self):
        return self.joined()


class HeaderList(FileList):
    """Sequence of absolute paths to headers.

    Provides a few convenience methods to manipulate header paths and get
    commonly used compiler flags or names.
    """

    # Make sure to only match complete words, otherwise path components such
    # as "xinclude" will cause false matches.
    # Avoid matching paths such as <prefix>/include/something/detail/include,
    # e.g. in the CUDA Toolkit which ships internal libc++ headers.
    include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')

    def __init__(self, files):
        super(HeaderList, self).__init__(files)

        self._macro_definitions = []
        self._directories = None

    @property
    def directories(self):
        """Directories to be searched for header files."""
        values = self._directories
        if values is None:
            values = self._default_directories()
        return list(dedupe(values))

    @directories.setter
    def directories(self, value):
        value = value or []
        # Accept a single directory as input
        if isinstance(value, six.string_types):
            value = [value]

        self._directories = [os.path.normpath(x) for x in value]

    def _default_directories(self):
        """Default computation of directories based on the list of
        header files.
        """
        dir_list = super(HeaderList, self).directories
        values = []
        for d in dir_list:
            # If the path contains a subdirectory named 'include' then stop
            # there and don't add anything else to the path.
            m = self.include_regex.match(d)
            value = os.path.join(*m.group(1, 2)) if m else d
            values.append(value)
        return values

    @property
    def headers(self):
        """Stable de-duplication of the headers.

        Returns:
            list: A list of header files
        """
        return self.files

    @property
    def names(self):
        """Stable de-duplication of header names in the list without
        extensions

        >>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h'])
        >>> h.names
        ['a', 'b']

        Returns:
            list: A list of files without extensions
        """
        names = []

        for x in self.basenames:
            name = x

            # Valid extensions include: ['.cuh', '.hpp', '.hh', '.h']
            for ext in ['.cuh', '.hpp', '.hh', '.h']:
                i = name.rfind(ext)
                if i != -1:
                    names.append(name[:i])
                    break
            else:
                # No valid extension, should we still include it?
                names.append(name)

        return list(dedupe(names))

    @property
    def include_flags(self):
        """Include flags

        >>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
        >>> h.include_flags
        '-I/dir1 -I/dir2'

        Returns:
            str: A joined list of include flags
        """
        return ' '.join(['-I' + x for x in self.directories])

    @property
    def macro_definitions(self):
        """Macro definitions

        >>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
        >>> h.add_macro('-DBOOST_LIB_NAME=boost_regex')
        >>> h.add_macro('-DBOOST_DYN_LINK')
        >>> h.macro_definitions
        '-DBOOST_LIB_NAME=boost_regex -DBOOST_DYN_LINK'

        Returns:
            str: A joined list of macro definitions
        """
        return ' '.join(self._macro_definitions)

    @property
    def cpp_flags(self):
        """Include flags + macro definitions

        >>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
        >>> h.cpp_flags
        '-I/dir1 -I/dir2'
        >>> h.add_macro('-DBOOST_DYN_LINK')
        >>> h.cpp_flags
        '-I/dir1 -I/dir2 -DBOOST_DYN_LINK'

        Returns:
            str: A joined list of include flags and macro definitions
        """
        cpp_flags = self.include_flags
        if self.macro_definitions:
            cpp_flags += ' ' + self.macro_definitions
        return cpp_flags

    def add_macro(self, macro):
        """Add a macro definition

        Parameters:
            macro (str): The macro to add
        """
        self._macro_definitions.append(macro)


def find_headers(headers, root, recursive=False):
    """Returns an iterable object containing a list of full paths to
    headers if found.

    Accepts any glob characters accepted by fnmatch:

    =======  ====================================
    Pattern  Meaning
    =======  ====================================
    *        matches everything
    ?        matches any single character
    [seq]    matches any character in ``seq``
    [!seq]   matches any character not in ``seq``
    =======  ====================================

    Parameters:
        headers (str or list): Header name(s) to search for
        root (str): The root directory to start searching from
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to False.
    Returns:
        HeaderList: The headers that have been found
    """
    if isinstance(headers, six.string_types):
        headers = [headers]
    elif not isinstance(headers, Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_headers.__name__, type(headers))
        raise TypeError(message)

    # Construct the right suffix for the headers
    suffixes = [
        # C
        'h',
        # C++
        'hpp', 'hxx', 'hh', 'H', 'txx', 'tcc', 'icc',
        # Fortran
        'mod', 'inc',
    ]

    # List of headers we are searching with suffixes
    headers = ['{0}.{1}'.format(header, suffix) for header in headers
               for suffix in suffixes]

    return HeaderList(find(root, headers, recursive))


def find_all_headers(root):
    """Convenience function that returns the list of all headers found in
    the directory passed as argument.

    Args:
        root (str): directory where to look recursively for header files

    Returns:
        List of all headers found in ``root`` and subdirectories.
    """
    return find_headers('*', root=root, recursive=True)


class LibraryList(FileList):
    """Sequence of absolute paths to libraries

    Provides a few convenience methods to manipulate library paths and get
    commonly used compiler flags or names
    """

    @property
    def libraries(self):
        """Stable de-duplication of library files.

        Returns:
            list: A list of library files
        """
        return self.files

    @property
    def names(self):
        """Stable de-duplication of library names in the list

        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so'])
        >>> l.names
        ['a', 'b']

        Returns:
            list: A list of library names
        """
        names = []

        for x in self.basenames:
            name = x
            # Strip the conventional 'lib' prefix before the extension.
            if x.startswith('lib'):
                name = x[3:]

            # Valid extensions include: ['.dylib', '.so', '.a']
            for ext in ['.dylib', '.so', '.a']:
                i = name.rfind(ext)
                if i != -1:
                    names.append(name[:i])
                    break
            else:
                # No valid extension, should we still include it?
                names.append(name)

        return list(dedupe(names))

    @property
    def search_flags(self):
        """Search flags for the libraries

        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
        >>> l.search_flags
        '-L/dir1 -L/dir2'

        Returns:
            str: A joined list of search flags
        """
        return ' '.join(['-L' + x for x in self.directories])

    @property
    def link_flags(self):
        """Link flags for the libraries

        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
        >>> l.link_flags
        '-la -lb'

        Returns:
            str: A joined list of link flags
        """
        return ' '.join(['-l' + name for name in self.names])

    @property
    def ld_flags(self):
        """Search flags + link flags

        >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
        >>> l.ld_flags
        '-L/dir1 -L/dir2 -la -lb'

        Returns:
            str: A joined list of search flags and link flags
        """
        return self.search_flags + ' ' + self.link_flags


def find_system_libraries(libraries, shared=True):
    """Searches the usual system library locations for ``libraries``.

    Search order is as follows:

    1. ``/lib64``
    2. ``/lib``
    3. ``/usr/lib64``
    4. ``/usr/lib``
    5. ``/usr/local/lib64``
    6. ``/usr/local/lib``

    Accepts any glob characters accepted by fnmatch:

    =======  ====================================
    Pattern  Meaning
    =======  ====================================
    *        matches everything
    ?        matches any single character
    [seq]    matches any character in ``seq``
    [!seq]   matches any character not in ``seq``
    =======  ====================================

    Parameters:
        libraries (str or list): Library name(s) to search for
        shared (bool): if True searches for shared libraries,
            otherwise for static. Defaults to True.

    Returns:
        LibraryList: The libraries that have been found
    """
    if isinstance(libraries, six.string_types):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_system_libraries.__name__,
                                 type(libraries))
        raise TypeError(message)

    libraries_found = []
    search_locations = [
        '/lib64',
        '/lib',
        '/usr/lib64',
        '/usr/lib',
        '/usr/local/lib64',
        '/usr/local/lib',
    ]

    for library in libraries:
        for root in search_locations:
            result = find_libraries(library, root, shared, recursive=True)
            if result:
                # First matching location wins for this library.
                libraries_found += result
                break

    return libraries_found


def find_libraries(libraries, root, shared=True, recursive=False):
    """Returns an iterable of full paths to libraries found in a root dir.

    Accepts any glob characters accepted by fnmatch:

    =======  ====================================
    Pattern  Meaning
    =======  ====================================
    *        matches everything
    ?        matches any single character
    [seq]    matches any character in ``seq``
    [!seq]   matches any character not in ``seq``
    =======  ====================================

    Parameters:
        libraries (str or list): Library name(s) to search for
        root (str): The root directory to start searching from
        shared (bool): if True searches for shared libraries,
            otherwise for static. Defaults to True.
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to False.

    Returns:
        LibraryList: The libraries that have been found
    """
    if isinstance(libraries, six.string_types):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_libraries.__name__, type(libraries))
        raise TypeError(message)

    # Construct the right suffix for the library
    if shared:
        # Used on both Linux and macOS
        suffixes = ['so']
        if sys.platform == 'darwin':
            # Only used on macOS
            suffixes.append('dylib')
    else:
        suffixes = ['a']

    # List of libraries we are searching with suffixes
    libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries
                 for suffix in suffixes]

    if not recursive:
        # If not recursive, look for the libraries directly in root
        return LibraryList(find(root, libraries, False))

    # To speedup the search for external packages configured e.g. in /usr,
    # perform first non-recursive search in root/lib then in root/lib64 and
    # finally search all of root recursively. The search stops when the first
    # match is found.
    for subdir in ('lib', 'lib64'):
        dirname = join_path(root, subdir)
        if not os.path.isdir(dirname):
            continue
        found_libs = find(dirname, libraries, False)
        if found_libs:
            break
    else:
        found_libs = find(root, libraries, True)

    return LibraryList(found_libs)


@memoized
def can_access_dir(path):
    """Returns True if the argument is an accessible directory.

    Args:
        path: path to be tested

    Returns:
        True if ``path`` is an accessible directory, else False
    """
    return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)


@memoized
def can_write_to_dir(path):
    """Return True if the argument is a directory in which we can write.

    Args:
        path: path to be tested

    Returns:
        True if ``path`` is an writeable directory, else False
    """
    return os.path.isdir(path) and os.access(path,
                                             os.R_OK | os.X_OK | os.W_OK)


@memoized
def files_in(*search_paths):
    """Returns all the files in paths passed as arguments.
    Caller must ensure that each path in ``search_paths`` is a directory.

    Args:
        *search_paths: directories to be searched

    Returns:
        List of (file, full_path) tuples with all the files found.
    """
    files = []
    for d in filter(can_access_dir, search_paths):
        files.extend(filter(
            lambda x: os.path.isfile(x[1]),
            [(f, os.path.join(d, f)) for f in os.listdir(d)]
        ))
    return files


def search_paths_for_executables(*path_hints):
    """Given a list of path hints returns a list of paths where
    to search for an executable.

    Args:
        *path_hints (list of paths): list of paths taken into
            consideration for a search

    Returns:
        A list containing the real path of every existing directory in
        `path_hints` and its `bin` subdirectory if it exists.
    """
    executable_paths = []
    for path in path_hints:
        if not os.path.isdir(path):
            continue

        path = os.path.abspath(path)
        executable_paths.append(path)

        bin_dir = os.path.join(path, 'bin')
        if os.path.isdir(bin_dir):
            executable_paths.append(bin_dir)

    return executable_paths


def partition_path(path, entry=None):
    """
    Split the prefixes of the path at the first occurrence of entry and
    return a 3-tuple containing a list of the prefixes before the entry, a
    string of the prefix ending with the entry, and a list of the prefixes
    after the entry.

    If the entry is not a node in the path, the result will be the prefix list
    followed by an empty string and an empty list.
    """
    paths = prefixes(path)

    if entry is not None:
        # Derive the index of entry within paths, which will correspond to
        # the location of the entry in within the path.
        try:
            entries = path.split(os.sep)
            # An absolute path splits to a leading '' component, which
            # ``prefixes`` does not emit; shift the index to compensate.
            i = entries.index(entry)
            if '' in entries:
                i -= 1
            return paths[:i], paths[i], paths[i + 1:]
        except ValueError:
            # ``entry`` is not a component of ``path``: fall through.
            pass

    return paths, '', []


def prefixes(path):
    """
    Returns a list containing the path and its ancestors, top-to-bottom.

    The list for an absolute path will not include an ``os.sep`` entry.
    For example, assuming ``os.sep`` is ``/``, given path ``/ab/cd/efg``
    the resulting paths will be, in order: ``/ab``, ``/ab/cd``, and
    ``/ab/cd/efg``

    The list for a relative path starting ``./`` will not include ``.``.
    For example, path ``./hi/jkl/mn`` results in a list with the following
    paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.

    Parameters:
        path (str): the string used to derive ancestor paths

    Returns:
        A list containing ancestor paths in order and ending with the path
    """
    if not path:
        return []
    parts = path.strip(os.sep).split(os.sep)
    if path.startswith(os.sep):
        parts.insert(0, os.sep)
    paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]

    try:
        paths.remove(os.sep)
    except ValueError:
        pass

    try:
        paths.remove('.')
    except ValueError:
        pass

    return paths


def md5sum(file):
    """Compute the MD5 sum of a file.

    Args:
        file (str): file to be checksummed

    Returns:
        MD5 sum of the file's content
    """
    md5 = hashlib.md5()
    with open(file, "rb") as f:
        md5.update(f.read())
    # NOTE(review): returns the raw *bytes* digest, not a hex string —
    # confirm callers expect bytes before changing to hexdigest().
    return md5.digest()


def remove_directory_contents(dir):
    """Remove all contents of a directory."""
    if os.path.exists(dir):
        for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
            # unlink files and symlinks (including links to directories);
            # rmtree everything else.
            if os.path.isfile(entry) or os.path.islink(entry):
                os.unlink(entry)
            else:
                shutil.rmtree(entry)


@contextmanager
def keep_modification_time(*filenames):
    """
    Context manager to keep the modification timestamps of the input files.
    Tolerates and has no effect on non-existent files and files that are
    deleted by the nested code.

    Parameters:
        *filenames: one or more files that must have their modification
            timestamps unchanged
    """
    mtimes = {}
    for f in filenames:
        if os.path.exists(f):
            mtimes[f] = os.path.getmtime(f)
    yield
    for f, mtime in mtimes.items():
        if os.path.exists(f):
            # Keep the current atime, restore only the mtime.
            os.utime(f, (os.path.getatime(f), mtime))


@contextmanager
def temporary_dir(*args, **kwargs):
    """Create a temporary directory and cd's into it. Delete the directory
    on exit.

    Takes the same arguments as tempfile.mkdtemp()
    """
    tmp_dir = tempfile.mkdtemp(*args, **kwargs)
    try:
        with working_dir(tmp_dir):
            yield tmp_dir
    finally:
        # NOTE(review): this removes only the directory *contents*, leaving
        # the empty temporary directory behind, although the docstring says
        # the directory is deleted — confirm which is intended.
        remove_directory_contents(tmp_dir)
player1537-forks/spack
var/spack/repos/builtin/packages/blitz/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Blitz(AutotoolsPackage):
    """N-dimensional arrays for C++"""

    homepage = "https://github.com/blitzpp/blitz"
    url = "https://github.com/blitzpp/blitz/archive/1.0.2.tar.gz"

    version('1.0.2', sha256='500db9c3b2617e1f03d0e548977aec10d36811ba1c43bb5ef250c0e3853ae1c2')
    version('1.0.1', sha256='b62fc3f07b64b264307b01fec5e4f2793e09a68dcb5378984aedbc2e4b3adcef')
    version('1.0.0', sha256='79c06ea9a0585ba0e290c8140300e3ad19491c45c1d90feb52819abc3b58a0c1')

    # The build scripts switched from Python 2 to Python 3 with 1.0.2.
    depends_on('python@:2.7', type='build', when='@:1.0.1')
    depends_on('python@3:', type='build', when='@1.0.2:')

    # Only the library needs to be built (skips docs/tests by default).
    build_targets = ['lib']

    def check(self):
        # Run the test suite first, then the bundled examples.
        for target in ('check-testsuite', 'check-examples'):
            make(target)
player1537-forks/spack
var/spack/repos/builtin/packages/r-spatstat-linnet/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RSpatstatLinnet(RPackage):
    """Linear Networks Functionality of the 'spatstat' Family.

    Defines types of spatial data on a linear network and provides
    functionality for geometrical operations, data analysis and modelling of
    data on a linear network, in the 'spatstat' family of packages. Contains
    definitions and support for linear networks, including creation of
    networks, geometrical measurements, topological connectivity, geometrical
    operations such as inserting and deleting vertices, intersecting a network
    with another object, and interactive editing of networks. Data types
    defined on a network include point patterns, pixel images, functions, and
    tessellations. Exploratory methods include kernel estimation of intensity
    on a network, K-functions and pair correlation functions on a network,
    simulation envelopes, nearest neighbour distance and empty space distance,
    relative risk estimation with cross-validated bandwidth selection. Formal
    hypothesis tests of random pattern (chi-squared, Kolmogorov-Smirnov, Monte
    Carlo, Diggle-Cressie-Loosmore-Ford, Dao-Genton, two-stage Monte Carlo)
    and tests for covariate effects (Cox-Berman-Waller-Lawson,
    Kolmogorov-Smirnov, ANOVA) are also supported. Parametric models can be
    fitted to point pattern data using the function lppm() similar to glm().
    Only Poisson models are implemented so far. Models may involve dependence
    on covariates and dependence on marks. Models are fitted by maximum
    likelihood. Fitted point process models can be simulated, automatically.
    Formal hypothesis tests of a fitted model are supported (likelihood ratio
    test, analysis of deviance, Monte Carlo tests) along with basic tools for
    model selection (stepwise(), AIC()) and variable selection (sdr). Tools
    for validating the fitted model include simulation envelopes, residuals,
    residual plots and Q-Q plots, leverage and influence diagnostics, partial
    residuals, and added variable plots. Random point patterns on a network
    can be generated using a variety of models."""

    # FIX: removed a stray ``<filename>var/spack/...`` extraction artifact
    # that preceded the copyright header and made the file invalid Python.
    cran = "spatstat.linnet"

    version('2.3-1', sha256='119ba6e3da651aa9594f70a7a35349209534215aa640c2653aeddc6aa25038c3')

    # Version constraints follow the package's CRAN DESCRIPTION file.
    depends_on('r@3.5.0:', type=('build', 'run'))
    depends_on('r-spatstat-data@2.1-0:', type=('build', 'run'))
    depends_on('r-spatstat-geom@2.3-0:', type=('build', 'run'))
    depends_on('r-spatstat-core@2.3-0:', type=('build', 'run'))
    depends_on('r-spatstat-utils@2.2-0:', type=('build', 'run'))
    depends_on('r-matrix', type=('build', 'run'))
    depends_on('r-spatstat-sparse@2.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/py-brotlipy/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyBrotlipy(PythonPackage):
    """Python binding to the Brotli library."""

    homepage = "https://github.com/python-hyper/brotlipy/"
    pypi = "brotlipy/brotlipy-0.7.0.tar.gz"

    version('0.7.0', sha256='36def0b859beaf21910157b4c33eb3b06d8ce459c942102f16988cca6ea164df')

    depends_on('py-setuptools', type='build')
    depends_on('py-cffi@1.0.0:', type=('build', 'run'))
    # enum34 is the stdlib-enum backport, only needed on very old Pythons.
    depends_on('py-enum34@1.0.4:1', when='^python@:3.3', type=('build', 'run'))

    # TODO: Builds against internal copy of headers, doesn't seem to be a way
    # to use external brotli installation
    # depends_on('brotli')
player1537-forks/spack
var/spack/repos/builtin/packages/genomefinisher/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from spack import *


class Genomefinisher(Package):
    """GFinisher is an application tools for refinement and finalization of
    prokaryotic genomes assemblies using the bias of GC Skew to identify
    assembly errors and organizes the contigs/scaffolds with genomes
    references."""

    homepage = "http://gfinisher.sourceforge.net"
    url = "https://sourceforge.net/projects/gfinisher/files/GenomeFinisher_1.4.zip"

    version('1.4', sha256='8efbebaab4b577c72193f14c2c362b96fb949981fd66d2cca1364839af8bf1e3')

    # Pure Java application; only a JRE is needed at run time.
    depends_on('java@8:', type='run')

    def install(self, spec, prefix):
        """Install the jar and its libraries, plus a java wrapper script."""
        jar_name = 'GenomeFinisher.jar'

        mkdirp(prefix.bin)
        install(jar_name, prefix.bin)
        install_tree('lib', prefix.lib)

        # Copy the helper script that ships next to this recipe into bin/
        # and make it executable; it launches java on the jar file.
        wrapper_source = join_path(os.path.dirname(__file__),
                                   "genomefinisher.sh")
        wrapper = prefix.bin.genomefinisher
        install(wrapper_source, wrapper)
        set_executable(wrapper)

        # Rewrite the wrapper so it points at the exact java binary and the
        # installed jar rather than relying on PATH.
        filter_opts = {'ignore_absent': False, 'backup': False,
                       'string': False}
        filter_file('^java', spec['java'].prefix.bin.java, wrapper,
                    **filter_opts)
        filter_file(jar_name, join_path(prefix.bin, jar_name), wrapper,
                    **filter_opts)
player1537-forks/spack
var/spack/repos/builtin/packages/stringtie/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Stringtie(MakefilePackage):
    """StringTie is a fast and highly efficient assembler of RNA-Seq
    alignments into potential transcripts."""

    homepage = "https://ccb.jhu.edu/software/stringtie"
    url = "https://github.com/gpertea/stringtie/archive/v1.3.3b.tar.gz"

    version('1.3.4d', sha256='0134c0adc264efd31a1df4301b33bfcf3b3fe96bd3990ce3df90819bad9af968')
    version('1.3.4a', sha256='6164a5fa9bf8807ef68ec89f47e3a61fe57fa07fe858f52fb6627f705bf71add')
    version('1.3.3b', sha256='30e8a3a29b474f0abeef1540d9b4624a827d8b29d7347226d86a38afea28bc0f')

    depends_on('samtools')

    def install(self, spec, prefix):
        """Copy the single built executable into the install prefix."""
        mkdirp(prefix.bin)
        install('stringtie', prefix.bin)
player1537-forks/spack
var/spack/repos/builtin/packages/libmetalink/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Libmetalink(AutotoolsPackage): """Libmetalink is a library to read Metalink XML download description format. It supports both Metalink version 3 and Metalink version 4 (RFC 5854).""" homepage = "https://launchpad.net/libmetalink" url = "https://github.com/metalink-dev/libmetalink/archive/release-0.1.3.tar.gz" version('0.1.3', sha256='7469c4a64b9dd78c3f23fa575fe001bbfd548c181492a2067b59609872122d7a') version('0.1.2', sha256='64af0979c11658f7a1659ca97ebc3c7bac8104253bf504015ac3b9c363382bae') version('0.1.1', sha256='e9b8dff68b0b999884c21f68d9b1cc0c1993270e3e1f639f82e27b1eb960cb66') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') depends_on('pkgconfig', type='build') depends_on('expat@2.1.0:') depends_on('libxml2@2.7.8:')
player1537-forks/spack
var/spack/repos/builtin/packages/r-mlinterfaces/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RMlinterfaces(RPackage):
    """Uniform interfaces to R machine learning procedures for data in
    Bioconductor containers.

    This package provides uniform interfaces to machine learning code for
    data in R and Bioconductor containers."""

    # Bioconductor package; versions are pinned by git commit.
    bioc = "MLInterfaces"

    version('1.74.0', commit='5ee73b6491b1d68d7b49ddce6483df98ad880946')
    version('1.70.0', commit='7b076c3e85314dd5fd5bd8a98e8123d08d9acd3b')
    # NOTE(review): the two commit hashes below were lost to placeholder
    # scrubbing -- restore from the Bioconductor git mirror before use.
    version('1.64.1', commit='<KEY>')
    version('1.62.1', commit='<KEY>')
    version('1.60.1', commit='019e9ed44923e5d845a4800246aa044ddd59d548')
    version('1.58.1', commit='4e2b5efa019fcb677dc82a58a1668c8a00cdfe07')
    version('1.56.0', commit='<KEY>')

    depends_on('r@2.9:', type=('build', 'run'))
    depends_on('r@3.5:', type=('build', 'run'), when='@1.60.1:')
    depends_on('r-rcpp', type=('build', 'run'), when='@1.70.0:')
    depends_on('r-biocgenerics@0.13.11:', type=('build', 'run'))
    depends_on('r-biobase', type=('build', 'run'))
    depends_on('r-annotate', type=('build', 'run'))
    depends_on('r-cluster', type=('build', 'run'))
    depends_on('r-gdata', type=('build', 'run'))
    depends_on('r-pls', type=('build', 'run'))
    depends_on('r-sfsmisc', type=('build', 'run'))
    depends_on('r-mass', type=('build', 'run'))
    depends_on('r-rpart', type=('build', 'run'))
    depends_on('r-genefilter', type=('build', 'run'))
    depends_on('r-fpc', type=('build', 'run'))
    depends_on('r-ggvis', type=('build', 'run'))
    depends_on('r-shiny', type=('build', 'run'))
    depends_on('r-gbm', type=('build', 'run'))
    depends_on('r-rcolorbrewer', type=('build', 'run'))
    depends_on('r-hwriter', type=('build', 'run'))
    depends_on('r-threejs@0.2.2:', type=('build', 'run'))
    depends_on('r-mlbench', type=('build', 'run'))
    depends_on('r-magrittr', type=('build', 'run'), when='@1.74.0:')
    # rda was dropped upstream after 1.64.1.
    depends_on('r-rda', type=('build', 'run'), when='@:1.64.1')
player1537-forks/spack
var/spack/repos/builtin/packages/r-multcomp/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RMultcomp(RPackage):
    """Simultaneous Inference in General Parametric Models.

    Simultaneous tests and confidence intervals for general linear hypotheses
    in parametric models, including linear, generalized linear, linear mixed
    effects, and survival models. The package includes demos reproducing
    analyzes presented in the book "Multiple Comparisons Using R" (Bretz,
    Hothorn, Westfall, 2010, CRC Press)."""

    cran = "multcomp"

    version('1.4-18', sha256='107a5e65cfff158b271d7386240dc8672d8cf45313f016e0ed83767faf7c2806')
    version('1.4-15', sha256='9927607efb3eb84ac3d25d82daf2faef6a69e05a334b163ce43fd31c14b19bce')
    version('1.4-10', sha256='29bcc635c0262e304551b139cd9ee655ab25a908d9693e1cacabfc2a936df5cf')
    version('1.4-8', sha256='a20876619312310e9523d67e9090af501383ce49dc6113c6b4ca30f9c943a73a')
    version('1.4-6', sha256='fe9efbe671416a49819cbdb9137cc218faebcd76e0f170fd1c8d3c84c42eeda2')

    depends_on('r-mvtnorm@1.0-10:', type=('build', 'run'))
    depends_on('r-survival@2.39-4:', type=('build', 'run'))
    depends_on('r-th-data@1.0-2:', type=('build', 'run'))
    depends_on('r-sandwich@2.3-0:', type=('build', 'run'))
    depends_on('r-codetools', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/seqprep/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Seqprep(MakefilePackage):
    """SeqPrep is a program to merge paired end Illumina reads that are
    overlapping into a single longer read."""

    homepage = "https://github.com/jstjohn/SeqPrep"
    url = "https://github.com/jstjohn/SeqPrep/archive/v1.3.2.tar.gz"

    version('1.3.2', sha256='2b8a462a0e0a3e51f70be7730dc77b1f2bb69e74845dd0fbd2110a921c32265a')

    # Only linked against at build time; not needed at run time.
    depends_on('zlib', type='link')

    def install(self, spec, prefix):
        """Copy the single built executable into the install prefix."""
        mkdirp(prefix.bin)
        install('SeqPrep', prefix.bin)
player1537-forks/spack
var/spack/repos/builtin.mock/packages/v1-provider/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class V1Provider(Package):
    """Mimic the real netlib-lapack, that may be built on top of an
    optimized blas.
    """
    # Mock package used only by the test suite, so no checksum or real URL.
    homepage = "https://dev.null"

    version('1.0')

    provides('v1')
player1537-forks/spack
var/spack/repos/builtin/packages/r-hypergraph/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RHypergraph(RPackage):
    """A package providing hypergraph data structures.

    A package that implements some simple capabilities for representing and
    manipulating hypergraphs."""

    # Bioconductor package; versions are pinned by git commit.
    bioc = "hypergraph"

    version('1.66.0', commit='e9c47336df6409006622818f541f258103163a39')
    # NOTE(review): several commit hashes below were lost to placeholder
    # scrubbing -- restore from the Bioconductor git mirror before use.
    version('1.62.0', commit='<PASSWORD>')
    version('1.56.0', commit='<KEY>')
    version('1.54.0', commit='<KEY>')
    version('1.52.0', commit='<PASSWORD>')
    version('1.50.0', commit='<KEY>5')
    version('1.48.0', commit='a4c19ea0b5f15204f706a7bfdea5363706382820')

    depends_on('r@2.1.0:', type=('build', 'run'))
    depends_on('r-graph', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/mpas-model/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class MpasModel(MakefilePackage):
    """The Model for Prediction Across Scales (MPAS) is a collaborative
    project for developing atmosphere, ocean and other earth-system
    simulation components for use in climate, regional climate and weather
    studies."""

    homepage = "https://mpas-dev.github.io/"
    url = "https://github.com/MPAS-Dev/MPAS-Model/archive/v7.0.tar.gz"
    maintainers = ['t-brown']

    version('7.1', sha256='9b5c181b7d0163ae33d24d7a79ede6990495134b58cf4500ba5c8c94192102bc')
    version('7.0', sha256='f898ce257e66cff9e29320458870570e55721d16cb000de7f2cc27de7fdef14f')
    version('6.3', sha256='e7f1d9ebfeb6ada37d42a286aaedb2e69335cbc857049dc5c5544bb51e7a8db8')
    version('6.2', sha256='2a81825a62a468bf5c56ef9d9677aa2eb88acf78d4f996cb49a7db98b94a6b16')

    depends_on('mpi')
    depends_on('parallelio')

    patch('makefile.patch', when='@7.0')

    # The MPAS makefiles are not parallel-safe.
    parallel = False

    # Static physics tables/data fetched alongside the model sources; the
    # pinned ref differs between release streams.
    resource(when='@6.2:6.3',
             name='MPAS-Data',
             git='https://github.com/MPAS-Dev/MPAS-Data.git',
             commit='33561790de8b43087ab850be833f51a4e605f1bb')
    resource(when='@7.0:',
             name='MPAS-Data',
             git='https://github.com/MPAS-Dev/MPAS-Data.git',
             tag='v7.0')

    def target(self, model, action):
        """Build the make argument list for one core/action combination.

        ``model`` selects the MPAS core (e.g. 'atmosphere' or
        'init_atmosphere'); ``action`` is the make goal ('all' or 'clean').
        Returns the list of ``VAR=value`` assignments plus the goal, ready
        to be splatted into ``make()``.
        """
        spec = self.spec
        satisfies = spec.satisfies
        # Fortran flags always include OpenMP; compiler-specific flags force
        # free-form source, big-endian I/O and 8-byte default reals.
        fflags = [self.compiler.openmp_flag]
        cppflags = ['-D_MPI']
        if satisfies('%gcc'):
            fflags.extend([
                '-ffree-line-length-none',
                '-fconvert=big-endian',
                '-ffree-form',
                '-fdefault-real-8',
                '-fdefault-double-8',
            ])
            cppflags.append('-DUNDERSCORE')
        elif satisfies('%fj'):
            fflags.extend([
                '-Free',
                '-Fwide',
                '-CcdRR8',
            ])
        elif satisfies('%intel'):
            fflags.extend([
                '-r8',
                '-convert big_endian',
                '-FR',
            ])
            cppflags.append('-DUNDERSCORE')
        targets = [
            # Parallel compilers come from the MPI provider, serial ones
            # from the Spack compiler wrappers.
            'FC_PARALLEL={0}'.format(spec['mpi'].mpifc),
            'CC_PARALLEL={0}'.format(spec['mpi'].mpicc),
            'CXX_PARALLEL={0}'.format(spec['mpi'].mpicxx),
            'FC_SERIAL={0}'.format(spack_fc),
            'CC_SERIAL={0}'.format(spack_cc),
            'CXX_SERIAL={0}'.format(spack_cxx),
            'CFLAGS_OMP={0}'.format(self.compiler.openmp_flag),
            'FFLAGS_OMP={0}'.format(' '.join(fflags)),
            'CPPFLAGS={0}'.format(' '.join(cppflags)),
            'PIO={0}'.format(spec['parallelio'].prefix),
            # netcdf-c/netcdf-fortran arrive transitively via parallelio.
            'NETCDF={0}'.format(spec['netcdf-c'].prefix),
            'NETCDFF={0}'.format(spec['netcdf-fortran'].prefix)
        ]
        if satisfies('^parallelio+pnetcdf'):
            targets.append(
                'PNETCDF={0}'.format(spec['parallel-netcdf'].prefix)
            )
        targets.extend([
            'USE_PIO2=true',
            'CPP_FLAGS=-D_MPI',
            'OPENMP=true',
            'CORE={0}'.format(model),
            action
        ])
        return targets

    def build(self, spec, prefix):
        """Build the init_atmosphere and atmosphere cores in sequence.

        The two cores share object files, so the tree is cleaned between
        builds and each resulting executable is stashed in bin/ first.
        """
        copy_tree(join_path('MPAS-Data', 'atmosphere'),
                  join_path('src', 'core_atmosphere', 'physics'))
        make(*self.target('init_atmosphere', 'all'))
        mkdir('bin')
        copy('init_atmosphere_model', 'bin')
        make(*self.target('init_atmosphere', 'clean'))
        make(*self.target('atmosphere', 'all'))
        copy('atmosphere_model', 'bin')

    def install(self, spec, prefix):
        # Only the two staged executables are installed.
        install_tree('bin', prefix.bin)
player1537-forks/spack
var/spack/repos/builtin/packages/libndp/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libndp(AutotoolsPackage):
    """Libndp - Library for Neighbor Discovery Protocol"""

    homepage = "http://www.libndp.org/"
    url = "https://github.com/jpirko/libndp/archive/v1.7.tar.gz"

    version('1.7', sha256='44be73630ee785ed9f571f9aaaeba0e1d375fa337fd841270034c813b5b0e6fd')
    version('1.6', sha256='565d6c4167f83ec697c762ea002f23e8f0b00828d0749b1ce928f068543e5aad')
    version('1.5', sha256='42c0a8938d4302c72a42e2d954deef7e4903bb3974da6804a929a3cd0b5b6aa7')
    version('1.4', sha256='b9b23d14e9b2d87745810d9d0e956e9fb45f44e794b1629492850c5a8fbbb083')
    version('1.3', sha256='e933dc1b9ce85089de8ba0f6ba4c3ec47eba0e9a404e14c1789a6fa9e23793f6')

    # GitHub archives lack a generated configure script, so the full
    # autotools chain is needed at build time.
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')
player1537-forks/spack
lib/spack/spack/analyzers/environment_variables.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An environment analyzer will read and parse the environment variables
file in the installed package directory, generating a json file that has an
index of key, value pairs for environment variables."""

import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase


class EnvironmentVariables(AnalyzerBase):
    """Analyzer that indexes the build-time environment of a package."""

    name = "environment_variables"
    outfile = "spack-analyzer-environment-variables.json"
    description = "environment variables parsed from spack-build-env.txt"

    def run(self):
        """Load, parse, and save spack-build-env.txt to analyzers.

        Locates spack-build-env.txt in the package metadata directory and
        returns its parsed environment keyed by this analyzer's name.
        """
        path = os.path.join(self.meta_dir, "spack-build-env.txt")
        parsed = self._read_environment_file(path)
        return {self.name: parsed}

    def _read_environment_file(self, filename):
        """Return the environment described by ``filename`` as a dict.

        The sourcing-file parser splits the file into modification events;
        applying those events to an empty dict yields the final key/value
        environment. Returns None (with a warning) when the file is absent.
        Values are kept verbatim -- no path-prefix scrubbing is attempted.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        modifications = EnvironmentModifications.from_sourcing_file(filename)
        environment = {}
        modifications.apply_modifications(environment)
        return environment
player1537-forks/spack
var/spack/repos/builtin/packages/xfsprogs/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Xfsprogs(AutotoolsPackage):
    """XFS User Tools."""

    homepage = "https://github.com/mtanski/xfsprogs"
    url = "http://kernel.org/pub/linux/utils/fs/xfs/xfsprogs/xfsprogs-4.17.0.tar.xz"

    version('5.11.0', sha256='0e9c390fcdbb8a79e1b8f5e6e25fd529fc9f9c2ef8f2d5e647b3556b82d1b353')
    version('5.8.0', sha256='8ef46ed9e6bb927f407f541dc4324857c908ddf1374265edc910d23724048c6b')
    version('5.7.0', sha256='8f2348a68a686a3f4491dda5d62dd32d885fbc52d32875edd41e2c296e7b4f35')
    version('5.6.0', sha256='0aba2aac5d80d07646dde868437fc337af2c7326edadcc6d6a7c0bfd3190c1e6')
    version('4.20.0', sha256='beafdfd080352a8c9d543491e0874d0e8809cb643a3b9d352d5feed38d77022a')

    depends_on('libinih')
    depends_on('gettext')
    depends_on('uuid')
    depends_on('util-linux')

    def flag_handler(self, name, flags):
        """Inject -fcommon for old releases built with GCC >= 10.

        GCC 10 switched the default to -fno-common, which breaks releases
        up to 5.4.0 that rely on tentative definitions.
        """
        iflags = []
        if name == 'cflags':
            if self.spec.satisfies('@:5.4.0 %gcc@10:'):
                iflags.append('-fcommon')
        return (iflags, None, flags)

    def setup_build_environment(self, env):
        # blkid.h lives in a subdirectory of util-linux's include dir.
        env.append_path('C_INCLUDE_PATH',
                        self.spec['util-linux'].prefix.include.blkid)

    def configure_args(self):
        """Link against libintl and install systemd units into our prefix."""
        # Use self.prefix directly instead of looking this package up in
        # its own spec (self.spec['xfsprogs'] is just a roundabout self).
        args = ['LDFLAGS=-lintl',
                "--with-systemd-unit-dir=" +
                self.prefix.lib.systemd.system]
        return args

    def install(self, spec, prefix):
        # 'install' places the tools; 'install-dev' adds headers/libs for
        # packages that build against xfsprogs.
        make('install')
        make('install-dev')

    def setup_run_environment(self, env):
        # Administrative tools land in sbin, which spack does not add by
        # default.
        env.prepend_path('PATH', self.prefix.sbin)
player1537-forks/spack
var/spack/repos/builtin/packages/hmmer/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Hmmer(Package):
    """HMMER is used for searching sequence databases for sequence homologs,
    and for making sequence alignments. It implements methods using
    probabilistic models called profile hidden Markov models (profile HMMs).
    """

    homepage = 'http://www.hmmer.org'
    url = 'http://eddylab.org/software/hmmer/hmmer-3.3.tar.gz'

    version('3.3.2', sha256='92fee9b5efe37a5276352d3502775e7c46e9f7a0ee45a331eacb2a0cac713c69')
    version('3.3', sha256='0186bf40af67032666014971ed8ddc3cf2834bebc2be5b3bc0304a93e763736c')
    version('3.2.1', sha256='a56129f9d786ec25265774519fc4e736bbc16e4076946dcbd7f2c16efc8e2b9c')
    version('3.1b2', sha256='dd16edf4385c1df072c9e2f58c16ee1872d855a018a2ee6894205277017b5536')
    version('3.0', sha256='6977e6473fcb554b1d5a86dc9edffffa53918c1bd88d7fd20d7499f1ba719e83')
    version('2.4i', sha256='73cb85c2197017fa7a25482556ed250bdeed256974b99b0c25e02854e710a886')
    version('2.3.2', sha256='d20e1779fcdff34ab4e986ea74a6c4ac5c5f01da2993b14e92c94d2f076828b4')
    version('2.3.1', sha256='3956d53af8de5bb99eec18cba0628e86924c6543639d290293b6677a9224ea3f')

    variant('mpi', default=True, description='Compile with MPI')
    variant('gsl', default=False, description='Compile with GSL')

    depends_on('mpi', when='+mpi')
    depends_on('gsl', when='+gsl')

    def install(self, spec, prefix):
        """Configure, build, optionally test, and install HMMER."""
        # Assemble configure options from the enabled variants.
        args = ['--prefix={0}'.format(prefix)]

        if '+gsl' in spec:
            args.append('--with-gsl')
            args.append('LIBS=-lgsl -lgslcblas')

        if '+mpi' in spec:
            args.append('--enable-mpi')

        configure(*args)
        make()

        # Run the upstream test suite only when requested.
        if self.run_tests:
            make('check')

        make('install')
player1537-forks/spack
var/spack/repos/builtin/packages/boinc-client/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class BoincClient(AutotoolsPackage):
    """BOINC is a platform for high-throughput computing on a
    large scale (thousands or millions of computers). It can be
    used for volunteer computing (using consumer devices) or
    grid computing (using organizational resources). It supports
    virtualized, parallel, and GPU-based applications."""

    homepage = "https://boinc.berkeley.edu/"
    url = "https://github.com/BOINC/boinc/archive/client_release/7.16/7.16.5.tar.gz"

    version('7.16.5', sha256='33db60991b253e717c6124cce4750ae7729eaab4e54ec718b9e37f87012d668a')

    variant('manager', default=False, description='Builds the client manager')
    variant('graphics', default=False, description='Graphic apps support')

    # Dependency documentation:
    # https://boinc.berkeley.edu/trac/wiki/SoftwarePrereqsUnix
    conflicts('%gcc@:3.0.4')

    depends_on('autoconf@2.58:', type='build')
    depends_on('automake@1.8:', type='build')
    depends_on('libtool@1.5:', type='build')
    depends_on('m4@1.4:', type='build')

    depends_on('curl@7.17.1:')
    depends_on('openssl@0.9.8:')

    depends_on('freeglut@3:', when='+graphics')
    depends_on('libsm', when='+graphics')
    depends_on('libice', when='+graphics')
    depends_on('libxmu', when='+graphics')
    depends_on('libxi', when='+graphics')
    depends_on('libx11', when='+graphics')
    depends_on('libjpeg', when='+graphics')

    depends_on('wxwidgets@3.0.0:', when='+manager')
    depends_on('libnotify', when='+manager')
    depends_on('sqlite@3.1:', when='+manager')

    patch('systemd-fix.patch')

    def configure_args(self):
        """Always build only the client; toggle the manager per variant."""
        manager_flag = ('--enable-manager'
                        if '+manager' in self.spec
                        else '--disable-manager')
        return ['--disable-server', '--enable-client', manager_flag]
player1537-forks/spack
var/spack/repos/builtin/packages/fakexrandr/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Fakexrandr(MakefilePackage):
    """This is a tool to cheat an X11 server to believe that there are more
    monitors than there actually are."""

    homepage = "https://github.com/phillipberndt/fakexrandr"
    git = "https://github.com/phillipberndt/fakexrandr.git"

    version('master', branch='master')

    depends_on('libxrandr')
    depends_on('libxinerama')
    depends_on('libx11')
    depends_on('python', type=('build', 'run'))

    def edit(self, spec, prefix):
        # Installation instructions involve running `configure` script,
        # but this script just writes a `config.h` file like below.
        version = spec['libxrandr'].version
        with open('config.h', 'w') as config:
            config.write("""
#define XRANDR_MAJOR {0}
#define XRANDR_MINOR {1}
#define XRANDR_PATCH {2}
#define REAL_XRANDR_LIB "{3}"
#define FAKEXRANDR_INSTALL_DIR "{4}"
""".format(version[0], version[1], version[2],
                spec['libxrandr'].libs[0], prefix.lib))

        # Also need to hack Makefile: point the prefix at our install dir
        # and use the compiler's own PIC flag instead of hard-coded -fPIC.
        makefile = FileFilter('Makefile')
        makefile.filter('PREFIX=/usr', 'PREFIX=' + prefix)
        makefile.filter('-fPIC', self.compiler.cc_pic_flag)

        # And tool used to generate skeleton: swap gcc for the spack wrapper.
        filter_file('gcc', spack_cc, 'make_skeleton.py')

        # ldconfig does not exist on macOS; drop the call there.
        if 'platform=darwin' in spec:
            makefile.filter('ldconfig', '')
player1537-forks/spack
var/spack/repos/builtin/packages/uftrace/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

from spack import *


class Uftrace(AutotoolsPackage):
    """Dynamic function graph tracer for Linux which demangles C, C++ and
    Rust calls"""

    homepage = 'https://uftrace.github.io/slide/'
    url = 'https://github.com/namhyung/uftrace/archive/v0.11.tar.gz'
    git = 'https://github.com/namhyung/uftrace.git'
    executables = ['^uftrace$']
    maintainers = ['bernhardkaindl']
    tags = ['trace-tools']

    # The build process uses 'git describe --tags' to get the package version
    version('master', branch='master', get_full_repo=True)
    version('0.11', sha256='101dbb13cb3320ee76525ec26426f2aa1de4e3ee5af74f79cb403ae4d2c6c871')
    version('0.10', sha256='b8b56d540ea95c3eafe56440d6a998e0a140d53ca2584916b6ca82702795bbd9')

    variant("doc", default=False, description="Build uftrace's documentation")
    variant("python2", default=False, description="Build uftrace with python2 support")
    variant("python3", default=True, description="Build uftrace with python3 support")

    depends_on('pandoc', when="+doc", type='build')
    depends_on('capstone')
    depends_on('elfutils')
    depends_on('lsof', type='test')
    depends_on('pkgconfig', type='build')
    depends_on('libunwind')
    depends_on('ncurses')
    depends_on('python@2.7:', when='+python2')
    depends_on('python@3.5:', when='+python3')
    depends_on('lua-luajit')

    # Fix the version string if building below another git repo.
    # Submitted upstream:
    @when('@:0.11')
    def patch(self):
        # Only call 'git describe' when a .git directory actually exists,
        # otherwise a parent repository's tags would be picked up.
        filter_file('shell git', 'shell test -e .git && git', 'Makefile')

    def check(self):
        make('test', *['V=1', '-j{0}'.format(max(int(make_jobs), 20))])
        # BUG FIX: this sentence was previously split so that "Ensure that
        # spack can continue:" appeared as bare (invalid) code.
        # In certain cases, tests using TCP/IP can hang. Ensure that spack
        # can continue:
        os.system("kill -9 `lsof -t ./uftrace` 2>/dev/null")

    def install(self, spec, prefix):
        make('install', *['V=1'])

    def installcheck(self):
        # Post-install checks are covered by test() below.
        pass

    def test(self):
        """Perform stand-alone/smoke tests using the installed package."""
        uftrace = self.prefix.bin.uftrace
        self.run_test(uftrace,
                      ['-A', '.', '-R', '.', '-P', 'main', uftrace, '-V'],
                      [r'dwarf', r'luajit', r'tui', r'sched', r'dynamic',
                       r'main\(2, ',
                       r' getopt_long\(2, ',
                       r' .*printf.*\(',
                       r'} = 0; /\* main \*/'],
                      installed=True,
                      purpose='test: testing the installation')

    @classmethod
    def determine_version(cls, exe):
        """Extract the version from `uftrace --version` output."""
        output = Executable(exe)('--version', output=str, error=str)
        match = re.search(r'uftrace v(\S+)', output)
        return match.group(1) if match else 'None'
player1537-forks/spack
var/spack/repos/builtin/packages/r-adegraphics/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RAdegraphics(RPackage): """An S4 Lattice-Based Package for the Representation of Multivariate Data. Graphical functionalities for the representation of multivariate data. It is a complete re-implementation of the functions available in the 'ade4' package.""" cran = "adegraphics" version('1.0-16', sha256='7ba59ce9aeefe1c25b4b118d08ef458ffd34115412c147cc428629e72a82ec3a') version('1.0-15', sha256='87bbcd072e9a898955f5ede4315e82365086a50a2887bf5bd2e94bbb4d3f678a') depends_on('r@3.0.2:', type=('build', 'run')) depends_on('r-ade4@1.7-13:', type=('build', 'run')) depends_on('r-kernsmooth', type=('build', 'run')) depends_on('r-lattice', type=('build', 'run')) depends_on('r-latticeextra', type=('build', 'run')) depends_on('r-rcolorbrewer', type=('build', 'run')) depends_on('r-sp@1.1-1:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-fontawesome/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RFontawesome(RPackage): """Easily Work with 'Font Awesome' Icons. Easily and flexibly insert 'Font Awesome' icons into 'R Markdown' documents and 'Shiny' apps. These icons can be inserted into HTML content through inline 'SVG' tags or 'i' tags. There is also a utility function for exporting 'Font Awesome' icons as 'PNG' images for those situations where raster graphics are needed.""" cran = "fontawesome" version('0.2.2', sha256='572db64d1b3c9be301935e0ca7baec69f3a6e0aa802e23f1f224b3724259df64') depends_on('r@3.3.0:', type=('build', 'run')) depends_on('r-rlang@0.4.10:', type=('build', 'run')) depends_on('r-htmltools@0.5.1.1:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/r-xtable/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RXtable(RPackage): """Export Tables to LaTeX or HTML. Coerce data to LaTeX and HTML tables.""" cran = "xtable" version('1.8-4', sha256='5abec0e8c27865ef0880f1d19c9f9ca7cc0fd24eadaa72bcd270c3fb4075fd1c') version('1.8-3', sha256='53b2b0fff8d7a8bba434063c2a01b867f510a4389ded2691fbedbc845f08c325') version('1.8-2', sha256='1623a1cde2e130fedb46f98840c3a882f1cbb167b292ef2bd86d70baefc4280d') depends_on('r@2.10.0:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/py-quaternionic/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyQuaternionic(PythonPackage):
    """Interpret numpy arrays as quaternionic arrays with numba
    acceleration"""

    homepage = "https://github.com/moble/quaternionic"
    pypi = "quaternionic/quaternionic-1.0.1.tar.gz"

    maintainers = ['nilsvu', 'moble']

    version('1.0.1', sha256='ea69733d7311784963922bf08cc0c9c938b62fee2f91219f56544ff30658c10e')

    # Upper bound on python presumably tracks numba support at this release.
    depends_on('python@3.6:3.9', type=('build', 'run'))
    # Built with poetry rather than setuptools.
    depends_on('py-poetry-core@1.0.1:', type='build')
    depends_on('py-importlib-metadata@1:', when='^python@:3.7',
               type=('build', 'run'))
    depends_on('py-numpy@1.13:', type=('build', 'run'))
    depends_on('py-scipy@1:', type=('build', 'run'))
    depends_on('py-numba@0.50:', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/perl-soap-lite/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlSoapLite(PerlPackage):
    """Perl's Web Services Toolkit"""

    homepage = "https://metacpan.org/pod/SOAP::Lite"
    url = "http://search.cpan.org/CPAN/authors/id/P/PH/PHRED/SOAP-Lite-1.22.tar.gz"

    version('1.22', sha256='92f492f8722cb3002cd1dce11238cee5599bb5bd451a062966df45223d33693a')

    depends_on('perl-io-sessiondata', type=('build', 'run'))
    depends_on('perl-lwp-protocol-https', type=('build', 'run'))
    depends_on('perl-task-weaken', type=('build', 'run'))
    depends_on('perl-xml-parser-lite', type=('build', 'run'))
    depends_on('perl-xml-parser', type=('build', 'run'))
    depends_on('perl-test-warn', type=('build', 'run'))
    depends_on('perl-class-inspector', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/circos/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from os import symlink

from spack import *


class Circos(Package):
    """Circos is a software package for visualizing data and information."""

    homepage = "http://circos.ca/"
    url = "http://circos.ca/distribution/circos-0.69-6.tgz"

    version('0.69-6', sha256='52d29bfd294992199f738a8d546a49754b0125319a1685a28daca71348291566')

    depends_on('perl', type='run')
    depends_on('perl-clone', type='run')
    depends_on('perl-config-general', type='run')
    depends_on('perl-exporter-tiny', type='run')
    depends_on('perl-font-ttf', type='run')
    depends_on('perl-gd', type='run')
    depends_on('perl-io-string', type='run')
    depends_on('perl-list-moreutils', type='run')
    depends_on('perl-math-round', type='run')
    depends_on('perl-math-bezier', type='run')
    depends_on('perl-math-vecstat', type='run')
    depends_on('perl-params-validate', type='run')
    depends_on('perl-readonly', type='run')
    depends_on('perl-regexp-common', type='run')
    depends_on('perl-set-intspan', type='run')
    depends_on('perl-statistics-basic', type='run')
    depends_on('perl-svg', type='run')
    depends_on('perl-text-format', type='run')

    def install(self, spec, prefix):
        """Stage the whole distribution under lib/ and expose the driver.

        Circos expects its data and etc/ directories alongside the script,
        so the full tree is kept together and only a symlink lands in bin/.
        """
        circos_home = prefix.lib.circos
        install_tree('.', circos_home)

        mkdirp(prefix.bin)
        symlink(circos_home.bin.circos, prefix.bin.circos)
player1537-forks/spack
var/spack/repos/builtin/packages/r-graphlayouts/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RGraphlayouts(RPackage):
    """Additional Layout Algorithms for Network Visualizations.

    Several new layout algorithms to visualize networks are provided which
    are not part of 'igraph'. Most are based on the concept of stress
    majorization by Gansner et al. (2004)
    <doi:10.1007/978-3-540-31843-9_25>. Some more specific algorithms allow
    to emphasize hidden group structures in networks or focus on specific
    nodes."""

    cran = "graphlayouts"

    # Newest release first, per Spack convention.
    version("0.8.0", sha256="d724266778e4d97ca7a762253c293ffa3d09e2627cb1c3c7a654c690819defd0")
    version("0.7.1", sha256="380f8ccb0b08735694e83f661fd56a0d592a78448ae91b89c290ba8582d66717")
    version("0.5.0", sha256="83f61ce07580c5a64c7044c12b20d98ccf138c7e78ff12855cdfc206e1fab10d")

    depends_on("r@3.2.0:", type=("build", "run"))
    depends_on("r-igraph", type=("build", "run"))
    depends_on("r-rcpp", type=("build", "run"))
    depends_on("r-rcpparmadillo", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/py-atpublic/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyAtpublic(PythonPackage):
    """This library provides two very simple decorators that document the
    publicness of the names in your module."""

    homepage = "https://public.readthedocs.io"
    pypi = "atpublic/atpublic-2.1.2.tar.gz"

    version("2.1.2", sha256="82a2f2c0343ac67913f67cdee8fa4da294a4d6b863111527a459c8e4d1a646c8")
    version("2.1.1", sha256="fa1d48bcb85bbed90f6ffee6936578f65ff0e93aa607397bd88eaeb408bd96d8")

    depends_on("python@3.6:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    # typing.Protocol et al. only landed in the stdlib with Python 3.8.
    depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/templight-tools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class TemplightTools(CMakePackage):
    """Supporting tools for the Templight Profiler"""

    homepage = "https://github.com/mikael-s-persson/templight-tools"
    git = "https://github.com/mikael-s-persson/templight-tools.git"

    # No tagged releases are packaged; track the master branch.
    version("develop", branch="master")

    depends_on("cmake @2.8.7:", type="build")
    depends_on("boost @1.48.1: +filesystem +graph +program_options +test")
player1537-forks/spack
var/spack/repos/builtin/packages/r-reordercluster/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RReordercluster(RPackage):
    """Reordering the dendrogram according to the class labels.

    Tools for performing the leaf reordering for the dendrogram that
    preserves the hierarchical clustering result and at the same time tries
    to group instances from the same class together."""

    cran = "ReorderCluster"

    version("1.0", sha256="a87898faa20380aac3e06a52eedcb2f0eb2b35ab74fdc3435d40ee9f1d28476b")

    depends_on("r@2.10:", type=("build", "run"))
    depends_on("r-gplots", type=("build", "run"))
    depends_on("r-rcpp@0.11.0:", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/tig/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Tig(AutotoolsPackage):
    """Text-mode interface for git"""

    homepage = "https://jonas.github.io/tig/"
    url = "https://github.com/jonas/tig/releases/download/tig-2.2.2/tig-2.2.2.tar.gz"

    version("2.2.2", sha256="316214d87f7693abc0cbe8ebbb85decdf5e1b49d7ad760ac801af3dd73385e35")

    # Needed for the curses text UI.
    depends_on("ncurses")
player1537-forks/spack
var/spack/repos/builtin/packages/r-xml2/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RXml2(RPackage):
    """Parse XML.

    Work with XML files using a simple, consistent interface. Built on top of
    the 'libxml2' C library."""

    cran = "xml2"

    version('1.3.3', sha256='cb4e9c0d31618ed67d2bfa4c7b5e52680e11612ed356a8164b541d44163c1c8d')
    version('1.3.2', sha256='df22f9e7e3189d8c9b8804eaf0105324fdac983cffe743552f6d76613600a4cf')
    version('1.2.2', sha256='3050f147c4335be2925a576557bbda36bd52a5bba3110d47b740a2dd811a78f4')
    version('1.2.1', sha256='5615bbc94607efc3bc192551992b349091df802ae34b855cfa817733f2690605')
    version('1.1.1', sha256='00f3e3b66b76760c19da5f6dddc98e6f30de36a96b211e59e1a3f4ff58763116')

    depends_on('r@3.1.0:', type=('build', 'run'))
    depends_on('libxml2')
    # Older releases bundled compiled code via Rcpp/BH; dropped upstream later.
    depends_on('r-rcpp@0.12.12:', type=('build', 'run'), when='@:1.2')
    depends_on('r-bh', type=('build', 'run'), when='@:1.1.1')
player1537-forks/spack
var/spack/repos/builtin/packages/boostmplcartesianproduct/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Boostmplcartesianproduct(Package):
    """Cartesian_product is an extension to the Boost.MPL library and as
       such requires a version of the Boost libraries on your system.
    """

    homepage = "http://www.organicvectory.com/index.php?option=com_content&view=article&id=75:boostmplcartesianproduct&catid=42:boost&Itemid=78"
    url = "https://github.com/quinoacomputing/BoostMPLCartesianProduct/tarball/20161205"

    version("20161205", sha256="1fa8e367e4dc545b34016bf57d802858ce38baf40aff20f7c93b329895a18572")

    def install(self, spec, prefix):
        """Header-only: copy the MPL extension headers into include/boost/mpl."""
        dest = join_path(prefix.include, "boost", "mpl")
        install_tree(join_path("boost", "mpl"), dest)
player1537-forks/spack
var/spack/repos/builtin/packages/py-pybobyqa/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPybobyqa(PythonPackage):
    """Py-BOBYQA is a flexible package for solving bound-constrained general
    objective minimization, without requiring derivatives of the objective."""

    homepage = "https://github.com/numericalalgorithmsgroup/pybobyqa/"
    pypi = "Py-BOBYQA/Py-BOBYQA-1.3.tar.gz"

    version("1.3", sha256="7b0b27b7b9a7cfef94557c8832c0c30757e86764e32878677427381f0691a8fb")

    depends_on("py-setuptools", type="build")
    # Scientific-stack runtime requirements (alphabetical).
    depends_on("py-numpy@1.11:", type=("build", "run"))
    depends_on("py-pandas@0.17:", type=("build", "run"))
    depends_on("py-scipy@0.17:", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/r-dmrcate/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class RDmrcate(RPackage): """Methylation array and sequencing spatial analysis methods. De novo identification and extraction of differentially methylated regions (DMRs) from the human genome using Whole Genome Bisulfite Sequencing (WGBS) and Illumina Infinium Array (450K and EPIC) data. Provides functionality for filtering probes possibly confounded by SNPs and cross-hybridisation. Includes GRanges generation and plotting functions.""" bioc = "DMRcate" version('2.8.5', commit='<PASSWORD>10932a9<PASSWORD>3<PASSWORD>a6<PASSWORD>') version('2.4.1', commit='bc6242a0291a9b997872f575a4417d38550c9550') depends_on('r@3.6.0:', type=('build', 'run')) depends_on('r@4.0.0:', type=('build', 'run'), when='@2.8.5:') depends_on('r-experimenthub', type=('build', 'run')) depends_on('r-bsseq', type=('build', 'run')) depends_on('r-genomeinfodb', type=('build', 'run')) depends_on('r-limma', type=('build', 'run')) depends_on('r-edger', type=('build', 'run')) depends_on('r-dss', type=('build', 'run')) depends_on('r-minfi', type=('build', 'run')) depends_on('r-missmethyl', type=('build', 'run')) depends_on('r-genomicranges', type=('build', 'run')) depends_on('r-plyr', type=('build', 'run')) depends_on('r-gviz', type=('build', 'run')) depends_on('r-iranges', type=('build', 'run')) depends_on('r-s4vectors', type=('build', 'run')) depends_on('r-summarizedexperiment', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/openturns/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Openturns(CMakePackage):
    """OpenTURNS is a scientific C++ and Python library featuring an
    internal data model and algorithms dedicated to the treatment of
    uncertainties. The main goal of this library is to provide all
    functionalities needed to treat uncertainties in studies with industrial
    applications. Targeted users are all engineers who want to introduce the
    probabilistic dimension in their so far deterministic studies."""

    homepage = "https://openturns.github.io/www/"
    url = "https://github.com/openturns/openturns/archive/refs/tags/v1.18.tar.gz"
    git = "https://github.com/openturns/openturns.git"
    maintainers = ['liuyangzhuan']

    version('1.18', sha256='1840d3fd8b38fd5967b1fa04e49d8f760c2c497400430e97623595ca48754ae0')
    version('master', branch='master')

    variant('python', default=True, description='Build Python bindings')

    extends('python', when='+python')

    depends_on('mpi', type=('build', 'run'))
    depends_on('lapack', type=('build', 'run'))
    depends_on('cmake@2.8:', type='build')
    depends_on('swig', type=('build', 'run'))
    # NOTE(review): the Python-stack dependencies below are unconditional even
    # though the bindings are gated behind +python — confirm whether they
    # should carry when='+python'.
    depends_on('py-numpy@1.7:', type=('build', 'run'))
    depends_on('py-pandas', type=('build', 'run'))
    depends_on('py-matplotlib', type=('build', 'run'))
    depends_on('boost+system+serialization+thread', type=('build', 'run'))
    depends_on('intel-tbb', type=('build', 'run'))
    depends_on('py-cloudpickle', type=('build', 'run'))
    depends_on('py-urllib3', type=('build', 'run'))

    def cmake_args(self):
        """Assemble the CMake command line.

        Builds with the MPI compiler wrappers and points CMake at Spack's
        LAPACK; with +python it also pins the interpreter and install
        location so the system Python is never picked up.
        """
        spec = self.spec

        args = [
            '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
            '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
            '-DCMAKE_INSTALL_LIBDIR:STRING=%s' % self.prefix.lib,
            '-DCMAKE_INSTALL_BINDIR:STRING=%s' % self.prefix.bin,
            '-DLAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(";"),
        ]

        if '+python' in spec:
            args.extend([
                # By default picks up the system python not the Spack build
                '-DPYTHON_EXECUTABLE={0}'.format(spec['python'].command.path),
                # By default installs to the python prefix
                '-DPYTHON_SITE_PACKAGES={0}'.format(python_platlib),
            ])

        return args
player1537-forks/spack
var/spack/repos/builtin/packages/libmaxminddb/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libmaxminddb(AutotoolsPackage):
    """C library for the MaxMind DB file format"""

    homepage = "https://github.com/maxmind/libmaxminddb"
    url = "https://github.com/maxmind/libmaxminddb/releases/download/1.3.2/libmaxminddb-1.3.2.tar.gz"

    version("1.3.2", sha256="e6f881aa6bd8cfa154a44d965450620df1f714c6dc9dd9971ad98f6e04f6c0f0")

    def configure_args(self):
        """Return configure flags: a plain release build with verbose,
        dependency-tracking-free make output."""
        return [
            "--disable-debug",
            "--disable-dependency-tracking",
            "--disable-silent-rules",
        ]
player1537-forks/spack
var/spack/repos/builtin/packages/fujitsu-ssl2/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class FujitsuSsl2(Package):
    """Fujitsu SSL2 (Scientific Subroutine Library II) is Math library
    for Fujitsu compiler.
    Fujitsu SSL2 implementation only for Fujitsu compiler.
    Fujitsu SSL2 provides the function of blas, lapack and scalapack.
    """

    homepage = "https://www.fujitsu.com/us/"
    # Vendor-supplied: there is no source archive to fetch.
    has_code = False

    variant("parallel", default=True, description="Build with thread-parallel versions")

    # SSL2 ships with, and only works with, the Fujitsu compiler.
    conflicts("%arm")
    conflicts("%cce")
    conflicts("%apple-clang")
    conflicts("%clang")
    conflicts("%gcc")
    conflicts("%intel")
    conflicts("%nag")
    conflicts("%pgi")
    conflicts("%xl")
    conflicts("%xl_r")

    provides("blas")
    provides("lapack")
    provides("scalapack")

    def install(self, spec, prefix):
        # Never buildable from source; must be registered as an external.
        raise InstallError(
            "Fujitsu SSL2 is not installable; it is vendor supplied \
            You need to specify it as an external package in packages.yaml"
        )

    @property
    def blas_libs(self):
        # Assemble the link line by hand: library names differ between the
        # SVE-enabled a64fx builds and generic builds, and between the
        # thread-parallel and sequential flavours.
        spec = self.spec
        libslist = []
        if spec.target == "a64fx":  # Build with SVE support
            if "+parallel" in spec:  # parallel
                libslist.append("libfjlapackexsve.so")
            else:
                libslist.append("libfjlapacksve.so")
        else:
            if "+parallel" in spec:  # parallel
                libslist.append("libfjlapackex.so")
            else:
                libslist.append("libfjlapack.so")

        # OpenMP runtime is only needed for the parallel flavour.
        if "+parallel" in spec:  # parallel
            libslist.extend(["libfjomphk.so", "libfjomp.so"])

        if spec.target == "a64fx":  # Build with SVE support
            if "+parallel" in spec:  # parallel
                libslist.append("libssl2mtexsve.a")
            libslist.append("libssl2mtsve.a")
        else:
            if "+parallel" in spec:  # parallel
                libslist.append("libssl2mtex.a")
            libslist.append("libssl2mt.a")

        libslist.append("libfj90i.so")

        if spec.target == "a64fx":  # Build with SVE support
            libslist.append("libfj90fmt_sve.a")
        else:
            libslist.append("libfj90fmt.a")

        # Fortran runtime support libraries, always required.
        libslist.extend(["libfj90f.so", "libfjsrcinfo.so", "libfj90rt.so"])

        libspath = find(self.prefix.lib64, libslist, recursive=False)
        libs = LibraryList(libspath)

        return libs

    @property
    def lapack_libs(self):
        # LAPACK and BLAS live in the same set of vendor libraries.
        return self.blas_libs

    @property
    def scalapack_libs(self):
        # Same layout as blas_libs, plus the ScaLAPACK and MPI glue libraries.
        spec = self.spec
        libslist = []
        if spec.target == "a64fx":  # Build with SVE support
            libslist.append("libfjscalapacksve.so")
            if "+parallel" in spec:  # parallel
                libslist.append("libfjlapackexsve.so")
            else:
                libslist.append("libfjlapacksve.so")
            libslist.append("libscalapacksve.a")
        else:
            libslist.append("libfjscalapack.so")
            if "+parallel" in spec:  # parallel
                libslist.append("libfjlapackex.so")
            else:
                libslist.append("libfjlapack.so")
            libslist.append("libscalapack.a")

        libslist.extend(
            ["libmpi_usempi_ignore_tkr.so", "libmpi_mpifh.so"]
        )

        if "+parallel" in spec:  # parallel
            libslist.extend(["libfjomphk.so", "libfjomp.so"])

        if spec.target == "a64fx":  # Build with SVE support
            if "+parallel" in spec:  # parallel
                libslist.append("libssl2mtexsve.a")
            libslist.append("libssl2mtsve.a")
        else:
            if "+parallel" in spec:  # parallel
                libslist.append("libssl2mtex.a")
            libslist.append("libssl2mt.a")

        libslist.append("libfj90i.so")

        if spec.target == "a64fx":  # Build with SVE support
            libslist.append("libfj90fmt_sve.a")
        else:
            libslist.append("libfj90fmt.a")

        libslist.extend(["libfj90f.so", "libfjsrcinfo.so", "libfj90rt.so"])

        libspath = find(self.prefix.lib64, libslist, recursive=False)
        libs = LibraryList(libspath)

        return libs

    def setup_dependent_build_environment(self, env, dependent_spec):
        # Make the SSL2 headers visible to packages compiled with the
        # Fujitsu compiler wrappers (searched after the default paths).
        path = self.prefix.include
        env.append_flags(
            "fcc_ENV", "-idirafter " + path
        )
        env.append_flags(
            "FCC_ENV", "-idirafter " + path
        )

    @property
    def headers(self):
        # The C-compatible cssl headers live under the clang-comp directory.
        path = join_path(
            self.spec.prefix, "clang-comp"
        )
        headers = find_headers('cssl', path, recursive=True)
        return headers
player1537-forks/spack
var/spack/repos/builtin.mock/packages/hash-test4/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class HashTest4(Package): """This package isn't compared with others, but it contains constructs that package hashing logic has tripped over in the past. """ homepage = "http://www.hashtest4.org" url = "http://www.hashtest1.org/downloads/hashtest4-1.1.tar.bz2" version('1.1', 'a' * 32) def install(self, spec, prefix): pass @staticmethod def examine_prefix(pkg): pass run_after('install')( examine_prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/py-pytest-check-links/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPytestCheckLinks(PythonPackage):
    """pytest plugin that checks URLs for HTML-containing files."""

    homepage = "https://github.com/jupyterlab/pytest-check-links"
    pypi = "pytest-check-links/pytest_check_links-0.3.4.tar.gz"

    version("0.3.4", sha256="4b3216548431bf9796557e8ee8fd8e5e77a69a4690b3b2f9bcf6fb5af16a502b")

    # pbr drives the setuptools-based build.
    depends_on("py-pbr@1.9:", type="build")
    depends_on("py-setuptools@17.1:", type="build")
player1537-forks/spack
var/spack/repos/builtin/packages/r-rcppcnpy/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RRcppcnpy(RPackage):
    """Read-Write Support for 'NumPy' Files via 'Rcpp'.

    The 'cnpy' library written by <NAME> provides read and write facilities
    for files created with (or for) the 'NumPy' extension for 'Python'.
    Vectors and matrices of numeric types can be read or written to and from
    files as well as compressed files. Support for integer files is available
    if the package has been built with -std=c++11 which should be the default
    on all platforms since the release of R 3.3.0."""

    cran = "RcppCNPy"

    version("0.2.10", sha256="77d6fbc86520a08da40d44c0b82767099f8f719ca95870d91efff1a9cab1ab9c")
    version("0.2.9", sha256="733f004ad1a8b0e5aafbf547c4349d2df3118afd57f1ff99f20e39135c6edb30")

    depends_on("r@3.1.0:", type=("build", "run"))
    depends_on("r-rcpp", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/intel-gpu-tools/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import sys from spack import * class IntelGpuTools(AutotoolsPackage, XorgPackage): """Intel GPU Tools is a collection of tools for development and testing of the Intel DRM driver. There are many macro-level test suites that get used against the driver, including xtest, rendercheck, piglit, and oglconform, but failures from those can be difficult to track down to kernel changes, and many require complicated build procedures or specific testing environments to get useful results. Therefore, Intel GPU Tools includes low-level tools and tests specifically for development and testing of the Intel DRM Driver.""" homepage = "https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/" xorg_mirror_path = "app/intel-gpu-tools-1.16.tar.gz" version('1.20', sha256='c6ee992301e43ec14ef810ef532e2601ecf7399315f942207ae0dd568fd9c2b7') version('1.16', sha256='4874e6e7704c8d315deaf5b44cc9467ea5e502c7f816470a4a28827fcb34643f') depends_on('libdrm@2.4.64:') depends_on('libpciaccess@0.10:', when=(sys.platform != 'darwin')) depends_on('libunwind') depends_on('kmod') depends_on('cairo@1.12.0:') depends_on('glib') depends_on('flex', type='build') depends_on('bison', type='build') depends_on('python@3:', type='build') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build') def configure_args(self): glib_include = join_path(self.spec['glib'].prefix.include, 'glib-2.0') return ['CPPFLAGS=-I{0}'.format(glib_include)] # xrandr ? # gtk-doc-tools # python-docutils # x11proto-dri2-dev # xutils-dev
player1537-forks/spack
var/spack/repos/builtin/packages/py-word2number/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyWord2number(PythonPackage):
    """This is a Python module to convert number words (eg. twenty one) to
    numeric digits (21). It works for positive numbers upto the range of
    999,999,999,999 (i.e. billions)."""

    homepage = "https://w2n.readthedocs.io"
    pypi = "word2number/word2number-1.1.zip"

    version("1.1", sha256="70e27a5d387f67b04c71fbb7621c05930b19bfd26efd6851e6e0f9969dcde7d0")

    # Pure-Python package; setuptools is the only build requirement.
    depends_on("py-setuptools", type="build")
player1537-forks/spack
var/spack/repos/builtin/packages/atop/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Atop(Package):
    """Atop is an ASCII full-screen performance monitor for Linux"""

    homepage = "https://www.atoptool.nl/index.php"
    url = "https://www.atoptool.nl/download/atop-2.2-3.tar.gz"

    version("2.5.0", sha256="4b911057ce50463b6e8b3016c5963d48535c0cddeebc6eda817e292b22f93f33")
    version("2.4.0", sha256="be1c010a77086b7d98376fce96514afcd73c3f20a8d1fe01520899ff69a73d69")
    version("2.3.0", sha256="73e4725de0bafac8c63b032e8479e2305e3962afbe977ec1abd45f9e104eb264")
    version("2.2.6", sha256="d0386840ee4df36e5d0ad55f144661b434d9ad35d94deadc0405b514485db615")
    version("2.2-3", sha256="c785b8a2355be28b3de6b58a8ea4c4fcab8fadeaa57a99afeb03c66fac8e055d")

    depends_on("zlib")
    depends_on("ncurses")

    def setup_build_environment(self, env):
        # The ncurses terminfo symbols live in libtinfo.
        env.append_flags("LDFLAGS", "-ltinfo")

    def install(self, spec, prefix):
        """Build with the stock Makefile and install the binary and man page
        by hand (the upstream install target wants root)."""
        make()

        mkdirp(prefix.bin)
        install("atop", join_path(prefix.bin, "atop"))

        man1 = join_path(prefix.man, "man1")
        mkdirp(man1)
        install(join_path("man", "atop.1"), join_path(man1, "atop.1"))
player1537-forks/spack
var/spack/repos/builtin/packages/py-scikit-build/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyScikitBuild(PythonPackage):
    """scikit-build is an improved build system generator for CPython
    C/C++/Fortran/Cython extensions. It provides better support for
    additional compilers, build systems, cross compilation, and locating
    dependencies and their associated build requirements. The scikit-build
    package is fundamentally just glue between the setuptools Python module
    and CMake."""

    homepage = "https://scikit-build.readthedocs.io/en/latest/"
    url = "https://github.com/scikit-build/scikit-build/archive/0.10.0.tar.gz"

    maintainers = ['coreyjadams']

    version("0.12.0", sha256="c32a415d2e7920a4a966b037403c93b02c8a958d8badf3c60abd4b4493f7d988")
    version("0.10.0", sha256="2beec252813b20327072c15e9d997f15972aedcc6a130d0154979ff0fdb1b010")

    depends_on("py-setuptools@28.0.0:", type=("build", "run"))
    depends_on("py-packaging", type=("build", "run"))
    depends_on("py-wheel@0.29.0:", type=("build", "run"))
    # distro became a dependency with the 0.11 release.
    depends_on("py-distro", type=("build", "run"), when="@0.11:")
player1537-forks/spack
var/spack/repos/builtin/packages/tramonto/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Tramonto(CMakePackage):
    """Tramonto: Software for Nanostructured Fluids in Materials and
    Biology"""

    homepage = "https://software.sandia.gov/tramonto/"
    git = "https://github.com/Tramonto/Tramonto.git"

    version("develop", branch="master")

    # NOX solver support from Trilinos is required; 13.x changed APIs.
    depends_on("trilinos@:12+nox")

    def cmake_args(self):
        """Point the build at the Trilinos CMake package configuration."""
        trilinos_cmake = "%s/lib/cmake/Trilinos" % self.spec["trilinos"].prefix
        return ["-DTRILINOS_PATH:PATH=%s" % trilinos_cmake]
player1537-forks/spack
var/spack/repos/builtin/packages/openslide/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Openslide(AutotoolsPackage):
    """OpenSlide reads whole slide image files."""

    homepage = "https://openslide.org/"
    url = "https://github.com/openslide/openslide/releases/download/v3.4.1/openslide-3.4.1.tar.xz"

    version("3.4.1", sha256="9938034dba7f48fadc90a2cdf8cfe94c5613b04098d1348a5ff19da95b990564")

    depends_on("pkgconfig", type="build")

    # Image-format and rendering libraries used by the slide readers.
    depends_on("openjpeg")
    depends_on("jpeg")
    depends_on("libtiff")
    depends_on("libxml2")
    depends_on("sqlite@3.6:")
    depends_on("glib")
    depends_on("cairo+pdf")
    depends_on("gdk-pixbuf")
player1537-forks/spack
var/spack/repos/builtin/packages/r-shape/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RShape(RPackage):
    """Functions for Plotting Graphical Shapes, Colors.

    Functions for plotting graphical shapes such as ellipses, circles,
    cylinders, arrows, ..."""

    cran = "shape"

    version("1.4.6", sha256="b9103e5ed05c223c8147dbe3b87a0d73184697343634a353a2ae722f7ace0b7b")
    version("1.4.5", sha256="094a79b8f42226189227fd7af71868e42106caa25a4d7f80a26977e8bc84189f")
    version("1.4.4", sha256="f4cb1b7d7c84cf08d2fa97f712ea7eb53ed5fa16e5c7293b820bceabea984d41")
    version("1.4.3", sha256="720f6ca9c70a39a3900af9d074bff864b18ac58013b21d48b779047481b93ded")
    version("1.4.2", sha256="c6c08ba9cc2e90e5c9d3d5223529b57061a041f637886ad7665b9fa27465637a")

    depends_on("r@2.0.1:", type=("build", "run"))
player1537-forks/spack
var/spack/repos/builtin/packages/sst-macro/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class SstMacro(AutotoolsPackage):
    """The Structural Simulation Toolkit Macroscale Element Library simulates
    large-scale parallel computer architectures for the coarse-grained study
    of distributed-memory applications. The simulator is driven from either a
    trace file or skeleton application. SST/macro's modular architecture can
    be extended with additional network models, trace file formats, software
    services, and processor models.
    """

    homepage = "http://sst.sandia.gov/about_sstmacro.html"
    git = "https://github.com/sstsimulator/sst-macro.git"
    url = "https://github.com/sstsimulator/sst-macro/releases/download/v11.0.0_Final/sstmacro-11.0.0.tar.gz"

    maintainers = ['jjwilke']

    version('11.0.0', sha256='30367baed670b5b501320a068671556c9071286a0f0c478f9994a30d8fe5bdea')
    version('10.1.0', sha256='e15d99ce58d282fdff849af6de267746a4c89f3b8c5ab6c1e1e7b53a01127e73')
    version('10.0.0', sha256='064b732256f3bec9b553e00bcbc9a1d82172ec194f2b69c8797f585200b12566')
    version('master', branch='master')
    version('develop', branch='devel')

    # The autotools chain is only needed when bootstrapping from git.
    depends_on('autoconf@1.68:', type='build', when='@master:')
    depends_on('automake@1.11.1:', type='build', when='@master:')
    depends_on('libtool@1.2.4:', type='build', when='@master:')
    depends_on('m4', type='build', when='@master:')
    depends_on('binutils', type='build')
    depends_on('zlib', type=('build', 'link'))
    depends_on('otf2', when='+otf2')
    depends_on('llvm+clang@5:9', when='+skeletonizer')
    depends_on('mpi', when='+pdes_mpi')
    # The sst-core version must track the sst-macro version exactly.
    depends_on('sst-core@develop',  when='@develop+core')
    depends_on('sst-core@master',   when='@master+core')
    depends_on('sst-core@10.1.0',   when='@10.1.0+core')
    depends_on('sst-core@10.0.0',   when='@10.0.0+core')
    depends_on('gettext')

    variant('pdes_threads', default=True,
            description='Enable thread-parallel PDES simulation')
    variant('pdes_mpi', default=False,
            description='Enable distributed PDES simulation')
    variant('core', default=False,
            description='Use SST Core for PDES')
    variant('otf2', default=False,
            description='Enable OTF2 trace emission and replay support')
    variant('skeletonizer', default=False,
            description='Enable Clang source-to-source autoskeletonization')
    variant('static', default=True, description='Build static libraries')
    variant('shared', default=True, description='Build shared libraries')
    variant('werror', default=False,
            description='Build with all warnings as errors')
    variant('warnings', default=False,
            description='Build with all possible warnings')

    # force out-of-source builds
    build_directory = 'spack-build'

    def autoreconf(self, spec, prefix):
        # Upstream wraps autoreconf in a bootstrap script; run it via bash.
        bash = which('bash')
        bash('./bootstrap.sh')

    def configure_args(self):
        """Translate variants into configure flags.

        Also selects the MPI compiler wrappers (via the build environment)
        when MPI-based PDES is requested either directly or through the
        chosen sst-core.
        """
        args = ['--disable-regex']

        spec = self.spec
        args.append(
            '--enable-static=%s' % ('yes' if '+static' in spec else 'no'))
        args.append(
            '--enable-shared=%s' % ('yes' if '+shared' in spec else 'no'))

        if spec.satisfies("@8.0.0:"):
            args.extend([
                '--%sable-otf2' % ('en' if '+otf2' in spec else 'dis'),
                '--%sable-multithread' % (
                    'en' if '+pdes_threads' in spec else 'dis')
            ])
            if '+skeletonizer' in spec:
                args.append('--with-clang=' + spec['llvm'].prefix)

        if spec.satisfies("@10:"):
            if "+warnings" in spec:
                args.append("--with-warnings")
            if "+werror" in spec:
                args.append("--with-werror")

        if '+core' in spec:
            args.append('--with-sst-core=%s' % spec['sst-core'].prefix)

        # Optional MPI support
        # MPI is also needed when the linked sst-core was built with it.
        need_core_mpi = False
        if "+core" in spec:
            if "+pdes_mpi" in spec["sst-core"]:
                need_core_mpi = True
        if '+pdes_mpi' in spec or need_core_mpi:
            env['CC'] = spec['mpi'].mpicc
            env['CXX'] = spec['mpi'].mpicxx
            env['F77'] = spec['mpi'].mpif77
            env['FC'] = spec['mpi'].mpifc

        return args
player1537-forks/spack
var/spack/repos/builtin/packages/py-sphinx-argparse/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PySphinxArgparse(PythonPackage):
    """Sphinx extension to automatically document argparse-based commands."""

    homepage = "https://pypi.org/project/sphinx-argparse"
    pypi = "sphinx-argparse/sphinx-argparse-0.3.1.tar.gz"

    maintainers = ['sethrj']

    version("0.3.1", sha256="82151cbd43ccec94a1530155f4ad34f251aaca6a0ffd5516d7fadf952d32dc1e")

    depends_on("python@2.7.0:2.7,3.5:", type=("build", "run"))
    depends_on("py-sphinx@1.2.0:", type=("build", "run"))
    # Built via poetry-core with a setuptools shim.
    depends_on("py-poetry-core", type="build")
    depends_on("py-setuptools", type="build")
player1537-forks/spack
var/spack/repos/builtin/packages/spherepack/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Spherepack(Package):
    """SPHEREPACK - A Package for Modeling Geophysical Processes"""

    homepage = "https://www2.cisl.ucar.edu/resources/legacy/spherepack"
    url = "https://www2.cisl.ucar.edu/sites/default/files/spherepack3.2.tar"

    version('3.2', sha256='d58ef8cbc45cf2ad24f73a9f73f5f9d4fbe03cd9e2e7722e526fffb68be581ba')

    def install(self, spec, prefix):
        """Build the static library and test driver, then install lib/.

        Raises InstallError when no Fortran compiler is available.
        """
        if self.compiler.fc is None:
            raise InstallError("SPHEREPACK requires a Fortran 90 compiler")

        # Use the Fortran compiler Spack detected rather than a hard-coded
        # `f90`, which is not guaranteed to exist on PATH and would bypass
        # the very compiler the guard above just checked for.
        fortran = "F90={0} -O2".format(self.compiler.fc)
        make("MAKE=make", fortran, "AR=ar", "libspherepack")
        make("MAKE=make", fortran, "AR=ar", "testspherepack")
        install_tree("lib", prefix.lib)
player1537-forks/spack
var/spack/repos/builtin/packages/perl-class-inspector/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PerlClassInspector(PerlPackage):
    """Get information about a class and its structure"""

    homepage = "https://metacpan.org/pod/Class::Inspector"
    url = "http://search.cpan.org/CPAN/authors/id/P/PL/PLICEASE/Class-Inspector-1.32.tar.gz"

    # Pure-Perl module with no extra dependencies beyond perl itself.
    version("1.32", sha256="cefadc8b5338e43e570bc43f583e7c98d535c17b196bcf9084bb41d561cc0535")
player1537-forks/spack
var/spack/repos/builtin/packages/py-getorganelle/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyGetorganelle(PythonPackage):
    """Organelle Genome Assembly Toolkit (Chloroplast/Mitocondrial/ITS)"""

    homepage = "https://github.com/Kinggerm/GetOrganelle"
    url = "https://github.com/Kinggerm/GetOrganelle/archive/refs/tags/1.7.5.0.tar.gz"

    maintainers = ['dorton21']

    version('1.7.5.0', sha256='c498196737726cb4c0158f23037bf301a069f5028ece729bb4d09c7d915df93d')

    depends_on('py-setuptools', type='build')
    depends_on('py-numpy@1.16.4:', type=('build', 'run'))
    depends_on('py-scipy@1.3.0:', type=('build', 'run'))
    depends_on('py-sympy@1.4:', type=('build', 'run'))
    depends_on('py-requests', type=('build', 'run'))
    depends_on('bowtie2', type='run')
    depends_on('spades', type='run')
    depends_on('blast-plus', type='run')

    def setup_run_environment(self, env):
        """Expose GetOrganelle's entry-point scripts on PATH.

        E.g. get_organelle_config.py, get_organelle_from_reads.py and the
        helpers under Utilities/.
        """
        # Fixed: the original referenced a bare ``prefix``, which is not
        # defined in this scope (NameError at runtime); use self.prefix.
        env.prepend_path('PATH', self.prefix)
        env.prepend_path('PATH', self.prefix.Utilities)
player1537-forks/spack
var/spack/repos/builtin/packages/commons-lang/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class CommonsLang(Package):
    """The standard Java libraries fail to provide enough methods for
    manipulation of its core classes. Apache Commons Lang provides these
    extra methods.

    Lang provides a host of helper utilities for the java.lang API, notably
    String manipulation methods, basic numerical methods, object reflection,
    concurrency, creation and serialization and System properties.
    Additionally it contains basic enhancements to java.util.Date and a
    series of utilities dedicated to help with building methods, such as
    hashCode, toString and equals."""

    homepage = "https://commons.apache.org/proper/commons-lang/"
    url = "https://archive.apache.org/dist/commons/lang/binaries/commons-lang-2.6-bin.tar.gz"

    version('2.6', sha256='ff6a244bb71a9a1c859e81cb744d0ce698c20e04f13a7ef7dbffb99c8122752c')
    version('2.4', sha256='00e6b3174e31196d726c14302c8e7e9ba9b8409d57a8a9821c7648beeda31c5e')

    extends('jdk')
    depends_on('java@2:', type='run')

    def install(self, spec, prefix):
        # Binary distribution: just install the pre-built jar.
        install('commons-lang-{0}.jar'.format(self.version), prefix)
player1537-forks/spack
var/spack/repos/builtin/packages/r-lhs/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class RLhs(RPackage):
    """Latin Hypercube Samples.

    Provides a number of methods for creating and augmenting Latin Hypercube
    Samples."""

    cran = "lhs"

    version('1.1.3', sha256='e43b8d48db1cf26013697e2a798ed1d31d1ee1790f2ebfecb280176c0e0c06d1')
    version('1.1.1', sha256='903e9f2adde87f6f9ad41dd52ff83d28a645dba69934c7535142cb48f10090dc')
    version('1.0.1', sha256='a4d5ac0c6f585f2880364c867fa94e6554698beb65d3678ba5938dd84fc6ea53')
    version('1.0', sha256='38c53482b360bdea89ddcfadf6d45476c80b99aee8902f97c5e97975903e2745')
    version('0.16', sha256='9cd199c3b5b2be1736d585ef0fd39a00e31fc015a053333a7a319668d0809425')

    depends_on('r@3.3.0:', type=('build', 'run'))
    # 1.0 and later raised the minimum R version and switched to Rcpp.
    depends_on('r@3.4.0:', type=('build', 'run'), when='@1.0:')
    depends_on('r-rcpp', type=('build', 'run'), when='@1.0:')
player1537-forks/spack
var/spack/repos/builtin/packages/py-pyflakes/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPyflakes(PythonPackage):
    """A simple program which checks Python source files for errors."""

    homepage = "https://github.com/PyCQA/pyflakes"
    pypi = "pyflakes/pyflakes-2.4.0.tar.gz"

    version('2.4.0', sha256='05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c')
    version('2.3.0', sha256='e59fd8e750e588358f1b8885e5a4751203a0516e0ee6d34811089ac294c8806f')
    version('2.2.0', sha256='35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8')
    version('2.1.1', sha256='d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2')
    version('2.1.0', sha256='5e8c00e30c464c99e0b501dc160b13a14af7f27d4dffb529c556e30a159e231d')
    version('1.6.0', sha256='8d616a382f243dbf19b54743f280b80198be0bca3a5396f1d2e1fca6223e8805')
    version('1.5.0', sha256='aa0d4dff45c0cc2214ba158d29280f8fa1129f3e87858ef825930845146337f4')
    version('1.4.0', sha256='05c8a1702088e9b54acb422f78210afc6074b3472afa7a0a77f0b8aa3f5db605')
    version('1.3.0', sha256='a4f93317c97a9d9ed71d6ecfe08b68e3de9fea3f4d94dcd1d9d83ccbf929bc31')
    version('1.2.3', sha256='2e4a1b636d8809d8f0a69f341acf15b2e401a3221ede11be439911d23ce2139e')
    version('1.2.2', sha256='58741f9d3bffeba8f88452c1eddcf1b3eee464560e4589e4b81de8b3c9e42e4d')
    version('1.2.1', sha256='7e5e3a5e7ce8d1afb9cbcff2bb10cffaf83e1d94ab7c78eb86a715a88c32e22f')
    version('1.2.0', sha256='3633e000ffdc307ff1a7d7450e895ff8813e20b084ef263b5669eef9bc4c7a52')
    version('1.1.0', sha256='e5f959931987e2be178781554b485d52342ec9f1b43f891d2dad07a691c7a89a')
    version('0.9.2', sha256='02691c23ce699f252874b7c27f14cf26e3d4e82b58e5d584f000b7ab5be36a5f')
    version('0.9.1', sha256='baad29ac1e884c7077eb32ed1d9ee5cf30bf4b888329e1fcb51b9aa5298cb3b9')
    version('0.9.0', sha256='4c4d73085ce5de9d8147011c060d129659baa1111d1a5a3035f2bd03f2976538')

    depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))

    # Most Python packages only require py-setuptools as a build dependency.
    # However, py-pyflakes requires py-setuptools during runtime as well.
    depends_on('py-setuptools', type=('build', 'run'))
player1537-forks/spack
var/spack/repos/builtin/packages/rsync/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re

from spack import *


class Rsync(AutotoolsPackage):
    """An open source utility that provides fast incremental file transfer."""

    homepage = "https://rsync.samba.org"
    url = "https://download.samba.org/pub/rsync/src/rsync-3.1.2.tar.gz"

    version('3.2.3', sha256='becc3c504ceea499f4167a260040ccf4d9f2ef9499ad5683c179a697146ce50e')
    version('3.2.2', sha256='644bd3841779507665211fd7db8359c8a10670c57e305b4aab61b4e40037afa8')
    version('3.1.3', sha256='55cc554efec5fdaad70de921cd5a5eeb6c29a95524c715f3bbf849235b0800c0')
    version('3.1.2', sha256='ecfa62a7fa3c4c18b9eccd8c16eaddee4bd308a76ea50b5c02a5840f09c0a1c2')
    version('3.1.1', sha256='7de4364fcf5fe42f3bdb514417f1c40d10bbca896abe7e7f2c581c6ea08a2621')

    depends_on('zlib')
    depends_on('popt')
    # rsync 3.2 added new compression/checksum backends and TLS support.
    depends_on('openssl', when='@3.2:')
    depends_on('xxhash', when='@3.2:')
    depends_on('zstd', when='@3.2:')
    depends_on('lz4', when='@3.2:')

    conflicts('%nvhpc')

    executables = ['^rsync$']

    @classmethod
    def determine_version(cls, exe):
        """Parse ``rsync --version`` output for external detection."""
        output = Executable(exe)('--version', output=str, error=str)
        match = re.search(r'rsync\s+version\s+(\S+)', output)
        return match.group(1) if match else None

    def configure_args(self):
        # Always link against Spack's zlib, not the bundled copy.
        return ['--with-included-zlib=no']
player1537-forks/spack
var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import platform

from spack import *


class IntelOneapiAdvisor(IntelOneApiPackage):
    """Intel Advisor is a design and analysis tool for achieving
    high application performance. This is done through
    efficient threading, vectorization, and memory use, and
    GPU offload on current and future Intel hardware."""

    maintainers = ['rscohn2']

    homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html'

    # Intel only ships the oneAPI installers for Linux; guard the version
    # directives so the package is a no-op elsewhere.
    if platform.system() == 'Linux':
        version('2022.0.0',
                url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18369/l_oneapi_advisor_p_2022.0.0.92_offline.sh',
                sha256='f1c4317c2222c56fb2e292513f7eec7ec27eb1049d3600cb975bc08ed1477993',
                expand=False)
        version('2021.4.0',
                url='https://registrationcenter-download.intel.com/akdlm/irc_nas/18220/l_oneapi_advisor_p_2021.4.0.389_offline.sh',
                sha256='dd948f7312629d9975e12a57664f736b8e011de948771b4c05ad444438532be8',
                expand=False)

    @property
    def component_dir(self):
        # Subdirectory of the oneAPI install tree holding this component.
        return 'advisor'
player1537-forks/spack
var/spack/repos/builtin.mock/packages/build-warnings/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class BuildWarnings(Package):
    """This package's install fails but only emits warnings."""

    homepage = "http://www.example.com/trivial_install"
    url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        # Write a fake ``configure`` that prints warning-looking output and
        # then exits non-zero, so tests can check warning extraction from a
        # failed build.
        with open('configure', 'w') as f:
            f.write("""#!/bin/sh\n
echo 'checking for gcc... /Users/gamblin2/src/spack/lib/spack/env/clang/clang'
echo 'checking whether the C compiler works... yes'
echo 'checking for C compiler default output file name... a.out'
echo 'WARNING: ALL CAPITAL WARNING!'
echo 'checking for suffix of executables...'
echo 'foo.c:89: warning: some weird warning!'
exit 1
""")
        configure()