diff --git a/CHANGELOG.md b/CHANGELOG.md index b7e4cc0ad345a16997ee01dffb676cdd609a496d..7140e90f29a4b9ec5802ece4b9e59c2053f39eab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,34 @@ +# v0.15.3 (2020-07-28) + +This release contains the following bugfixes: + +* Fix handling of relative view paths (#17721) +* Fixes for binary relocation (#17418, #17455) +* Fix redundant printing of error messages in build environment (#17709) + +It also adds a support script for Spack tutorials: + +* Add a tutorial setup script to share/spack (#17705, #17722) + +# v0.15.2 (2020-07-23) + +This minor release includes two new features: + +* Spack install verbosity is decreased, and more debug levels are added (#17546) +* The $spack/share/spack/keys directory contains public keys that may be optionally trusted for public binary mirrors (#17684) + +This release also includes several important fixes: + +* MPICC and related variables are now cleaned in the build environment (#17450) +* LLVM flang only builds CUDA offload components when +cuda (#17466) +* CI pipelines no longer upload user environments that can contain secrets to the internet (#17545) +* CI pipelines add bootstrapped compilers to the compiler config (#17536) +* `spack buildcache list` does not exit on first failure and lists later mirrors (#17565) +* Apple's "gcc" executable that is an apple-clang compiler does not generate a gcc compiler config (#17589) +* Mixed compiler toolchains are merged more naturally across different compiler suffixes (#17590) +* Cray Shasta platforms detect the OS properly (#17467) +* Additional minor fixes. + # v0.15.1 (2020-07-10) This minor release includes several important fixes: diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index b295341d489d5e0d00e0bb78d3692471406d2361..5fd7163e2e7c13c483aef28e993c1432c3c5ada6 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -174,8 +174,9 @@ def _lock(self, op, timeout=None): # If the file were writable, we'd have opened it 'r+' raise LockROFileError(self.path) - tty.debug("{0} locking [{1}:{2}]: timeout {3} sec" - .format(lock_type[op], self._start, self._length, timeout)) + self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec" + .format(lock_type[op], self._start, self._length, + timeout)) poll_intervals = iter(Lock._poll_interval_generator()) start_time = time.time() @@ -211,14 +212,14 @@ def _poll_lock(self, op): # help for debugging distributed locking if self.debug: # All locks read the owner PID and host - self._read_debug_data() - tty.debug('{0} locked {1} [{2}:{3}] (owner={4})' - .format(lock_type[op], self.path, - self._start, self._length, self.pid)) + self._read_log_debug_data() + self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})' + .format(lock_type[op], self.path, + self._start, self._length, self.pid)) # Exclusive locks write their PID/host if op == fcntl.LOCK_EX: - self._write_debug_data() + self._write_log_debug_data() return True @@ -245,7 +246,7 @@ def _ensure_parent_directory(self): raise return parent - def _read_debug_data(self): + def _read_log_debug_data(self): """Read PID and host data out of the file if it is there.""" self.old_pid = self.pid self.old_host = self.host @@ -257,7 +258,7 @@ def _read_debug_data(self): _, _, self.host = host.rpartition('=') self.pid = int(self.pid) - def _write_debug_data(self): + def _write_log_debug_data(self): """Write PID and host data to the file, recording old values.""" self.old_pid = self.pid self.old_host = self.host @@ -473,9 +474,6 @@ def
release_write(self, release_fn=None): else: return False - def _debug(self, *args): - tty.debug(*args) - def _get_counts_desc(self): return '(reads {0}, writes {1})'.format(self._reads, self._writes) \ if tty.is_verbose() else '' @@ -484,58 +482,50 @@ def _log_acquired(self, locktype, wait_time, nattempts): attempts_part = _attempts_str(wait_time, nattempts) now = datetime.now() desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f") - self._debug(self._status_msg(locktype, '{0}{1}'. - format(desc, attempts_part))) + self._log_debug(self._status_msg(locktype, '{0}{1}' + .format(desc, attempts_part))) def _log_acquiring(self, locktype): - self._debug2(self._status_msg(locktype, 'Acquiring')) + self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3) + + def _log_debug(self, *args, **kwargs): + """Output lock debug messages.""" + kwargs['level'] = kwargs.get('level', 2) + tty.debug(*args, **kwargs) def _log_downgraded(self, wait_time, nattempts): attempts_part = _attempts_str(wait_time, nattempts) now = datetime.now() desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f") - self._debug(self._status_msg('READ LOCK', '{0}{1}' - .format(desc, attempts_part))) + self._log_debug(self._status_msg('READ LOCK', '{0}{1}' + .format(desc, attempts_part))) def _log_downgrading(self): - self._debug2(self._status_msg('WRITE LOCK', 'Downgrading')) + self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3) def _log_released(self, locktype): now = datetime.now() desc = 'Released at %s' % now.strftime("%H:%M:%S.%f") - self._debug(self._status_msg(locktype, desc)) + self._log_debug(self._status_msg(locktype, desc)) def _log_releasing(self, locktype): - self._debug2(self._status_msg(locktype, 'Releasing')) + self._log_debug(self._status_msg(locktype, 'Releasing'), level=3) def _log_upgraded(self, wait_time, nattempts): attempts_part = _attempts_str(wait_time, nattempts) now = datetime.now() desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f") - self._debug(self._status_msg('WRITE LOCK', '{0}{1}'. - format(desc, attempts_part))) + self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'. + format(desc, attempts_part))) def _log_upgrading(self): - self._debug2(self._status_msg('READ LOCK', 'Upgrading')) + self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3) def _status_msg(self, locktype, status): status_desc = '[{0}] {1}'.format(status, self._get_counts_desc()) return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format( locktype, self, status_desc) - def _debug2(self, *args): - # TODO: Easy place to make a single, temporary change to the - # TODO: debug level associated with the more detailed messages. - # TODO: - # TODO: Someday it would be great if we could switch this to - # TODO: another level, perhaps _between_ debug and verbose, or - # TODO: some other form of filtering so the first level of - # TODO: debugging doesn't have to generate these messages. Using - # TODO: verbose here did not work as expected because tests like - # TODO: test_spec_json will write the verbose messages to the - # TODO: output that is used to check test correctness. - tty.debug(*args) - class LockTransaction(object): """Simple nested transaction context manager that uses a file lock. 
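Note on the lock.py hunks above: the two ad-hoc helpers `_debug` and `_debug2` are folded into a single `_log_debug` that forwards a numeric verbosity level to `tty.debug`, defaulting to level 2 for routine lock messages and passing `level=3` explicitly for the chattier Acquiring/Releasing/Upgrading messages. A minimal sketch of the pattern, assuming it is run from a Spack checkout so that `llnl.util.tty` is importable (the class below is illustrative, not Spack's Lock):

import llnl.util.tty as tty

class LockDebugMixin(object):
    """Illustrative mixin showing the consolidated debug helper."""

    def _log_debug(self, *args, **kwargs):
        # Routine lock messages default to debug level 2 (spack -dd).
        kwargs['level'] = kwargs.get('level', 2)
        tty.debug(*args, **kwargs)

    def _log_acquiring(self, locktype):
        # The noisiest messages are pushed down to level 3 (spack -ddd).
        self._log_debug('{0}: Acquiring'.format(locktype), level=3)

With `tty.set_debug(2)` the Acquired/Released messages print while Acquiring/Releasing stay quiet; `tty.set_debug(3)` shows both.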
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 41eef5d2842a5e87d1acd802c2bb56fc8d06f266..79ca5a99292dd7d3769e7b0b2e55539e9cf3a1e4 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -19,7 +19,8 @@ from llnl.util.tty.color import cprint, cwrite, cescape, clen -_debug = False +# Globals +_debug = 0 _verbose = False _stacktrace = False _timestamp = False @@ -29,21 +30,26 @@ indent = " " +def debug_level(): + return _debug + + def is_verbose(): return _verbose -def is_debug(): - return _debug +def is_debug(level=1): + return _debug >= level def is_stacktrace(): return _stacktrace -def set_debug(flag): +def set_debug(level=0): global _debug - _debug = flag + assert level >= 0, 'Debug level must be a non-negative value' + _debug = level def set_verbose(flag): @@ -132,12 +138,17 @@ def process_stacktrace(countback): return st_text +def show_pid(): + return is_debug(2) + + def get_timestamp(force=False): """Get a string timestamp""" if _debug or _timestamp or force: # Note inclusion of the PID is useful for parallel builds. - return '[{0}, {1}] '.format( - datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid()) + pid = ', {0}'.format(os.getpid()) if show_pid() else '' + return '[{0}{1}] '.format( + datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid) else: return '' @@ -197,7 +208,8 @@ def verbose(message, *args, **kwargs): def debug(message, *args, **kwargs): - if _debug: + level = kwargs.get('level', 1) + if is_debug(level): kwargs.setdefault('format', 'g') kwargs.setdefault('stream', sys.stderr) info(message, *args, **kwargs) diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index 76e07c0e080b5e8de6ad653e8137313a49420dc5..de5ffa8eec62148948b63f49cd9bfd27b5a8ea6a 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -323,14 +323,14 @@ class log_output(object): work within test frameworks like nose and pytest. """ - def __init__(self, file_like=None, echo=False, debug=False, buffer=False): + def __init__(self, file_like=None, echo=False, debug=0, buffer=False): """Create a new output log context manager. Args: file_like (str or stream): open file object or name of file where output should be logged echo (bool): whether to echo output in addition to logging it - debug (bool): whether to enable tty debug mode during logging + debug (int): positive to enable tty debug mode during logging buffer (bool): pass buffer=True to skip unbuffering output; note this doesn't set up any *new* buffering @@ -355,7 +355,7 @@ def __init__(self, file_like=None, echo=False, debug=False, buffer=False): self._active = False # used to prevent re-entry def __call__(self, file_like=None, echo=None, debug=None, buffer=None): - """Thie behaves the same as init. It allows a logger to be reused. + """This behaves the same as init. It allows a logger to be reused. Arguments are the same as for ``__init__()``. Args here take precedence over those passed to ``__init__()``.
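The tty hunks above replace the boolean `_debug` flag with an integer level. A short usage sketch of the resulting API, assuming it is run from a Spack checkout so that `llnl.util.tty` is importable (the function names are exactly those defined in the diff above):

import llnl.util.tty as tty

tty.set_debug(2)  # equivalent to invoking Spack with -dd

# is_debug(level) is a threshold check against the global debug level.
assert tty.is_debug(1) and tty.is_debug(2)
assert not tty.is_debug(3)

tty.debug('shown at -d and above')                 # defaults to level=1
tty.debug('shown only at -dd and above', level=2)  # filtered out at -d

# At level >= 2, show_pid() is true and timestamps include the PID.
print(tty.get_timestamp(force=True))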
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index ceb1e256b1a723904d27df5feff55b083fca12fb..862c8baa606dab79686aeb3869eb73dc061301ca 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 15, 1) +spack_version_info = (0, 15, 3) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 6041891f44106d9bb28a8e211e406a09247d56e2..f53501cbd66dc822acbfe9706de359cb1aa81c35 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -466,8 +466,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, web_util.push_to_url( specfile_path, remote_specfile_path, keep_original=False) - tty.msg('Buildcache for "%s" written to \n %s' % - (spec, remote_spackfile_path)) + tty.debug('Buildcache for "{0}" written to \n {1}' + .format(spec, remote_spackfile_path)) try: # create an index.html for the build_cache directory so specs can be @@ -843,13 +843,13 @@ def get_spec(spec=None, force=False): mirror_dir = url_util.local_file_path(fetch_url_build_cache) if mirror_dir: - tty.msg("Finding buildcaches in %s" % mirror_dir) + tty.debug('Finding buildcaches in {0}'.format(mirror_dir)) link = url_util.join(fetch_url_build_cache, specfile_name) urls.add(link) else: - tty.msg("Finding buildcaches at %s" % - url_util.format(fetch_url_build_cache)) + tty.debug('Finding buildcaches at {0}' + .format(url_util.format(fetch_url_build_cache))) link = url_util.join(fetch_url_build_cache, specfile_name) urls.add(link) @@ -872,8 +872,8 @@ def get_specs(allarch=False): fetch_url_build_cache = url_util.join( mirror.fetch_url, _build_cache_relative_path) - tty.msg("Finding buildcaches at %s" % - url_util.format(fetch_url_build_cache)) + tty.debug('Finding buildcaches at {0}' + .format(url_util.format(fetch_url_build_cache))) index_url = url_util.join(fetch_url_build_cache, 'index.json') @@ -924,15 +924,15 @@ def get_keys(install=False, trust=False, force=False): mirror_dir = url_util.local_file_path(fetch_url_build_cache) if mirror_dir: - tty.msg("Finding public keys in %s" % mirror_dir) + tty.debug('Finding public keys in {0}'.format(mirror_dir)) files = os.listdir(str(mirror_dir)) for file in files: if re.search(r'\.key', file) or re.search(r'\.pub', file): link = url_util.join(fetch_url_build_cache, file) keys.add(link) else: - tty.msg("Finding public keys at %s" % - url_util.format(fetch_url_build_cache)) + tty.debug('Finding public keys at {0}' + .format(url_util.format(fetch_url_build_cache))) # For s3 mirror need to request index.html directly p, links = web_util.spider( url_util.join(fetch_url_build_cache, 'index.html')) @@ -950,14 +950,14 @@ def get_keys(install=False, trust=False, force=False): stage.fetch() except fs.FetchError: continue - tty.msg('Found key %s' % link) + tty.debug('Found key {0}'.format(link)) if install: if trust: Gpg.trust(stage.save_filename) - tty.msg('Added this key to trusted keys.') + tty.debug('Added this key to trusted keys.') else: - tty.msg('Will not add this key to trusted keys.' - 'Use -t to install all downloaded keys') + tty.debug('Will not add this key to trusted keys.' 
+ ' Use -t to install all downloaded keys') def needs_rebuild(spec, mirror_url, rebuild_on_errors=False): @@ -1044,7 +1044,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None, """ rebuilds = {} for mirror in spack.mirror.MirrorCollection(mirrors).values(): - tty.msg('Checking for built specs at %s' % mirror.fetch_url) + tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url)) rebuild_list = [] diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index d3e825f1dd99d35af993050078483f7a9d90c9a8..e3c45cc27b7881acdfc2709ef241a7c6942e536e 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -351,6 +351,9 @@ def env_status(args): % (ev.manifest_name, env.path)) else: tty.msg('In environment %s' % env.name) + + # Check if environment views can be safely activated + env.check_views() else: tty.msg('No active environment') diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 0f0fb94c1ee44c5b38a14a54d2497b4765f13f41..45f57a8417dab00bb6beb63534aa2b4801d81116 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -681,8 +681,9 @@ def _default_make_compilers(cmp_id, paths): sort_fn = lambda variation: ( 'cc' not in by_compiler_id[variation], # None last 'cxx' not in by_compiler_id[variation], # None last - variation.prefix, - variation.suffix, + + getattr(variation, 'prefix', None), + getattr(variation, 'suffix', None), ) compilers = [] diff --git a/lib/spack/spack/container/images.json b/lib/spack/spack/container/images.json index e4761d4b40c499087cdae2605ef9edf75c0e4c7b..24271e072174a5ade107265cbef0f9baf17d0966 100644 --- a/lib/spack/spack/container/images.json +++ b/lib/spack/spack/container/images.json @@ -13,7 +13,9 @@ "0.14.2": "0.14.2", "0.15": "0.15", "0.15.0": "0.15.0", - "0.15.1": "0.15.1" + "0.15.1": "0.15.1", + "0.15.2": "0.15.2", + "0.15.3": "0.15.3" } }, "ubuntu:16.04": { @@ -30,7 +32,9 @@ "0.14.2": "0.14.2", "0.15": "0.15", "0.15.0": "0.15.0", - "0.15.1": "0.15.1" + "0.15.1": "0.15.1", + "0.15.2": "0.15.2", + "0.15.3": "0.15.3" } }, "centos:7": { @@ -47,7 +51,9 @@ "0.14.2": "0.14.2", "0.15": "0.15", "0.15.0": "0.15.0", - "0.15.1": "0.15.1" + "0.15.1": "0.15.1", + "0.15.2": "0.15.2", + "0.15.3": "0.15.3" } }, "centos:6": { @@ -64,7 +70,9 @@ "0.14.2": "0.14.2", "0.15": "0.15", "0.15.0": "0.15.0", - "0.15.1": "0.15.1" + "0.15.1": "0.15.1", + "0.15.2": "0.15.2", + "0.15.3": "0.15.3" } } } diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index f7b50c30c9cdb04b416ddb672780357b801903bf..99aa3963d53ec43b2a27a85e7d4ec6c774c02679 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -175,9 +175,20 @@ def activate( # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive) # become PATH variables.
# - if add_view and default_view_name in env.views: - with spack.store.db.read_transaction(): - cmds += env.add_default_view_to_shell(shell) + try: + if add_view and default_view_name in env.views: + with spack.store.db.read_transaction(): + cmds += env.add_default_view_to_shell(shell) + except (spack.repo.UnknownPackageError, + spack.repo.UnknownNamespaceError) as e: + tty.error(e) + tty.die( + 'Environment view is broken due to a missing package or repo.\n', + ' To activate without views enabled, activate with:\n', + ' spack env activate -V {0}\n'.format(env.name), + ' To remove it and resolve the issue, ' + 'force concretize with the command:\n', + ' spack -e {0} concretize --force'.format(env.name)) return cmds @@ -230,9 +241,15 @@ def deactivate(shell='sh'): cmds += ' unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n' cmds += 'fi;\n' - if default_view_name in _active_environment.views: - with spack.store.db.read_transaction(): - cmds += _active_environment.rm_default_view_from_shell(shell) + try: + if default_view_name in _active_environment.views: + with spack.store.db.read_transaction(): + cmds += _active_environment.rm_default_view_from_shell(shell) + except (spack.repo.UnknownPackageError, + spack.repo.UnknownNamespaceError) as e: + tty.warn(e) + tty.warn('Could not fully deactivate view due to missing package ' + 'or repo, shell environment may be corrupt.') tty.debug("Deactivated environment '%s'" % _active_environment.name) _active_environment = None @@ -446,8 +463,9 @@ def _eval_conditional(string): class ViewDescriptor(object): - def __init__(self, root, projections={}, select=[], exclude=[], + def __init__(self, base_path, root, projections={}, select=[], exclude=[], link=default_view_link): + self.base = base_path self.root = root self.projections = projections self.select = select @@ -477,15 +495,19 @@ def to_dict(self): return ret @staticmethod - def from_dict(d): - return ViewDescriptor(d['root'], + def from_dict(base_path, d): + return ViewDescriptor(base_path, + d['root'], d.get('projections', {}), d.get('select', []), d.get('exclude', []), d.get('link', default_view_link)) def view(self): - return YamlFilesystemView(self.root, spack.store.layout, + root = self.root + if not os.path.isabs(root): + root = os.path.normpath(os.path.join(self.base, self.root)) + return YamlFilesystemView(root, spack.store.layout, ignore_conflicts=True, projections=self.projections) @@ -527,20 +549,29 @@ def regenerate(self, all_specs, roots): installed_specs_for_view = set( s for s in specs_for_view if s in self and s.package.installed) - view = self.view() + # To ensure there are no conflicts with packages being installed + # that cannot be resolved or have repos that have been removed + # we always regenerate the view from scratch. We must first make + # sure the root directory exists for the very first time though.
+ root = self.root + if not os.path.isabs(root): + root = os.path.normpath(os.path.join(self.base, self.root)) + fs.mkdirp(root) + with fs.replace_directory_transaction(root): + view = self.view() - view.clean() - specs_in_view = set(view.get_all_specs()) - tty.msg("Updating view at {0}".format(self.root)) + view.clean() + specs_in_view = set(view.get_all_specs()) + tty.msg("Updating view at {0}".format(self.root)) - rm_specs = specs_in_view - installed_specs_for_view - add_specs = installed_specs_for_view - specs_in_view + rm_specs = specs_in_view - installed_specs_for_view + add_specs = installed_specs_for_view - specs_in_view - # pass all_specs in, as it's expensive to read all the - # spec.yaml files twice. - view.remove_specs(*rm_specs, with_dependents=False, - all_specs=specs_in_view) - view.add_specs(*add_specs, with_dependencies=False) + # pass all_specs in, as it's expensive to read all the + # spec.yaml files twice. + view.remove_specs(*rm_specs, with_dependents=False, + all_specs=specs_in_view) + view.add_specs(*add_specs, with_dependencies=False) class Environment(object): @@ -586,9 +617,11 @@ def __init__(self, path, init_file=None, with_view=None): self.views = {} elif with_view is True: self.views = { - default_view_name: ViewDescriptor(self.view_path_default)} + default_view_name: ViewDescriptor(self.path, + self.view_path_default)} elif isinstance(with_view, six.string_types): - self.views = {default_view_name: ViewDescriptor(with_view)} + self.views = {default_view_name: ViewDescriptor(self.path, + with_view)} # If with_view is None, then defer to the view settings determined by # the manifest file @@ -659,11 +692,14 @@ def _read_manifest(self, f, raw_yaml=None): # enable_view can be boolean, string, or None if enable_view is True or enable_view is None: self.views = { - default_view_name: ViewDescriptor(self.view_path_default)} + default_view_name: ViewDescriptor(self.path, + self.view_path_default)} elif isinstance(enable_view, six.string_types): - self.views = {default_view_name: ViewDescriptor(enable_view)} + self.views = {default_view_name: ViewDescriptor(self.path, + enable_view)} elif enable_view: - self.views = dict((name, ViewDescriptor.from_dict(values)) + path = self.path + self.views = dict((name, ViewDescriptor.from_dict(path, values)) for name, values in enable_view.items()) else: self.views = {} @@ -1097,7 +1133,7 @@ def update_default_view(self, viewpath): if name in self.views: self.default_view.root = viewpath else: - self.views[name] = ViewDescriptor(viewpath) + self.views[name] = ViewDescriptor(self.path, viewpath) else: self.views.pop(name, None) @@ -1111,6 +1147,24 @@ def regenerate_views(self): for view in self.views.values(): view.regenerate(specs, self.roots()) + def check_views(self): + """Checks if the environment's default view can be activated.""" + try: + # This is effectively a no-op, but it touches all packages in the + # default view if they are installed. + for view_name, view in self.views.items(): + for _, spec in self.concretized_specs(): + if spec in view and spec.package.installed: + tty.debug( + 'Spec %s in view %s' % (spec.name, view_name)) + except (spack.repo.UnknownPackageError, + spack.repo.UnknownNamespaceError) as e: + tty.warn(e) + tty.warn( + 'Environment %s includes out of date packages or repos. ' + 'Loading the environment view will require reconcretization.'
+ % self.name) + def _env_modifications_for_default_view(self, reverse=False): all_mods = spack.util.environment.EnvironmentModifications() @@ -1490,9 +1544,10 @@ def write(self, regenerate_views=True): default_name = default_view_name if self.views and len(self.views) == 1 and default_name in self.views: path = self.default_view.root - if self.default_view == ViewDescriptor(self.view_path_default): + if self.default_view == ViewDescriptor(self.path, + self.view_path_default): view = True - elif self.default_view == ViewDescriptor(path): + elif self.default_view == ViewDescriptor(self.path, path): view = path else: view = dict((name, view.to_dict()) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 5f0cc4db5d7ca95bf3292f4e4260e988335b15ef..fb87373897fc677fc2e9b97d054a80af9009bdad 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -289,10 +289,11 @@ def candidate_urls(self): @_needs_stage def fetch(self): if self.archive_file: - tty.msg("Already downloaded %s" % self.archive_file) + tty.debug('Already downloaded {0}'.format(self.archive_file)) return url = None + errors = [] for url in self.candidate_urls: try: partial_file, save_file = self._fetch_from_url(url) @@ -300,8 +301,10 @@ def fetch(self): os.rename(partial_file, save_file) break except FetchError as e: - tty.msg(str(e)) - pass + errors.append(str(e)) + + for msg in errors: + tty.debug(msg) if not self.archive_file: raise FailedDownloadError(url) @@ -312,7 +315,7 @@ def _fetch_from_url(self, url): if self.stage.save_filename: save_file = self.stage.save_filename partial_file = self.stage.save_filename + '.part' - tty.msg("Fetching %s" % url) + tty.debug('Fetching {0}'.format(url)) if partial_file: save_args = ['-C', '-', # continue partial downloads @@ -327,6 +330,8 @@ def _fetch_from_url(self, url): '-', # print out HTML headers '-L', # resolve 3xx redirects url, + '--stderr', # redirect stderr output + '-', # redirect to stdout ] if not spack.config.get('config:verify_ssl'): @@ -412,8 +417,8 @@ def cachable(self): @_needs_stage def expand(self): if not self.expand_archive: - tty.msg("Staging unexpanded archive %s in %s" % ( - self.archive_file, self.stage.source_path)) + tty.debug('Staging unexpanded archive {0} in {1}' + .format(self.archive_file, self.stage.source_path)) if not self.stage.expanded: mkdirp(self.stage.source_path) dest = os.path.join(self.stage.source_path, @@ -421,7 +426,7 @@ def expand(self): shutil.move(self.archive_file, dest) return - tty.msg("Staging archive: %s" % self.archive_file) + tty.debug('Staging archive: {0}'.format(self.archive_file)) if not self.archive_file: raise NoArchiveFileError( @@ -564,7 +569,7 @@ def fetch(self): raise # Notify the user how we fetched. 
- tty.msg('Using cached archive: %s' % path) + tty.debug('Using cached archive: {0}'.format(path)) class VCSFetchStrategy(FetchStrategy): @@ -594,7 +599,8 @@ def __init__(self, **kwargs): @_needs_stage def check(self): - tty.msg("No checksum needed when fetching with %s" % self.url_attr) + tty.debug('No checksum needed when fetching with {0}' + .format(self.url_attr)) @_needs_stage def expand(self): @@ -672,7 +678,7 @@ def go(self): @_needs_stage def fetch(self): - tty.msg("Getting go resource:", self.url) + tty.debug('Getting go resource: {0}'.format(self.url)) with working_dir(self.stage.path): try: @@ -788,10 +794,10 @@ def _repo_info(self): @_needs_stage def fetch(self): if self.stage.expanded: - tty.msg("Already fetched {0}".format(self.stage.source_path)) + tty.debug('Already fetched {0}'.format(self.stage.source_path)) return - tty.msg("Cloning git repository: {0}".format(self._repo_info())) + tty.debug('Cloning git repository: {0}'.format(self._repo_info())) git = self.git if self.commit: @@ -959,10 +965,10 @@ def mirror_id(self): @_needs_stage def fetch(self): if self.stage.expanded: - tty.msg("Already fetched %s" % self.stage.source_path) + tty.debug('Already fetched {0}'.format(self.stage.source_path)) return - tty.msg("Checking out subversion repository: %s" % self.url) + tty.debug('Checking out subversion repository: {0}'.format(self.url)) args = ['checkout', '--force', '--quiet'] if self.revision: @@ -1068,13 +1074,14 @@ def mirror_id(self): @_needs_stage def fetch(self): if self.stage.expanded: - tty.msg("Already fetched %s" % self.stage.source_path) + tty.debug('Already fetched {0}'.format(self.stage.source_path)) return args = [] if self.revision: args.append('at revision %s' % self.revision) - tty.msg("Cloning mercurial repository:", self.url, *args) + tty.debug('Cloning mercurial repository: {0} {1}' + .format(self.url, args)) args = ['clone'] @@ -1130,7 +1137,7 @@ def __init__(self, *args, **kwargs): @_needs_stage def fetch(self): if self.archive_file: - tty.msg("Already downloaded %s" % self.archive_file) + tty.debug('Already downloaded {0}'.format(self.archive_file)) return parsed_url = url_util.parse(self.url) @@ -1138,7 +1145,7 @@ def fetch(self): raise FetchError( 'S3FetchStrategy can only fetch from s3:// urls.') - tty.msg("Fetching %s" % self.url) + tty.debug('Fetching {0}'.format(self.url)) basename = os.path.basename(parsed_url.path) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index b9ae0f46fa907cf031d953774150259e24af7736..2d4b488ac39cdd37bc643667660e4cf07d6ef8c8 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -215,18 +215,18 @@ def _hms(seconds): def _install_from_cache(pkg, cache_only, explicit, unsigned=False): """ - Install the package from binary cache + Extract the package from binary cache Args: pkg (PackageBase): the package to install from the binary cache - cache_only (bool): only install from binary cache + cache_only (bool): only extract from binary cache explicit (bool): ``True`` if installing the package was explicitly requested by the user, otherwise, ``False`` unsigned (bool): ``True`` if binary package signatures to be checked, otherwise, ``False`` Return: - (bool) ``True`` if the package was installed from binary cache, + (bool) ``True`` if the package was extracted from binary cache, ``False`` otherwise """ installed_from_cache = _try_install_from_binary_cache(pkg, explicit, @@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False): if cache_only:
tty.die('{0} when cache-only specified'.format(pre)) - tty.debug('{0}: installing from source'.format(pre)) + tty.msg('{0}: installing from source'.format(pre)) return False - tty.debug('Successfully installed {0} from binary cache'.format(pkg_id)) + tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id)) _print_installed_pkg(pkg.spec.prefix) spack.hooks.post_install(pkg.spec) return True @@ -275,17 +275,17 @@ def _process_external_package(pkg, explicit): if spec.external_module: tty.msg('{0} has external module in {1}' .format(pre, spec.external_module)) - tty.msg('{0} is actually installed in {1}' - .format(pre, spec.external_path)) + tty.debug('{0} is actually installed in {1}' + .format(pre, spec.external_path)) else: - tty.msg("{0} externally installed in {1}" + tty.msg('{0} externally installed in {1}' .format(pre, spec.external_path)) try: # Check if the package was already registered in the DB. # If this is the case, then just exit. rec = spack.store.db.get_record(spec) - tty.msg('{0} already registered in DB'.format(pre)) + tty.debug('{0} already registered in DB'.format(pre)) # Update the value of rec.explicit if it is necessary _update_explicit_entry_in_db(pkg, rec, explicit) @@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit): # If not, register it and generate the module file. # For external packages we just need to run # post-install hooks to generate module files. - tty.msg('{0} generating module file'.format(pre)) + tty.debug('{0} generating module file'.format(pre)) spack.hooks.post_install(spec) # Add to the DB - tty.msg('{0} registering into DB'.format(pre)) + tty.debug('{0} registering into DB'.format(pre)) spack.store.db.add(spec, None, explicit=explicit) @@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned): otherwise, ``False`` Return: - (bool) ``True`` if the package was installed from binary cache, + (bool) ``True`` if the package was extracted from binary cache, else ``False`` """ tarball = binary_distribution.download_tarball(binary_spec) @@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned): return False pkg_id = package_id(pkg) - tty.msg('Installing {0} from binary cache'.format(pkg_id)) + tty.msg('Extracting {0} from binary cache'.format(pkg_id)) binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False) pkg.installed_from_binary_cache = True @@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned): def _try_install_from_binary_cache(pkg, explicit, unsigned=False): """ - Try to install the package from binary cache. + Try to extract the package from binary cache. 
Args: - pkg (PackageBase): the package to be installed from binary cache + pkg (PackageBase): the package to be extracted from binary cache explicit (bool): the package was explicitly requested by the user unsigned (bool): ``True`` if binary package signatures to be checked, otherwise, ``False`` @@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit): with spack.store.db.write_transaction(): rec = spack.store.db.get_record(pkg.spec) message = '{s.name}@{s.version} : marking the package explicit' - tty.msg(message.format(s=pkg.spec)) + tty.debug(message.format(s=pkg.spec)) rec.explicit = True @@ -452,7 +452,8 @@ def install_msg(name, pid): Return: (str) Colorized installing message """ - return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name) + pre = '{0}: '.format(pid) if tty.show_pid() else '' + return pre + colorize('@*{Installing} @*g{%s}' % name) def log(pkg): @@ -1064,7 +1065,8 @@ def _install_task(self, task, **kwargs): pkg.run_tests = (tests is True or tests and pkg.name in tests) - pre = '{0}: {1}:'.format(self.pid, pkg.name) + pid = '{0}: '.format(self.pid) if tty.show_pid() else '' + pre = '{0}{1}:'.format(pid, pkg.name) def build_process(): """ @@ -1083,8 +1085,8 @@ def build_process(): pkg.do_stage() pkg_id = package_id(pkg) - tty.msg('{0} Building {1} [{2}]' - .format(pre, pkg_id, pkg.build_system_class)) + tty.debug('{0} Building {1} [{2}]' + .format(pre, pkg_id, pkg.build_system_class)) # get verbosity from do_install() parameter or saved value echo = verbose @@ -1105,8 +1107,8 @@ def build_process(): if install_source and os.path.isdir(source_path): src_target = os.path.join(pkg.spec.prefix, 'share', pkg.name, 'src') - tty.msg('{0} Copying source to {1}' - .format(pre, src_target)) + tty.debug('{0} Copying source to {1}' + .format(pre, src_target)) fs.install_tree(pkg.stage.source_path, src_target) # Do the real install in the source directory. @@ -1128,7 +1130,7 @@ def build_process(): pass # cache debug settings - debug_enabled = tty.is_debug() + debug_level = tty.debug_level() # Spawn a daemon that reads from a pipe and redirects # everything to log_path @@ -1137,11 +1139,11 @@ def build_process(): pkg.phases, pkg._InstallPhase_phases): with logger.force_echo(): - inner_debug = tty.is_debug() - tty.set_debug(debug_enabled) + inner_debug_level = tty.debug_level() + tty.set_debug(debug_level) tty.msg("{0} Executing phase: '{1}'" .format(pre, phase_name)) - tty.set_debug(inner_debug) + tty.set_debug(inner_debug_level) # Redirect stdout and stderr to daemon pipe phase = getattr(pkg, phase_attr) @@ -1157,11 +1159,11 @@ def build_process(): pkg._total_time = time.time() - start_time build_time = pkg._total_time - pkg._fetch_time - tty.msg('{0} Successfully installed {1}' - .format(pre, pkg_id), - 'Fetch: {0}. Build: {1}. Total: {2}.' - .format(_hms(pkg._fetch_time), _hms(build_time), - _hms(pkg._total_time))) + tty.debug('{0} Successfully installed {1}' + .format(pre, pkg_id), + 'Fetch: {0}. Build: {1}. Total: {2}.' 
+ .format(_hms(pkg._fetch_time), _hms(build_time), + _hms(pkg._total_time))) _print_installed_pkg(pkg.prefix) # preserve verbosity across runs @@ -1192,7 +1194,8 @@ def build_process(): except spack.build_environment.StopPhase as e: # A StopPhase exception means that do_install was asked to # stop early from clients, and is not an error at this point - tty.debug('{0} {1}'.format(self.pid, str(e))) + pre = '{0}: '.format(self.pid) if tty.show_pid() else '' + tty.debug('{0}{1}'.format(pre, str(e))) tty.debug('Package stage directory : {0}' .format(pkg.stage.source_path)) @@ -1565,9 +1568,14 @@ def install(self, **kwargs): except (Exception, SystemExit) as exc: # Best effort installs suppress the exception and mark the # package as a failure UNLESS this is the explicit package. - err = 'Failed to install {0} due to {1}: {2}' - tty.error(err.format(pkg.name, exc.__class__.__name__, - str(exc))) + if (not isinstance(exc, spack.error.SpackError) or + not exc.printed): + # SpackErrors can be printed by the build process or at + # lower levels -- skip printing if already printed. + # TODO: sort out this and SpackError.print_context() + err = 'Failed to install {0} due to {1}: {2}' + tty.error( + err.format(pkg.name, exc.__class__.__name__, str(exc))) self._update_failed(task, True, exc) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 86783bfb2b7d4db53a1c1b7d7b407044951ddedc..96b79e7fa12035cf99a7231467b8f1112275a76b 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -362,8 +362,9 @@ def make_argument_parser(**kwargs): '-C', '--config-scope', dest='config_scopes', action='append', metavar='DIR', help="add a custom configuration scope") parser.add_argument( - '-d', '--debug', action='store_true', - help="write out debug logs during compile") + '-d', '--debug', action='count', default=0, + help="write out debug messages " + "(more d's for more verbosity: -d, -dd, -ddd, etc.)") parser.add_argument( '--timestamp', action='store_true', help="Add a timestamp to tty output") @@ -438,7 +439,7 @@ def setup_main_options(args): tty.set_debug(args.debug) tty.set_stacktrace(args.stacktrace) - # debug must be set first so that it can even affect behvaior of + # debug must be set first so that it can even affect behavior of # errors raised by spack.config.
if args.debug: spack.error.debug = True @@ -710,7 +711,7 @@ def main(argv=None): if not args.no_env: env = ev.find_environment(args) if env: - ev.activate(env, args.use_env_repo) + ev.activate(env, args.use_env_repo, add_view=False) if args.print_shell_vars: print_setup_info(*args.print_shell_vars.split(',')) diff --git a/lib/spack/spack/operating_systems/cray_backend.py b/lib/spack/spack/operating_systems/cray_backend.py index eaf8360c2c4a94f3ebb3c0f11b6e595d2fa6f15f..5f113eba0b7a466f1c6e3a34b4c4b4d7d2afb810 100644 --- a/lib/spack/spack/operating_systems/cray_backend.py +++ b/lib/spack/spack/operating_systems/cray_backend.py @@ -97,6 +97,9 @@ def __str__(self): def _detect_crayos_version(cls): if os.path.isfile(_cle_release_file): release_attrs = read_cle_release_file() + if 'RELEASE' not in release_attrs: + # This Cray system uses a base OS not CLE/CNL + return None v = spack.version.Version(release_attrs['RELEASE']) return v[0] elif os.path.isfile(_clerelease_file): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index cedcfffed28f7bf8e35fcca64fee39f3d935f9be..c5cae4f9b0aed6595319b3d00920826ec860f1cd 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1122,9 +1122,8 @@ def do_fetch(self, mirror_only=False): raise ValueError("Can only fetch concrete packages.") if not self.has_code: - tty.msg( - "No fetch required for %s: package has no code." % self.name - ) + tty.debug('No fetch required for {0}: package has no code.' + .format(self.name)) start_time = time.time() checksum = spack.config.get('config:checksum') @@ -1140,7 +1139,8 @@ def do_fetch(self, mirror_only=False): ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False) if ignore_checksum: - tty.msg("Fetching with no checksum.", ck_msg) + tty.debug('Fetching with no checksum. {0}' + .format(ck_msg)) if not ignore_checksum: raise FetchError("Will not fetch %s" % @@ -1196,7 +1196,7 @@ def do_patch(self): # If there are no patches, note it. if not patches and not has_patch_fun: - tty.msg("No patches needed for %s" % self.name) + tty.debug('No patches needed for {0}'.format(self.name)) return # Construct paths to special files in the archive dir used to @@ -1209,15 +1209,15 @@ def do_patch(self): # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. if os.path.isfile(bad_file): - tty.msg("Patching failed last time. Restaging.") + tty.debug('Patching failed last time. Restaging.') self.stage.restage() # If this file exists, then we already applied all the patches. if os.path.isfile(good_file): - tty.msg("Already patched %s" % self.name) + tty.debug('Already patched {0}'.format(self.name)) return elif os.path.isfile(no_patches_file): - tty.msg("No patches needed for %s" % self.name) + tty.debug('No patches needed for {0}'.format(self.name)) return # Apply all the patches for specs that match this one @@ -1226,7 +1226,7 @@ def do_patch(self): try: with working_dir(self.stage.source_path): patch.apply(self.stage) - tty.msg('Applied patch %s' % patch.path_or_url) + tty.debug('Applied patch {0}'.format(patch.path_or_url)) patched = True except spack.error.SpackError as e: tty.debug(e) @@ -1240,7 +1240,7 @@ def do_patch(self): try: with working_dir(self.stage.source_path): self.patch() - tty.msg("Ran patch() for %s" % self.name) + tty.debug('Ran patch() for {0}'.format(self.name)) patched = True except spack.multimethod.NoSuchMethodError: # We are running a multimethod without a default case. 
@@ -1250,12 +1250,12 @@ def do_patch(self): # directive, AND the patch function didn't apply, say # no patches are needed. Otherwise, we already # printed a message for each patch. - tty.msg("No patches needed for %s" % self.name) + tty.debug('No patches needed for {0}'.format(self.name)) except spack.error.SpackError as e: tty.debug(e) # Touch bad file if anything goes wrong. - tty.msg("patch() function failed for %s" % self.name) + tty.msg('patch() function failed for {0}'.format(self.name)) touch(bad_file) raise @@ -1342,7 +1342,7 @@ def _has_make_target(self, target): if os.path.exists(makefile): break else: - tty.msg('No Makefile found in the build directory') + tty.debug('No Makefile found in the build directory') return False # Check if 'target' is a valid target. @@ -1373,7 +1373,8 @@ def _has_make_target(self, target): for missing_target_msg in missing_target_msgs: if missing_target_msg.format(target) in stderr: - tty.msg("Target '" + target + "' not found in " + makefile) + tty.debug("Target '{0}' not found in {1}" + .format(target, makefile)) return False return True @@ -1401,7 +1402,7 @@ def _has_ninja_target(self, target): # Check if we have a Ninja build script if not os.path.exists('build.ninja'): - tty.msg('No Ninja build script found in the build directory') + tty.debug('No Ninja build script found in the build directory') return False # Get a list of all targets in the Ninja build script @@ -1413,7 +1414,8 @@ def _has_ninja_target(self, target): if line.startswith(target + ':')] if not matches: - tty.msg("Target '" + target + "' not found in build.ninja") + tty.debug("Target '{0}' not found in build.ninja" + .format(target)) return False return True @@ -1720,11 +1722,12 @@ def uninstall_by_spec(spec, force=False, deprecator=None): if specs: if deprecator: spack.store.db.deprecate(specs[0], deprecator) - tty.msg("Deprecating stale DB entry for " - "%s" % spec.short_spec) + tty.debug('Deprecating stale DB entry for {0}' + .format(spec.short_spec)) else: spack.store.db.remove(specs[0]) - tty.msg("Removed stale DB entry for %s" % spec.short_spec) + tty.debug('Removed stale DB entry for {0}' + .format(spec.short_spec)) return else: raise InstallError(str(spec) + " is not installed.") @@ -1797,7 +1800,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None): error_msg += "\n\nThe error:\n\n{0}".format(tb_msg) tty.warn(error_msg) - tty.msg("Successfully uninstalled %s" % spec.short_spec) + tty.msg('Successfully uninstalled {0}'.format(spec.short_spec)) def do_uninstall(self, force=False): """Uninstall this package by spec.""" diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 9c8770c3680654b353dc6a330278c7f4bae5eb38..c6d367e9a68f49fadbb0a1ac55a8f39b28e7c82e 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -20,7 +20,7 @@ _craype_name_to_target_name = { 'x86-cascadelake': 'cascadelake', 'x86-naples': 'zen', - 'x86-rome': 'zen', # Cheating because we have the wrong modules on rzcrayz + 'x86-rome': 'zen2', 'x86-skylake': 'skylake_avx512', 'mic-knl': 'mic_knl', 'interlagos': 'bulldozer', diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py index 36cb74a875134a3943e7f764b3f2892a563b6a8a..c6e54732e66a4a5c5b0c004ba5f92bec83b1e4ea 100644 --- a/lib/spack/spack/schema/container.py +++ b/lib/spack/spack/schema/container.py @@ -32,7 +32,8 @@ 'enum': [ 'develop', '0.14', '0.14.0', '0.14.1', '0.14.2', - '0.15', '0.15.0', '0.15.1', + '0.15', '0.15.0', '0.15.1', '0.15.2', + 
'0.15.3', ] } }, diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 5d3b0db502161efc6c3cf98ae42daf4d11ad46be..553da4fec29e290632cefef861209934788d39f8 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -415,10 +415,11 @@ def fetch(self, mirror_only=False): # Join URLs of mirror roots with mirror paths. Because # urljoin() will strip everything past the final '/' in # the root, so we add a '/' if it is not present. - urls = [] + mirror_urls = [] for mirror in spack.mirror.MirrorCollection().values(): for rel_path in self.mirror_paths: - urls.append(url_util.join(mirror.fetch_url, rel_path)) + mirror_urls.append( + url_util.join(mirror.fetch_url, rel_path)) # If this archive is normally fetched from a tarball URL, # then use the same digest. `spack mirror` ensures that @@ -436,7 +437,8 @@ def fetch(self, mirror_only=False): self.skip_checksum_for_mirror = not bool(digest) # Add URL strategies for all the mirrors with the digest - for url in urls: + # Insert fetchers in the order that the URLs are provided. + for url in reversed(mirror_urls): fetchers.insert( 0, fs.from_url_scheme( url, digest, expand=expand, extension=extension)) @@ -458,6 +460,11 @@ def generate_fetchers(): for fetcher in dynamic_fetchers: yield fetcher + def print_errors(errors): + for msg in errors: + tty.debug(msg) + + errors = [] for fetcher in generate_fetchers(): try: fetcher.stage = self @@ -468,14 +475,18 @@ def generate_fetchers(): # Don't bother reporting when something is not cached. continue except spack.error.SpackError as e: - tty.msg("Fetching from %s failed." % fetcher) + errors.append('Fetching from {0} failed.'.format(fetcher)) tty.debug(e) continue else: - err_msg = "All fetchers failed for %s" % self.name + print_errors(errors) + + err_msg = 'All fetchers failed for {0}'.format(self.name) self.fetcher = self.default_fetcher raise fs.FetchError(err_msg, None) + print_errors(errors) + def check(self): """Check the downloaded archive against a checksum digest. 
No-op if this stage checks code out of a repository.""" @@ -536,9 +547,9 @@ def expand_archive(self): downloaded.""" if not self.expanded: self.fetcher.expand() - tty.msg("Created stage in %s" % self.path) + tty.debug('Created stage in {0}'.format(self.path)) else: - tty.msg("Already staged %s in %s" % (self.name, self.path)) + tty.debug('Already staged {0} in {1}'.format(self.name, self.path)) def restage(self): """Removes the expanded archive path if it exists, then re-expands @@ -709,13 +720,13 @@ def __exit__(self, exc_type, exc_val, exc_tb): pass def fetch(self, *args, **kwargs): - tty.msg("No need to fetch for DIY.") + tty.debug('No need to fetch for DIY.') def check(self): - tty.msg("No checksum needed for DIY.") + tty.debug('No checksum needed for DIY.') def expand_archive(self): - tty.msg("Using source directory: %s" % self.source_path) + tty.debug('Using source directory: {0}'.format(self.source_path)) @property def expanded(self): @@ -733,7 +744,7 @@ def destroy(self): pass def cache_local(self): - tty.msg("Sources for DIY stages are not cached") + tty.debug('Sources for DIY stages are not cached') def ensure_access(file): @@ -783,12 +794,12 @@ def get_checksums_for_versions( max_len = max(len(str(v)) for v in sorted_versions) num_ver = len(sorted_versions) - tty.msg("Found {0} version{1} of {2}:".format( - num_ver, '' if num_ver == 1 else 's', name), - "", - *spack.cmd.elide_list( - ["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) - for v in sorted_versions])) + tty.debug('Found {0} version{1} of {2}:'.format( + num_ver, '' if num_ver == 1 else 's', name), + '', + *spack.cmd.elide_list( + ['{0:{1}} {2}'.format(str(v), max_len, url_dict[v]) + for v in sorted_versions])) print() if batch: @@ -803,9 +814,10 @@ def get_checksums_for_versions( versions = sorted_versions[:archives_to_fetch] urls = [url_dict[v] for v in versions] - tty.msg("Downloading...") + tty.debug('Downloading...') version_hashes = [] i = 0 + errors = [] for url, version in zip(urls, versions): try: if fetch_options: @@ -826,10 +838,12 @@ def get_checksums_for_versions( hashlib.sha256, stage.archive_file))) i += 1 except FailedDownloadError: - tty.msg("Failed to fetch {0}".format(url)) + errors.append('Failed to fetch {0}'.format(url)) except Exception as e: - tty.msg("Something failed on {0}, skipping.".format(url), - " ({0})".format(e)) + tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e)) + + for msg in errors: + tty.debug(msg) if not version_hashes: tty.die("Could not fetch any versions for {0}".format(name)) @@ -844,8 +858,8 @@ def get_checksums_for_versions( ]) num_hash = len(version_hashes) - tty.msg("Checksummed {0} version{1} of {2}:".format( - num_hash, '' if num_hash == 1 else 's', name)) + tty.debug('Checksummed {0} version{1} of {2}:'.format( + num_hash, '' if num_hash == 1 else 's', name)) return version_lines diff --git a/lib/spack/spack/test/cache_fetch.py b/lib/spack/spack/test/cache_fetch.py new file mode 100644 index 0000000000000000000000000000000000000000..3b4c3cb88773071df1d6a790ebccf0cff527e0d7 --- /dev/null +++ b/lib/spack/spack/test/cache_fetch.py @@ -0,0 +1,36 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +import pytest + +from llnl.util.filesystem import mkdirp, touch + +from spack.stage import Stage +from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError + + +def test_fetch_missing_cache(tmpdir): + """Ensure that fetching a missing cache file raises an error.""" + testpath = str(tmpdir) + + fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file') + with Stage(fetcher, path=testpath): + with pytest.raises(NoCacheError, match=r'No cache'): + fetcher.fetch() + + +def test_fetch(tmpdir): + """Ensure a fetch after expanding is effectively a no-op.""" + testpath = str(tmpdir) + cache = os.path.join(testpath, 'cache.tar.gz') + touch(cache) + url = 'file:///{0}'.format(cache) + + fetcher = CacheURLFetchStrategy(url=url) + with Stage(fetcher, path=testpath) as stage: + source_path = stage.source_path + mkdirp(source_path) + fetcher.fetch() diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index b1cc9ce8b5b2fd12c5bdbdc1de2fae5927090294..87f7a58667f2f65bede658ef04069c04122d950c 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -16,7 +16,7 @@ from spack.cmd.env import _env_create from spack.spec import Spec -from spack.main import SpackCommand +from spack.main import SpackCommand, SpackCommandError from spack.stage import stage_prefix from spack.util.mock_package import MockPackageMultiRepo @@ -284,6 +284,45 @@ def test_environment_status(capsys, tmpdir): assert 'in current directory' in env('status') +def test_env_status_broken_view( + mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, + install_mockery +): + with ev.create('test'): + install('trivial-install-test-package') + + # switch to a new repo that doesn't include the installed package + # test that Spack detects the missing package and warns the user + new_repo = MockPackageMultiRepo() + with spack.repo.swap(new_repo): + output = env('status') + assert 'In environment test' in output + assert 'Environment test includes out of date' in output + + # Test that the warning goes away when it's fixed + output = env('status') + assert 'In environment test' in output + assert 'Environment test includes out of date' not in output + + +def test_env_activate_broken_view( + mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, + install_mockery +): + with ev.create('test'): + install('trivial-install-test-package') + + # switch to a new repo that doesn't include the installed package + # test that Spack detects the missing package and fails gracefully + new_repo = MockPackageMultiRepo() + with spack.repo.swap(new_repo): + with pytest.raises(SpackCommandError): + env('activate', '--sh', 'test') + + # test replacing repo fixes it + env('activate', '--sh', 'test') + + def test_to_lockfile_dict(): e = ev.create('test') e.add('mpileaks') diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index fdaf19debebe61b4b7f708ff725f6eb945d35865..ec114538839494f2098718801701e4b35c90c5bb 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -173,8 +173,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch): # make sure that output from the actual package file appears in the # right place in the build log. - assert re.search(r"BEFORE INSTALL\n==>( \[.+\])?
'./configure'", out) - assert "'install'\nAFTER INSTALL" in out + assert "BEFORE INSTALL" in out + assert "AFTER INSTALL" in out @pytest.mark.disable_clean_stage_check @@ -220,10 +220,12 @@ def test_show_log_on_error(mock_packages, mock_archive, mock_fetch, assert install.error.pkg.name == 'build-error' assert 'Full build log:' in out - # Message shows up for ProcessError (1), ChildError (1), and output (1) + print(out) + + # Message shows up for ProcessError (1) and output (1) errors = [line for line in out.split('\n') if 'configure: error: cannot run C compiled programs' in line] - assert len(errors) == 3 + assert len(errors) == 2 def test_install_overwrite( diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 2f779c6a5f393184f75b4cb947954b09348bd56e..6d15dff7f62cca059aa7c94054eaeb0e259a7411 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -344,10 +344,9 @@ def test_nosource_pkg_install( # Make sure install works even though there is no associated code. pkg.do_install() - - # Also make sure an error is raised if `do_fetch` is called. - pkg.do_fetch() - assert "No fetch required for nosource" in capfd.readouterr()[0] + out = capfd.readouterr() + assert "Installing dependency-install" in out[0] + assert "Missing a source id for nosource" in out[1] def test_nosource_pkg_install_post_install( diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index cc4b168e6c8094dc9d01033d45eea26a7c14672e..68b70e084087be25d53368672a4b0d02612c8b68 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -99,10 +99,21 @@ def test_hms(sec, result): assert inst._hms(sec) == result -def test_install_msg(): +def test_install_msg(monkeypatch): + """Test results of call to install_msg based on debug level.""" name = 'some-package' pid = 123456 - expected = "{0}: Installing {1}".format(pid, name) + install_msg = 'Installing {0}'.format(name) + + monkeypatch.setattr(tty, '_debug', 0) + assert inst.install_msg(name, pid) == install_msg + + monkeypatch.setattr(tty, '_debug', 1) + assert inst.install_msg(name, pid) == install_msg + + # Expect the PID to be added at debug level 2 + monkeypatch.setattr(tty, '_debug', 2) + expected = "{0}: {1}".format(pid, install_msg) assert inst.install_msg(name, pid) == expected @@ -151,7 +162,6 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd): out = capfd.readouterr()[0] assert 'has external module in {0}'.format(spec.external_module) in out - assert 'is actually installed in {0}'.format(spec.external_path) in out def test_process_binary_cache_tarball_none(install_mockery, monkeypatch, @@ -180,7 +190,7 @@ def _spec(spec): spec = spack.spec.Spec('a').concretized() assert inst._process_binary_cache_tarball(spec.package, spec, False, False) - assert 'Installing a from binary cache' in capfd.readouterr()[0] + assert 'Extracting a from binary cache' in capfd.readouterr()[0] def test_try_install_from_binary_cache(install_mockery, mock_packages, diff --git a/lib/spack/spack/test/llnl/util/tty/tty.py b/lib/spack/spack/test/llnl/util/tty/tty.py new file mode 100644 index 0000000000000000000000000000000000000000..b8366a97383e48b919b157ff6e90333e824507da --- /dev/null +++ b/lib/spack/spack/test/llnl/util/tty/tty.py @@ -0,0 +1,87 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +import pytest +import llnl.util.tty as tty + + +def test_get_timestamp(monkeypatch): + """Ensure the results of get_timestamp are reasonable.""" + + # Debug disabled should return an empty string + monkeypatch.setattr(tty, '_debug', 0) + assert not tty.get_timestamp(False), 'Expected an empty string' + + # Debug disabled but force the timestamp should return a string + assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string' + + pid_str = ' {0}'.format(os.getpid()) + + # Level 1 debugging should return a timestamp WITHOUT the pid + monkeypatch.setattr(tty, '_debug', 1) + out_str = tty.get_timestamp(False) + assert out_str and pid_str not in out_str, 'Expected no PID in results' + + # Level 2 debugging should also return a timestamp WITH the pid + monkeypatch.setattr(tty, '_debug', 2) + out_str = tty.get_timestamp(False) + assert out_str and pid_str in out_str, 'Expected PID in results' + + +@pytest.mark.parametrize('msg,enabled,trace,newline', [ + ('', False, False, False), # Nothing is output + (Exception(''), True, False, True), # Exception output + ('trace', True, True, False), # stacktrace output + ('newline', True, False, True), # newline in output + ('no newline', True, False, False) # no newline output +]) +def test_msg(capfd, monkeypatch, enabled, msg, trace, newline): + """Ensure the output from msg with options is appropriate.""" + + # temporarily use the parameterized settings + monkeypatch.setattr(tty, '_msg_enabled', enabled) + monkeypatch.setattr(tty, '_stacktrace', trace) + + expected = [msg if isinstance(msg, str) else 'Exception: '] + if newline: + expected[0] = '{0}\n'.format(expected[0]) + if trace: + expected.insert(0, '.py') + + tty.msg(msg, newline=newline) + out = capfd.readouterr()[0] + for msg in expected: + assert msg in out + + +@pytest.mark.parametrize('msg,trace,wrap', [ + (Exception(''), False, False), # Exception output + ('trace', True, False), # stacktrace output + ('wrap', False, True), # wrap in output +]) +def test_info(capfd, monkeypatch, msg, trace, wrap): + """Ensure the output from info with options is appropriate.""" + + # temporarily use the parameterized settings + monkeypatch.setattr(tty, '_stacktrace', trace) + + expected = [msg if isinstance(msg, str) else 'Exception: '] + if trace: + expected.insert(0, '.py') + + extra = 'This extra argument *should* make for a sufficiently long line' \ + ' that needs to be wrapped if the option is enabled.' 
+    args = [msg, extra]
+
+    num_newlines = 3 if wrap else 2
+
+    tty.info(*args, wrap=wrap, countback=3)
+    out = capfd.readouterr()[0]
+    for msg in expected:
+        assert msg in out
+
+    assert out.count('\n') == num_newlines
diff --git a/lib/spack/spack/test/s3_fetch.py b/lib/spack/spack/test/s3_fetch.py
index 682f1a28429d1d4b497f04a92cfbad424fd6195a..70efad19cefef73f9780cddad19d405cda1b2972 100644
--- a/lib/spack/spack/test/s3_fetch.py
+++ b/lib/spack/spack/test/s3_fetch.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+import os
 import pytest
 
 import spack.fetch_strategy as spack_fs
@@ -27,3 +28,19 @@ def test_s3fetchstrategy_bad_url(tmpdir):
         assert fetcher.archive_file is None
         with pytest.raises(spack_fs.FetchError):
             fetcher.fetch()
+
+
+def test_s3fetchstrategy_downloaded(tmpdir):
+    """Ensure fetch with archive file already downloaded is a noop."""
+    testpath = str(tmpdir)
+    archive = os.path.join(testpath, 's3.tar.gz')
+
+    class Archived_S3FS(spack_fs.S3FetchStrategy):
+        @property
+        def archive_file(self):
+            return archive
+
+    url = 's3:///{0}'.format(archive)
+    fetcher = Archived_S3FS(url=url)
+    with spack_stage.Stage(fetcher, path=testpath):
+        fetcher.fetch()
diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py
index 3d8ae30b103b1e2f626681f6ba51caf9808b73e4..5e12cc16693bee9136c574780bfc070d03a8f727 100644
--- a/lib/spack/spack/util/mock_package.py
+++ b/lib/spack/spack/util/mock_package.py
@@ -77,6 +77,8 @@ def __init__(self):
     def get(self, spec):
         if not isinstance(spec, spack.spec.Spec):
             spec = Spec(spec)
+        if spec.name not in self.spec_to_pkg:
+            raise spack.repo.UnknownPackageError(spec.fullname)
         return self.spec_to_pkg[spec.name]
 
     def get_pkg_class(self, name):
diff --git a/share/spack/keys/tutorial.pub b/share/spack/keys/tutorial.pub
new file mode 100644
index 0000000000000000000000000000000000000000..4add41cf364ed20cc42e8aafcd13cb5d08c347f1
--- /dev/null
+++ b/share/spack/keys/tutorial.pub
@@ -0,0 +1,38 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB
+wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q
+S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT
+BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3
+Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA
+upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp
+cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI
+4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
+AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8
+IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf
+4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX
+CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD
+MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1
+8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s
+npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj
+sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK
+APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U
+n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk
+3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE
+lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV
+4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07
+CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D
+ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k
+2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi
+/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve
+4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE
+1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT
+CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q
+gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg
+QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh
+iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw
+XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT
+tQMw04Gs+kORRNbggsdTD4sR
+=N5Wp
+-----END PGP PUBLIC KEY BLOCK-----
+
diff --git a/share/spack/setup-tutorial-env.sh b/share/spack/setup-tutorial-env.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1f46f15c2d0c11fb5fd43fe4892880a98bc18083
--- /dev/null
+++ b/share/spack/setup-tutorial-env.sh
@@ -0,0 +1,133 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+###############################################################################
+#
+# This file is part of Spack and sets up the environment for the Spack tutorial.
+# It is intended to be run on Ubuntu 18.04, an ubuntu-18.04 container, or an
+# AWS Cloud9 environment.
+#
+# Components:
+#   1. apt installs for packages used in the tutorial
+#      These include compilers and externals used by the tutorial, plus basic
+#      Spack requirements like python and curl.
+#   2. spack configuration files
+#      These set the default configuration for Spack to target x86_64 and to
+#      suppress certain gpg warnings. The gpg warnings are not relevant for the
+#      tutorial, and the default x86_64 target lets us run the same tutorial on
+#      any x86_64 machine without needing new binary packages.
+#   3. AWS Cloud9 configuration to expand available storage
+#      When we run on AWS Cloud9, we have to expand the storage from 10G to 30G
+#      because we install too much software for a default Cloud9 instance.
+###############################################################################
+
+####
+# Ensure we're on Ubuntu 18.04
+####
+
+if [ -f /etc/os-release ]; then
+  . /etc/os-release
+fi
+if [ x"$UBUNTU_CODENAME" != "xbionic" ]; then
+  echo "The tutorial setup script must be run on Ubuntu 18.04."
+  return 1 &>/dev/null || exit 1  # works if sourced or run
+fi
+
+####
+# Install packages needed for tutorial
+####
+
+# compilers, basic system components, externals
+# There are retries around these because apt fails frequently on new instances,
+# due to unattended updates running in the background and taking the lock.
+until sudo apt-get update -y; do
+  echo "==> apt-get update failed. retrying..."
+  sleep 5
+done
+
+until sudo apt-get install -y --no-install-recommends \
+    autoconf make python3 python3-pip \
+    build-essential ca-certificates curl git gnupg2 iproute2 emacs \
+    file openssh-server tcl unzip vim wget \
+    clang g++ g++-6 gcc gcc-6 gfortran gfortran-6 \
+    zlib1g zlib1g-dev mpich; do
+  echo "==> apt-get install failed. retrying..."
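+  # pause briefly so a background unattended-upgrade can release the apt lock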
+  sleep 5
+done
+
+####
+# Upgrade boto3 Python package on AWS systems
+####
+pip3 install --upgrade boto3
+
+
+####
+# Spack configuration settings for tutorial
+####
+
+# create spack system config
+sudo mkdir -p /etc/spack
+
+# set default arch to x86_64
+sudo tee /etc/spack/packages.yaml << EOF > /dev/null
+packages:
+  all:
+    target: [x86_64]
+EOF
+
+# suppress gpg warnings
+sudo tee /etc/spack/config.yaml << EOF > /dev/null
+config:
+  suppress_gpg_warnings: true
+EOF
+
+####
+# AWS set volume size to at least 30G
+####
+
+# Hardcode the specified size to 30G
+SIZE=30
+
+# Get the ID of the environment host Amazon EC2 instance.
+INSTANCEID=$(curl http://169.254.169.254/latest/meta-data/instance-id)
+
+# Get the ID of the Amazon EBS volume associated with the instance.
+VOLUMEID=$(aws ec2 describe-instances \
+  --instance-id $INSTANCEID \
+  --query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
+  --output text)
+
+# Resize the EBS volume.
+aws ec2 modify-volume --volume-id $VOLUMEID --size $SIZE
+
+# Wait for the resize to finish.
+while [ \
+    "$(aws ec2 describe-volumes-modifications \
+      --volume-id $VOLUMEID \
+      --filters Name=modification-state,Values="optimizing","completed" \
+      --query "length(VolumesModifications)" \
+      --output text)" != "1" ]; do
+  sleep 1
+done
+
+if [ -e /dev/xvda1 ]
+then
+  # Rewrite the partition table so that the partition takes up all the space that it can.
+  sudo growpart /dev/xvda 1
+
+  # Expand the size of the file system.
+  sudo resize2fs /dev/xvda1
+
+else
+  # Rewrite the partition table so that the partition takes up all the space that it can.
+  sudo growpart /dev/nvme0n1 1
+
+  # Expand the size of the file system.
+  sudo resize2fs /dev/nvme0n1p1
+fi
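+
+# Note: older Xen-based EC2 instance types expose the root volume as /dev/xvda,
+# while Nitro-based instances expose it as an NVMe device (/dev/nvme0n1).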