Install and run tests for dijitso in a Docker container

Build: #107 failed. Changes by Chris Richardson and Jan Blechta

Stages & jobs

  1. Build Stage

  2. Test Stage

Build result summary

Details

Completed
Duration: 1 minute
Labels: None
Revisions:
  dijitso: 8812382f98e82b4192d93b935287ea45e6d36b3a
  testing: 1fac8b226a1b4c408525379af321de3d009f0691

No failed tests were found; a compilation error may have occurred.

Responsible

Code commits

dijitso
Author            Commit                                    Message
Chris Richardson  8812382f98e82b4192d93b935287ea45e6d36b3a  Fix flake8 warning
testing
Author       Commit                                    Message
Jan Blechta  1fac8b226a1b4c408525379af321de3d009f0691  Add sphinx to dolfinx
Jan Blechta  2df22da893007c5a6cb6119a596686b8d790184e  Fix bug in dolfinx regression test
Jan Blechta  c50f2dbd59753d007542676d087b05b7820b9dd5  Test documentation build in dolfinx
Jan Blechta  196050538542c9a3ea369c80bd72cc5495d0ff8f  Add Python demos to dolfinx tests

Error summary for Unit tests (py3)

The job generated some errors; drill down into the full build log for more details.
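
In the log excerpt below, the "Missing .coveralls.yml file. Using only env variables." line is an informational fallback message from the coverage uploader (it found no configuration file and read its settings from environment variables); the submission still proceeds on the next line. For reference, a minimal .coveralls.yml is sketched below; this is illustrative only, assumes the standard coveralls-python client, and the token value is a placeholder:

    # .coveralls.yml -- read by the coveralls client before submitting coverage
    repo_token: <repository token from coveralls.io>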

usermod: no changes
rank 0: send size with root=0.
rank 0: send data with root=0.
rank 0: send size with root=0.
rank 0: send data with root=0.
Missing .coveralls.yml file. Using only env variables.
Submitting coverage to coveralls.io...
{"source_files": [{"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/__init__.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\n\nfrom pkg_resources import get_distribution\n\n\"\"\"This is dijitso -- a lightweight distributed just-in-time shared\nlibrary builder.\"\"\"\n\n__author__ = \"Martin Sandve Aln\u00e6s\"\n__version__ = get_distribution('fenics-dijitso').version\n\n__all__ = [\"validate_params\", \"jit\", \"extract_factory_function\",\n           \"set_log_level\"]\n\nfrom dijitso.params import validate_params\nfrom dijitso.jit import jit, DijitsoError\nfrom dijitso.jit import extract_factory_function\nfrom dijitso.log import set_log_level\n\n# Import main function, entry point to script\nfrom dijitso.__main__ import main\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, 1, 1, null, 1, null, null, 1, 1, 1, 1, null, null, 1]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/__main__.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"This is the commandline interface to dijitso. 
For usage help, run 'dijitso --help'.\"\"\"\n\nimport sys\nimport argparse\n\nfrom dijitso.params import validate_params\nimport dijitso.cmdline as cmd_namespace\n\n\ndef build_commands(cmd_namespace):\n    \"\"\"Collects functions called cmd_<basename> from given namespace.\n\n    Returns dict {basename: function}.\n    \"\"\"\n    commands = {}\n    cmd_args = {}\n    for name in list(cmd_namespace.keys()):\n        if name.startswith(\"cmd_\"):\n            cmd_name = name.replace(\"cmd_\", \"\")\n            cmd = cmd_namespace[name]\n            commands[cmd_name] = cmd\n            args_name = \"args_\" + cmd_name\n            if args_name in cmd_namespace:\n                cmd_args[cmd_name] = cmd_namespace.get(args_name)\n    return commands, cmd_args\n\n\ndef add_top_arguments(parser):\n    \"Add arguments to top level parser.\"\n    parser.add_argument(\"--verbose\", \"-v\", default=False,\n                        help=\"set logging level\")\n    parser.add_argument(\"--cache-dir\", \"-r\", default=None,\n                        help=\"use non-default cache root path\")\n    parser.add_argument(\"--dry-run\", \"-n\", default=False,\n                        help=\"only show what would be done, don't modify filesystem\")\n\n\ndef extract_params_from_args(args):\n    p = {}\n    p[\"cache\"] = {}\n    if args.cache_dir is not None:\n        p[\"cache\"][\"cache_dir\"] = args.cache_dir\n    return p\n\n\ndef add_common_arguments(parser):\n    \"Add arguments to each subparser.\"\n    pass\n\n\ndef add_cmd_arguments(cmd, parser, args):\n    \"Add arguments specific to a command.\"\n    if hasattr(cmd, \"add_arguments\"):\n        cmd.add_arguments(parser)\n\n\ndef build_parsers(commands, args):\n    \"\"\"Builds a top parser with subparsers for each command.\"\"\"\n    top_parser = argparse.ArgumentParser()\n    add_top_arguments(top_parser)\n\n    subparsers = top_parser.add_subparsers(help=\"command description\", dest=\"cmd_name\")\n    cmd_parsers = {}\n    for cmd_name, cmd in commands.items():\n        parser = subparsers.add_parser(cmd_name, help=cmd.__doc__)\n        add_common_arguments(parser)\n        if cmd_name in args:\n            args[cmd_name](parser)\n        cmd_parsers[cmd_name] = parser\n\n    return top_parser, subparsers, cmd_parsers\n\n\ndef main(args=None):\n    \"\"\"This is the commandline tool for the python module dijitso.\"\"\"\n\n    if args is None:\n        args = sys.argv[1:]\n\n    # Build subparsers for each command\n    commands, cmd_args = build_commands(vars(cmd_namespace))\n    top_parser, subparsers, cmd_parsers = build_parsers(commands, cmd_args)\n\n    # Populate args namespace\n    args_ns = argparse.Namespace()\n    top_parser.parse_args(args, namespace=args_ns)\n\n    # Extract generic params\n    params = extract_params_from_args(args_ns)\n    params = validate_params(params)\n\n    # Run the chosen command (argparse doesn't allow\n    # getting to this point with an invalid cmd_name)\n    assert args_ns.cmd_name in commands\n    cmd = commands[args_ns.cmd_name]\n    return cmd(args_ns, params)\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, null, 1, 1, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, 0, null, 0, null, 0, null, null, null, 1, 0, 0, 0, 0, 0, null, null, 1, null, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, 0, 0, 0, 
0, 0, 0, 0, 0, null, 0, null, null, 1, null, null, 0, 0, null, null, 0, 0, null, null, 0, 0, null, null, 0, 0, null, null, null, 0, 0, 0, null, null, 1, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/build.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Utilities for building libraries with dijitso.\"\"\"\n\nimport tempfile\nimport os\nimport sys\n\nfrom dijitso.system import get_status_output, lockfree_move_file\nfrom dijitso.system import make_dirs, make_executable, store_textfile\nfrom dijitso.log import warning, info, debug\nfrom dijitso.cache import ensure_dirs, make_lib_dir, make_inc_dir\nfrom dijitso.cache import create_fail_dir_path\nfrom dijitso.cache import create_lib_filename, create_lib_basename, create_libname\nfrom dijitso.cache import create_src_filename, create_src_basename\nfrom dijitso.cache import create_inc_filename, create_inc_basename\nfrom dijitso.cache import create_log_filename\nfrom dijitso.cache import compress_source_code\n\n\ndef make_unique(dirs):\n    \"\"\"Take a sequence of hashable items and return a tuple including each\n    only once.\n\n    Preserves original ordering.\n\n    \"\"\"\n    udirs = []\n    found = set()\n    for d in dirs:\n        if d not in found:\n            udirs.append(d)\n            found.add(d)\n    return tuple(udirs)\n\n\ndef make_compile_command(src_filename, lib_filename, dependencies,\n                         build_params, cache_params):\n    \"\"\"Piece together the compile command from build params.\n\n    Returns the command as a list with the command and its arguments.\n    \"\"\"\n    # Get dijitso dirs based on cache_params\n    inc_dir = make_inc_dir(cache_params)\n    lib_dir = make_lib_dir(cache_params)\n\n    # Add dijitso directories to includes, libs, and rpaths\n    include_dirs = make_unique(build_params[\"include_dirs\"] + (inc_dir,))\n    lib_dirs = make_unique(build_params[\"lib_dirs\"] + (lib_dir,))\n    rpath_dirs = make_unique(build_params[\"rpath_dirs\"] + (lib_dir,))\n\n    # Make all paths absolute\n    include_dirs = [os.path.abspath(d) for d in include_dirs]\n    lib_dirs = [os.path.abspath(d) for d in lib_dirs]\n    rpath_dirs = [os.path.abspath(d) for d in rpath_dirs]\n\n    # Build options (defaults assume gcc compatibility)\n    cxxflags = list(build_params[\"cxxflags\"])\n    if build_params[\"debug\"]:\n        cxxflags.extend(build_params[\"cxxflags_debug\"])\n    else:\n        cxxflags.extend(build_params[\"cxxflags_opt\"])\n\n    # Create library names for all dependencies and additional given\n    # libs\n    deplibs = [create_libname(depsig, cache_params)\n               for depsig in dependencies]\n\n    deplibs.extend(build_params[\"libs\"])\n\n    # Get compiler name\n    args = [build_params[\"cxx\"]]\n\n    # Compiler args\n    
args.extend(cxxflags)\n    args.extend(\"-I\" + path for path in include_dirs)\n\n    # The input source\n    args.append(src_filename)\n\n    # Linker args\n    args.extend(\"-L\" + path for path in lib_dirs)\n    args.extend(\"-Wl,-rpath,\" + path for path in rpath_dirs)\n    args.extend(\"-l\" + lib for lib in deplibs)\n\n    # OSX specific:\n    if sys.platform == \"darwin\":\n        full_lib_filename = os.path.join(cache_params[\"cache_dir\"],\n                                         cache_params[\"lib_dir\"],\n                                         os.path.basename(lib_filename))\n        args.append(\"-Wl,-install_name,%s\" % full_lib_filename)\n\n    # The output library\n    args.append(\"-o\" + lib_filename)\n\n    return args\n\n\ndef temp_dir(cache_params):\n    \"\"\"Return a uniquely named temp directory.\n\n    Optionally residing under temp_dir_root from cache_params.\n    \"\"\"\n    return tempfile.mkdtemp(dir=cache_params[\"temp_dir_root\"])\n\n\ndef build_shared_library(signature, header, source, dependencies, params):\n    \"\"\"Build shared library from a source file and store library in\n    cache.\n\n    \"\"\"\n    cache_params = params[\"cache\"]\n    build_params = params[\"build\"]\n\n    # Create basenames\n    inc_basename = create_inc_basename(signature, cache_params)\n    src_basename = create_src_basename(signature, cache_params)\n    lib_basename = create_lib_basename(signature, cache_params)\n\n    # Create a temp directory and filenames within it\n    tmpdir = temp_dir(cache_params)\n    temp_inc_filename = os.path.join(tmpdir, inc_basename)\n    temp_src_filename = os.path.join(tmpdir, src_basename)\n    temp_lib_filename = os.path.join(tmpdir, lib_basename)\n\n    # Store source and header in temp dir\n    if header:\n        store_textfile(temp_inc_filename, header)\n    store_textfile(temp_src_filename, source)\n\n    # Build final command as list of arguments\n    cmd = make_compile_command(temp_src_filename, temp_lib_filename,\n                               dependencies, build_params, cache_params)\n\n    # Execute command to compile generated source code to dynamic\n    # library\n    status, output = get_status_output(cmd)\n\n    # Move files to cache on success or a local dir on failure,\n    # using safe lockfree move\n    if status == 0:\n        # Ensure dirnames exist in cache dirs\n        ensure_dirs(cache_params)\n\n        # Move library first\n        lib_filename = create_lib_filename(signature, cache_params)\n        assert os.path.exists(os.path.dirname(lib_filename))\n        lockfree_move_file(temp_lib_filename, lib_filename)\n\n        # Write header only if there is one\n        if header:\n            inc_filename = create_inc_filename(signature, cache_params)\n            assert os.path.exists(os.path.dirname(inc_filename))\n            lockfree_move_file(temp_inc_filename, inc_filename)\n        else:\n            inc_filename = None\n\n        # Compress or delete source code based on params\n        temp_src_filename = compress_source_code(temp_src_filename, cache_params)\n        if temp_src_filename:\n            src_filename = create_src_filename(signature, cache_params)\n            if temp_src_filename.endswith(\".gz\"):\n                src_filename = src_filename + \".gz\"\n            assert os.path.exists(os.path.dirname(src_filename))\n            lockfree_move_file(temp_src_filename, src_filename)\n        else:\n            src_filename = None\n\n        # Write compiler command and output to log file\n      
  if cache_params[\"enable_build_log\"]:\n            # Recreate compiler command without the tempdir\n            cmd = make_compile_command(src_basename, lib_basename,\n                                       dependencies, build_params, cache_params)\n\n            log_contents = \"%s\\n\\n%s\" % (\" \".join(cmd), output)\n            log_filename = create_log_filename(signature, cache_params)\n            assert os.path.exists(os.path.dirname(log_filename))\n            store_textfile(log_filename, log_contents)\n        else:\n            log_filename = None\n\n        files = set((inc_filename, src_filename, lib_filename, log_filename))\n        files = files - set((None,))\n        files = sorted(files)\n        debug(\"Compilation succeeded. Files written to cache:\\n\" +\n              \"\\n\".join(files))\n        err_info = None\n    else:\n        # Create filenames in a local directory to store files for\n        # reproducing failure\n        fail_dir = create_fail_dir_path(signature, cache_params)\n        make_dirs(fail_dir)\n\n        # Library name is returned below\n        lib_filename = None\n\n        # Write header only if there is one\n        if header:\n            inc_filename = os.path.join(fail_dir, inc_basename)\n            lockfree_move_file(temp_inc_filename, inc_filename)\n\n        # Always write source for inspection after compilation failure\n        src_filename = os.path.join(fail_dir, src_basename)\n        lockfree_move_file(temp_src_filename, src_filename)\n\n        # Write compile command to failure dir, adjusted to use local\n        # source file name so it can be rerun\n        cmd = make_compile_command(src_basename, lib_basename, dependencies,\n                                   build_params, cache_params)\n        cmds = \" \".join(cmd)\n        script = \"#!/bin/bash\\n# Execute this file to recompile locally\\n\" + cmds\n        cmd_filename = os.path.join(fail_dir, \"recompile.sh\")\n        store_textfile(cmd_filename, script)\n        make_executable(cmd_filename)\n\n        # Write readme file with instructions\n        readme = \"Run or source recompile.sh to compile locally and reproduce the build failure.\\n\"\n        readme_filename = os.path.join(fail_dir, \"README\")\n        store_textfile(readme_filename, readme)\n\n        # Write compiler output to failure dir (will refer to temp paths)\n        log_filename = os.path.join(fail_dir, \"error.log\")\n        store_textfile(log_filename, output)\n\n        info(\"------------------- Start compiler output ------------------------\")\n        info(output)\n        info(\"-------------------  End compiler output  ------------------------\")\n        warning(\"Compilation failed! 
Sources, command, and \"\n                \"errors have been written to: %s\" % (fail_dir,))\n\n        err_info = {'src_filename': src_filename,\n                    'cmd_filename': cmd_filename,\n                    'readme_filename': readme_filename,\n                    'fail_dir': fail_dir,\n                    'log_filename': log_filename}\n\n    return status, output, lib_filename, err_info\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, 1, null, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, null, null, 1, null, null, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, null, null, null, null, null, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, null, 0, null, null, null, 0, null, null, 0, null, null, 0, null, null, 0, 0, null, null, 0, null, null, 0, 0, 0, null, null, 0, 0, null, null, 0, null, null, 0, null, 0, null, null, 1, null, null, null, null, 0, null, null, 1, null, null, null, null, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, null, null, null, null, 0, null, null, null, 0, null, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, 0, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 0, null, 0, null, null, 0, 0, 0, 0, null, 0, null, 0, 0, 0, 0, null, 0, null, null, null, 0, 0, null, null, 0, null, null, 0, 0, 0, null, null, 0, 0, null, null, null, 0, null, 0, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, null, 0, 0, 0, 0, null, null, 0, null, null, null, null, null, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/cache.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Utilities for disk cache features of dijitso.\"\"\"\n\nfrom glob import glob\nimport os\nimport re\nimport sys\nimport ctypes\nfrom dijitso.system import ldd\nfrom dijitso.system import make_dirs\nfrom dijitso.system import try_delete_file, try_copy_file\nfrom dijitso.system import gzip_file, gunzip_file\nfrom dijitso.system import read_textfile, store_textfile\nfrom dijitso.log import debug, error, warning\n\n\ndef extract_files(signature, cache_params, prefix=\"\", path=os.curdir,\n                  categories=(\"inc\", \"src\", \"lib\", \"log\")):\n    \"\"\"Make a copy of files stored under this signature.\n\n    Target filenames are '<path>/<prefix>-<signature>.*'\n    \"\"\"\n    path = os.path.join(path, prefix + signature)\n    make_dirs(path)\n\n    if \"inc\" in categories:\n        inc_filename = create_inc_filename(signature, cache_params)\n        try_copy_file(inc_filename, path)\n    if \"src\" in categories:\n        src_filename = create_src_filename(signature, cache_params)\n        if not os.path.exists(src_filename):\n            src_filename = src_filename + \".gz\"\n        if os.path.exists(src_filename):\n            try_copy_file(src_filename, path)\n            if src_filename.endswith(\".gz\"):\n                gunzip_file(os.path.join(path, os.path.basename(src_filename)))\n    if \"lib\" in categories:\n        lib_filename = create_lib_filename(signature, cache_params)\n        try_copy_file(lib_filename, path)\n    if \"log\" in categories:\n        log_filename = create_log_filename(signature, cache_params)\n        try_copy_file(log_filename, path)\n\n    return path\n\n\ndef extract_lib_signatures(cache_params):\n    \"Extract signatures from library files in cache.\"\n    p = os.path.join(cache_params[\"cache_dir\"], cache_params[\"lib_dir\"])\n    filenames = glob(os.path.join(p, \"*\"))\n\n    r = re.compile(create_lib_filename(\"(.*)\", cache_params))\n    sigs = []\n    for f in filenames:\n        m = r.match(f)\n        if m:\n            sigs.append(m.group(1))\n    return sigs\n\n\ndef clean_cache(cache_params, dryrun=True,\n                categories=(\"inc\", \"src\", \"lib\", \"log\")):\n    \"Delete files from cache.\"\n    gc = glob_cache(cache_params, categories=categories)\n    for category in gc:\n        for fn in gc[category]:\n            if dryrun:\n                print(\"rm %s\" % (fn,))\n            else:\n                try_delete_file(fn)\n\n\ndef glob_cache(cache_params, categories=(\"inc\", \"src\", \"lib\", \"log\")):\n    \"\"\"Return dict with contents of cache subdirectories.\"\"\"\n    g = {}\n    for foo in categories:\n        p = os.path.join(cache_params[\"cache_dir\"], cache_params[foo + \"_dir\"])\n        g[foo] = glob(os.path.join(p, \"*\"))\n    return g\n\n\ndef grep_cache(regex, cache_params,\n               linenumbers=False, countonly=False,\n               signature=None,\n               categories=(\"inc\", \"src\", \"log\")):\n    \"Search through files in cache for a pattern.\"\n    allmatches = {}\n    gc = glob_cache(cache_params, categories=categories)\n    for category in categories:\n        for fn in gc.get(category, ()):\n            # Skip non-matches if specific signature is specified\n            if signature is not None and signature not in fn:\n                continue\n\n            if countonly:\n                matches = 0\n            else:\n                matches = []\n\n            if category == \"lib\":\n                # If category 
is \"lib\", use ldd\n                # TODO: on mac need to use otool\n                libs = ldd(fn)\n                for k, libpath in sorted(libs.items()):\n                    if not libpath:\n                        continue\n                    m = regex.match(libpath)\n                    if m:\n                        if countonly:\n                            matches += 1\n                        else:\n                            line = \"%s => %s\" % (k, libpath)\n                            matches.append(line)\n            else:\n                content = read_textfile(fn)\n                lines = content.splitlines() if content else ()\n                for i, line in enumerate(lines):\n                    m = regex.match(line)\n                    if m:\n                        if countonly:\n                            matches += 1\n                        else:\n                            line = line.rstrip(\"\\n\\r\")\n                            if linenumbers:\n                                line = (i, line)\n                            matches.append(line)\n\n            if matches:\n                allmatches[fn] = matches\n    return allmatches\n\n\ndef extract_function(lines):\n    \"Extract function code starting at first line of lines.\"\n    n = len(lines)\n\n    # Function starts at line 0 by assumption\n    begin = 0\n\n    # Worst case body range\n    body_begin = begin\n    body_end = n\n\n    # Body starts at first {\n    for i in range(begin, n):\n        if \"{\" in lines[i]:\n            body_begin = i\n            break\n\n    # Body ends when {} are balanced back to 0\n    braces = 0\n    for i in range(body_begin, n):\n        if \"{\" in lines[i]:\n            braces += 1\n        if \"}\" in lines[i]:\n            braces -= 1\n        if braces == 0:\n            body_end = i\n            break\n\n    # Include the last line in range\n    end = body_end + 1\n    sublines = lines[begin:end]\n    return \"\".join(sublines)\n\n\ndef _create_basename(foo, signature, cache_params):\n    return \"\".join((cache_params.get(foo + \"_prefix\", \"\"),\n                    cache_params.get(foo + \"_basename\", \"\"),\n                    signature,\n                    cache_params.get(foo + \"_postfix\", \"\")))\n\n\ndef _create_filename(foo, signature, cache_params):\n    basename = _create_basename(foo, signature, cache_params)\n    return os.path.join(cache_params[\"cache_dir\"],\n                        cache_params[foo + \"_dir\"], basename)\n\n\ndef create_log_filename(signature, cache_params):\n    \"Create log filename based on signature and params.\"\n    return _create_filename(\"log\", signature, cache_params)\n\n\ndef create_inc_basename(signature, cache_params):\n    \"Create header filename based on signature and params.\"\n    return _create_basename(\"inc\", signature, cache_params)\n\n\ndef create_inc_filename(signature, cache_params):\n    \"Create header filename based on signature and params.\"\n    return _create_filename(\"inc\", signature, cache_params)\n\n\ndef create_src_filename(signature, cache_params):\n    \"Create source code filename based on signature and params.\"\n    return _create_filename(\"src\", signature, cache_params)\n\n\ndef create_src_basename(signature, cache_params):\n    \"Create source code filename based on signature and params.\"\n    return _create_basename(\"src\", signature, cache_params)\n\n\ndef create_lib_basename(signature, cache_params):\n    \"Create library filename based on signature and params.\"\n 
   return _create_basename(\"lib\", signature, cache_params)\n\n\ndef create_lib_filename(signature, cache_params):\n    \"Create library filename based on signature and params.\"\n    return _create_filename(\"lib\", signature, cache_params)\n\n\ndef create_libname(signature, cache_params):\n    \"\"\"Create library name based on signature and params,\n    without path, prefix 'lib', or extension '.so'.\"\"\"\n    return cache_params[\"lib_basename\"] + signature\n\n\ndef create_fail_dir_path(signature, cache_params):\n    \"Create path name to place files after a module build failure.\"\n    fail_root = cache_params[\"fail_dir_root\"] or os.curdir\n    fail_dir = os.path.join(fail_root, \"jitfailure-\" + signature)\n    return os.path.abspath(fail_dir)\n\n\ndef make_inc_dir(cache_params):\n    d = os.path.join(cache_params[\"cache_dir\"], cache_params[\"inc_dir\"])\n    make_dirs(d)\n    return d\n\n\ndef make_src_dir(cache_params):\n    d = os.path.join(cache_params[\"cache_dir\"], cache_params[\"src_dir\"])\n    make_dirs(d)\n    return d\n\n\ndef make_lib_dir(cache_params):\n    d = os.path.join(cache_params[\"cache_dir\"], cache_params[\"lib_dir\"])\n    make_dirs(d)\n    return d\n\n\ndef make_log_dir(cache_params):\n    d = os.path.join(cache_params[\"cache_dir\"], cache_params[\"log_dir\"])\n    make_dirs(d)\n    return d\n\n\n_ensure_dirs_called = {}\n\n\ndef ensure_dirs(cache_params):\n    global _ensure_dirs_called\n    # This ensures directories are created only once during a process\n    # for each value that cache_dir takes, in case it changes during\n    # the process lifetime.\n    c = cache_params[\"cache_dir\"]\n    if c not in _ensure_dirs_called:\n        make_inc_dir(cache_params)\n        make_src_dir(cache_params)\n        make_lib_dir(cache_params)\n        make_log_dir(cache_params)\n        _ensure_dirs_called[c] = True\n\n\ndef read_library_binary(lib_filename):\n    \"Read compiled shared library as binary blob into a numpy byte array.\"\n    import numpy\n    return numpy.fromfile(lib_filename, dtype=numpy.uint8)\n\n\ndef write_library_binary(lib_data, signature, cache_params):\n    \"Store compiled shared library from binary blob in numpy byte array to cache.\"\n    make_lib_dir(cache_params)\n    lib_filename = create_lib_filename(signature, cache_params)\n    lib_data.tofile(lib_filename)\n    # TODO: Set permissions?\n\n\ndef analyse_load_error(e, lib_filename, cache_params):\n    # Try to analyse error further for better error message:\n    msg = str(e)\n    r = re.compile(\"(\" + create_lib_basename(\".*\", cache_params) + \")\")\n    m = r.match(msg)\n    if m:\n        # Found libname mentioned in message\n        mlibname = m.group(1)\n        mlibname = os.path.join(cache_params[\"cache_dir\"],\n                                cache_params[\"lib_dir\"], mlibname)\n    else:\n        mlibname = lib_filename\n\n    if lib_filename != mlibname:\n        # Message mentions some other dijitso library,\n        # double check if this other file exists\n        # (if it does, could be paths or rpath issue)\n        if os.path.exists(mlibname):\n            emsg = (\"dijitso failed to load library:\\n\\t%s\\n\"\n                    \"but dependency file exists:\\n\\t%s\\nerror is:\\n\\t%s\" % (\n                        lib_filename, mlibname, str(e)))\n        else:\n            emsg = (\"dijitso failed to load library:\\n\\t%s\\n\"\n                    \"dependency file missing:\\n\\t%s\\nerror is:\\n\\t%s\" % (\n                        lib_filename, 
mlibname, str(e)))\n    else:\n        # Message doesn't mention another dijitso library,\n        # double check if library file we tried to load exists\n        # (if it does, could be paths issue)\n        if os.path.exists(lib_filename):\n            emsg = (\"dijitso failed to load existing file:\\n\"\n                    \"\\t%s\\nerror is:\\n\\t%s\" % (lib_filename, str(e)))\n        else:\n            emsg = (\"dijitso failed to load missing file:\\n\"\n                    \"\\t%s\\nerror is:\\n\\t%s\" % (lib_filename, str(e)))\n    return emsg\n\n\ndef load_library(signature, cache_params):\n    \"\"\"Load existing dynamic library from disk.\n\n    Returns library module if found, otherwise None.\n\n    If found, the module is placed in memory cache for later lookup_lib calls.\n    \"\"\"\n    lib_filename = create_lib_filename(signature, cache_params)\n    if not os.path.exists(lib_filename):\n        debug(\"File %s does not exist\" % (lib_filename,))\n        return None\n    debug(\"Loading %s from %s\" % (signature, lib_filename))\n\n    if cache_params[\"lib_loader\"] == \"ctypes\":\n        try:\n            lib = ctypes.cdll.LoadLibrary(lib_filename)\n        except os.error as e:\n            lib = None\n            emsg = analyse_load_error(e, lib_filename, cache_params)\n            warning(emsg)\n        else:\n            debug(\"Loaded %s from %s\" % (signature, lib_filename))\n    elif cache_params[\"lib_loader\"] == \"import\":\n        sys.path.append(os.path.dirname(lib_filename))\n        # Will raise an exception if it does not load correctly\n        lib = __import__(signature)\n        debug(\"Loaded %s from %s\" % (signature, lib_filename))\n    else:\n        error(\"Invalid loader: %s\" % cache_params[\"lib_loader\"])\n\n    if lib is not None:\n        # Disk loading succeeded, register loaded library in memory\n        # cache for next time\n        _lib_cache[signature] = lib\n    return lib\n\n\n# A cache is always something to be careful about.  
This one stores\n# references to loaded jit-compiled libraries, which will stay in\n# memory unless manually unloaded anyway and should not cause any\n# trouble.\n_lib_cache = {}\n\n\ndef lookup_lib(lib_signature, cache_params):\n    \"\"\"Lookup library in memory cache then in disk cache.\n\n    Returns library module if found, otherwise None.\n    \"\"\"\n    # Look for already loaded library in memory cache\n    lib = _lib_cache.get(lib_signature)\n    if lib is None:\n        # Cache miss in memory, try looking on disk\n        lib = load_library(lib_signature, cache_params)\n    else:\n        debug(\"Fetched %s from memory cache\" % (lib_signature,))\n    # Return library or None\n    return lib\n\n\ndef read_src(signature, cache_params):\n    \"\"\"Lookup source code in disk cache and return file contents or None.\"\"\"\n    filename = create_src_filename(signature, cache_params)\n    return read_textfile(filename)\n\n\ndef read_inc(signature, cache_params):\n    \"\"\"Lookup header file in disk cache and return file contents or None.\"\"\"\n    filename = create_inc_filename(signature, cache_params)\n    return read_textfile(filename)\n\n\ndef read_log(signature, cache_params):\n    \"\"\"Lookup log file in disk cache and return file contents or None.\"\"\"\n    filename = create_log_filename(signature, cache_params)\n    return read_textfile(filename)\n\n\ndef store_src(signature, content, cache_params):\n    \"Store source code in file within dijitso directories.\"\n    make_src_dir(cache_params)\n    filename = create_src_filename(signature, cache_params)\n    store_textfile(filename, content)\n    return filename\n\n\ndef store_inc(signature, content, cache_params):\n    \"Store header file within dijitso directories.\"\n    make_inc_dir(cache_params)\n    filename = create_inc_filename(signature, cache_params)\n    store_textfile(filename, content)\n    return filename\n\n\ndef store_log(signature, content, cache_params):\n    \"Store log file within dijitso directories.\"\n    make_log_dir(cache_params)\n    filename = create_log_filename(signature, cache_params)\n    store_textfile(filename, content)\n    return filename\n\n\ndef compress_source_code(src_filename, cache_params):\n    \"\"\"Keep, delete or compress source code based on value of cache parameter 'src_storage'.\n\n    Can be \"keep\", \"delete\", or \"compress\".\n    \"\"\"\n    src_storage = cache_params[\"src_storage\"]\n    if src_storage == \"keep\":\n        filename = src_filename\n    elif src_storage == \"delete\":\n        try_delete_file(src_filename)\n        filename = None\n    elif src_storage == \"compress\":\n        filename = gzip_file(src_filename)\n        try_delete_file(src_filename)\n    else:\n        error(\"Invalid src_storage parameter. 
Expecting 'keep', 'delete', or 'compress'.\")\n    return filename\n\n\ndef get_dijitso_dependencies(libname, cache_params):\n    \"Run ldd and filter output to only include dijitso cache entries.\"\n    libs = ldd(libname)\n    dlibs = {}\n    for k in libs:\n        if k.startswith(cache_params[\"lib_prefix\"]):\n            dlibs[k] = libs[k]\n    return dlibs\n\n\n# TODO: Use this in command-line tools?\ndef check_cache_integrity(cache_params):\n    \"Check dijitso cache integrity.\"\n    libnames = set(glob(cache_params[\"lib_prefix\"] + \"*\" + cache_params[\"lib_postfix\"]))\n    dmissing = {}\n    for libname in libnames:\n        dlibs = get_dijitso_dependencies(libname, cache_params)\n        # Missing on file system:\n        missing = [k for k in dlibs if k not in libnames]\n        for k in dlibs:\n            if k not in missing:\n                # ldd thinks file is missing but it's there, linker issue?\n                pass\n        if missing:\n            dmissing[libname] = sorted(missing)\n    return dmissing\n\n\ndef report_cache_integrity(dmissing, out=warning):\n    \"Print cache integrity report.\"\n    if dmissing:\n        out(\"%d libraries are missing one or more dependencies:\" % len(dmissing))\n        for k in sorted(dmissing):\n            out(\"\\t%s depends on missing libraries:\" % k)\n            for m in dmissing[k]:\n                out(\"\\t\\t%s\" % m)\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, null, null, 1, null, null, null, null, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 1, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, null, 0, 0, 0, 0, 0, null, 0, null, null, 1, null, 0, 0, 0, 0, 0, null, null, 1, null, null, null, null, 0, 0, 0, 0, null, 0, 0, null, 0, 0, null, 0, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, 0, 0, 0, null, null, 1, null, 0, null, null, 0, null, null, 0, 0, null, null, 0, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 1, 1, null, null, null, null, null, 1, 1, 1, null, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 1, null, null, 1, null, null, 0, null, null, 1, null, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, 0, null, null, null, 1, null, 0, 0, 0, 0, null, 0, 0, null, null, 0, null, 0, null, null, null, 0, 0, null, null, null, 0, null, null, null, null, null, null, 0, 0, null, null, 0, null, 0, null, null, 1, null, null, null, null, null, null, 1, 1, 0, 0, 1, null, 1, 1, 1, 0, 0, 0, 0, null, 1, 0, 0, null, 0, 0, null, 0, null, 1, null, null, 1, 1, null, null, null, null, null, null, 1, null, null, 1, null, null, null, null, null, 1, 1, null, 1, null, 1, null, 1, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, null, null, 1, null, 0, 0, 0, 0, 0, 0, null, null, null, 1, null, 0, 0, 0, 0, null, 0, 0, 0, null, 0, 0, 0, 0, null, null, 1, null, 0, 
0, 0, 0, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/cmdline.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"This file contains the commands available through command-line dijitso-cache.\n\nEach function cmd_<cmdname> becomes a subcommand invoked by::\n\n    dijitso-cache cmdname ...args\n\nThe docstrings in the cmd_<cmdname> are shown when running::\n\n    dijitso-cache cmdname --help\n\nThe 'args' argument to cmd_* is a Namespace object with the commandline arguments.\n\n\"\"\"\n\nimport os\nimport re\n\nfrom dijitso import __version__\nfrom dijitso.cache import glob_cache, grep_cache, clean_cache\nfrom dijitso.cache import extract_lib_signatures\nfrom dijitso.cache import extract_files, extract_function\nfrom dijitso.system import read_textfile\n\n\ndef parse_categories(categories):\n    if categories == \"all\":\n        return (\"inc\", \"src\", \"lib\", \"log\")\n    return categories.split(\",\")\n\n\ndef args_version(parser):\n    pass\n\n\ndef cmd_version(args, params):\n    \"print dijitso version\"\n    print(__version__)\n\n\ndef args_config(parser):\n    parser.add_argument(\"--key\", default=\"\", help=\"specific key to show (e.g. 
build.cxxflags)\")\n\n\ndef cmd_config(args, params):\n    \"show configuration\"\n    # Show single value if asked for\n    key = args.key\n    if key:\n        name = key\n        value = params\n        for k in key.split(\".\"):\n            value = value[k]\n        # Compiler flags etc are more useful in space separated form:\n        if isinstance(value, tuple):\n            value = \" \".join(value)\n        print(\"    %s: %s\" % (name, value))\n        return 0\n\n    # Pick non-empty categories\n    categories = sorted(c for c in params if params[c])\n    print(\"Showing default flags for dijitso:\")\n    for category in categories:\n        print(\"%s:\" % (category,))\n        for name in sorted(params[category]):\n            value = params[category][name]\n            # Compiler flags etc are more useful in space separated form:\n            if isinstance(value, tuple):\n                value = \" \".join(value)\n            print(\"    %s: %s\" % (name, value))\n    return 0\n\n\ndef args_show(parser):\n    parser.add_argument(\"--categories\", default=\"all\",\n                        help=\"comma separated list to enable file types (inc,src,lib,log)\")\n    parser.add_argument(\"--no-summary\", action=\"store_true\",\n                        help=\"don't show summary\")\n    parser.add_argument(\"--files\", action=\"store_true\",\n                        help=\"show file lists\")\n    parser.add_argument(\"--signatures\", action=\"store_true\",\n                        help=\"show library signatures\")\n\n\ndef cmd_show(args, params):\n    \"show lists of files in cache\"\n    cache_params = params[\"cache\"]\n\n    summary = not args.no_summary\n    files = args.files\n    signatures = args.signatures\n    categories = parse_categories(args.categories)\n\n    gc = glob_cache(cache_params, categories=categories)\n\n    if signatures:\n        sigs = extract_lib_signatures(cache_params)\n        print(\"\\n\".join(\"\\t\" + s for s in sorted(sigs)))\n    if files:\n        for cat in categories:\n            print(\"\\n\".join(\"\\t\" + f for f in sorted(gc.get(cat, ()))))\n    if summary:\n        print(\"dijitso cache summary (number of cached files):\")\n        for cat in categories:\n            print(\"%s: %d\" % (cat, len(gc.get(cat, ()))))\n        # TODO: Add summary of file sizes\n    return 0\n\n\ndef args_clean(parser):\n    parser.add_argument(\"--categories\", default=\"inc,src,lib,log\",\n                        help=\"comma separated list to enable file types (inc,src,lib,log)\")\n\n\ndef cmd_clean(args, params):\n    \"remove files from cache\"\n    cache_params = params[\"cache\"]\n\n    dryrun = args.dry_run\n    categories = parse_categories(args.categories)\n\n    clean_cache(cache_params, dryrun=dryrun, categories=categories)\n    return 0\n\n\ndef args_grep(parser):\n    parser.add_argument(\"--categories\", default=\"inc,src\",\n                        help=\"comma separated list to enable file types (inc,src,lib,log)\")\n    parser.add_argument(\"--pattern\", default=\"\",\n                        help=\"line search pattern\")\n    parser.add_argument(\"--regexmode\", action=\"store_true\",\n                        help=\"pattern is a regular expression (python style)\")\n    parser.add_argument(\"--linenumbers\", action=\"store_true\",\n                        help=\"show linenumbers on matches\")\n    parser.add_argument(\"--countonly\", action=\"store_true\",\n                        help=\"show only match line count for each file\")\n    
parser.add_argument(\"--filesonly\", action=\"store_true\",\n                        help=\"show only filenames with matches\")\n    parser.add_argument(\"--signature\", default=\"\",\n                        help=\"look for module with this signature (default all)\")\n\n\ndef cmd_grep(args, params):\n    \"grep content of header and source file(s) in cache\"\n    cache_params = params[\"cache\"]\n\n    # Get command-line arguments\n    pattern = args.pattern\n    signature = args.signature\n    regexmode = args.regexmode\n    linenumbers = args.linenumbers\n    countonly = args.countonly\n    filesonly = args.filesonly\n    categories = parse_categories(args.categories)\n\n    if not regexmode:\n        pattern = \".*(\" + pattern + \").*\"\n    regex = re.compile(pattern)\n    allmatches = grep_cache(regex, cache_params,\n                            linenumbers=linenumbers, countonly=countonly,\n                            signature=signature,\n                            categories=categories)\n    if filesonly:\n        print(\"\\n\".join(sorted(allmatches)))\n    elif countonly:\n        print(\"\\n\".join(\"%s: %d\" % (k, v) for k, v in sorted(allmatches.items())))\n    else:\n        for fn in sorted(allmatches):\n            print(\"\\nFile '%s' matches:\" % (fn,))\n            if linenumbers:\n                print(\"\\n\".join(\"%5d:\\t%s\" % line for line in allmatches[fn]))\n            else:\n                print(\"\\n\".join(\"\\t\" + line for line in allmatches[fn]))\n    return 0\n\n\ndef args_grepfunction(parser):\n    parser.add_argument(\"--categories\", default=\"src\",\n                        help=\"comma separated list to enable file types (inc,src,lib,log)\")\n    parser.add_argument(\"--name\", default=\"\",\n                        help=\"function name to search for\")\n    parser.add_argument(\"--signature\", default=\"\",\n                        help=\"look for module with this signature (default all)\")\n    parser.add_argument(\"--no-body\", action=\"store_true\",\n                        help=\"don't show function bodies\")\n\n\ndef cmd_grepfunction(args, params):\n    \"search for function name in source files in cache\"\n    cache_params = params[\"cache\"]\n\n    name = args.name\n    signature = args.signature\n    categories = parse_categories(args.categories)\n    no_body = args.no_body\n\n    pattern = r\".*(\" + name + r\")[ ]*\\((.*)\"\n    regex = re.compile(pattern)\n\n    allmatches = grep_cache(regex, cache_params,\n                            linenumbers=True, countonly=False,\n                            signature=signature,\n                            categories=categories)\n    for fn in sorted(allmatches):\n        if no_body:\n            # Just print signature lines\n            print(\"\\nFile '%s' matches:\" % (fn,))\n            for i, line in allmatches[fn]:\n                print(\"%5d: %s\" % (i, line))\n        else:\n            # Print function bodies\n            content = read_textfile(fn)\n            lines = content.splitlines() if content else ()\n            for i, line in allmatches[fn]:\n                print(\"%s:%d\" % (fn, i))\n                assert name in lines[i]\n                print(extract_function(lines[i:]))\n                print()\n    return 0\n\n\ndef args_checkout(parser):\n    parser.add_argument(\"--categories\", default=\"inc,src,lib,log\",\n                        help=\"comma separated list to enable file types (inc,src,lib,log)\")\n    parser.add_argument(\"--signature\",\n                        
help=\"module signature (required)\")\n\n\ndef cmd_checkout(args, params):\n    \"copy files from cache to a directory\"\n    cache_params = params[\"cache\"]\n\n    signature = args.signature\n    categories = parse_categories(args.categories)\n\n    prefix = \"jitcheckout-\"\n    path = os.curdir\n    path = extract_files(signature, cache_params,\n                         prefix=prefix, path=path,\n                         categories=categories)\n    print(\"Extracted files to '%s'.\" % (path,))\n    return 0\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, 1, null, 1, 1, 1, 1, 1, null, null, 1, 0, 0, 0, null, null, 1, 0, null, null, 1, null, 0, null, null, 1, 0, null, null, 1, null, null, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, 0, 0, null, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 1, 0, null, null, null, 1, null, 0, null, 0, 0, null, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, null, 0, 0, 0, 0, null, 0, 0, 0, 0, null, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, 0, 0, null, 0, 0, null, 0, null, null, null, 0, 0, null, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 1, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, null, 0, 0, 0, null, null, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/jit.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"This module contains the main jit() function and related utilities.\"\"\"\n\nimport ctypes\nimport numpy\n\nfrom dijitso.log import error\nfrom dijitso.params import validate_params\nfrom dijitso.str import as_unicode\nfrom dijitso.cache import lookup_lib, load_library\nfrom dijitso.cache import write_library_binary, read_library_binary\nfrom dijitso.build import build_shared_library\nfrom dijitso.signatures import hash_params\n\n\nclass DijitsoError(RuntimeError):\n    def __init__(self, message, err_info):\n        super(DijitsoError, self).__init__(message)\n        self.err_info = err_info\n\n\ndef extract_factory_function(lib, name):\n    \"\"\"Extract function from loaded library.\n\n    Assuming signature ``(void *)()``, for anything else use look at\n    ctypes documentation.\n\n    Returns the factory function or raises error.\n    \"\"\"\n    function = getattr(lib, name)\n    function.restype = ctypes.c_void_p\n    return function\n\n\ndef jit_signature(name, params):  # TODO: Unused?\n    \"\"\"Compute the signature that jit will use for given name and params.\"\"\"\n\n    # Validation and completion with defaults for missing parameters\n    params = validate_params(params)\n\n    # Extend provided name of jitable with hash of relevant parameters\n    signature_params = {\n        \"generator\": params[\"generator\"],\n        \"build\": params[\"build\"]\n    }\n\n    signature = \"%s_%s\" % (name, hash_params(signature_params))\n    return signature\n\n\n# TODO: send, receive, wait functionality is not currently in use,\n# decide to use it from dolfin or clean up the code and comments here.\ndef jit(jitable, name, params, generate=None,\n        send=None, receive=None, wait=None):\n    \"\"\"Just-in-time compile and import of a shared library with a cache mechanism.\n\n    A signature is computed from the name, params[\"generator\"],\n    and params[\"build\"]. The name should be a unique identifier\n    for the jitable, preferrably produced by a good hash function.\n\n    The signature is used to identity if the library has already been\n    compiled and cached. A two-level memory and disk cache ensures good\n    performance for repeated lookups within a single program as well as\n    persistence across program runs.\n\n    If no library has been cached, the passed 'generate' function is\n    called to generate the source code:\n\n        header, source, dependencies = \\\n            generate(jitable, name, signature, params[\"generator\"])\n\n    It is expected to translate the 'jitable' object into\n    C or C++ (default) source code which will subsequently be\n    compiled as a shared library and stored in the disk cache.\n    The returned 'dependencies' should be a tuple of signatures\n    returned from other completed dijitso.jit calls, and are\n    linked to when building.\n\n    The compiled shared library is then loaded with ctypes and returned.\n\n    For use in a parallel (MPI) context, three functions send, receive,\n    and wait can be provided. 
Each process can take on a different role\n    depending on whether generate, or receive, or neither is provided.\n\n      * Every process that gets a generate function is called a 'builder',\n        and will generate and compile code as described above on a cache miss.\n        If the function send is provided, it will then send the shared library\n        binary file as a binary blob by calling send(numpy_array).\n\n      * Every process that gets a receive function is called a 'receiver',\n        and will call 'numpy_array = receive()' expecting the binary blob\n        with a compiled binary shared library which will subsequently be\n        written to file in the local disk cache.\n\n      * The rest of the processes are called 'waiters' and will do nothing.\n\n      * If provided, all processes will call wait() before attempting to\n        load the freshly compiled library from disk cache.\n\n    The intention of the above pattern is to be flexible, allowing several\n    different strategies for sharing build results. The user of dijitso\n    can determine groups of processes that share a disk cache, and assign\n    one process per physical disk cache directory to write to that directory,\n    avoiding multiple processes writing to the same files.\n\n    This forms the basis for three main strategies:\n\n      * Build on every process.\n\n      * Build on one process per physical cache directory.\n\n      * Build on a single global root node and send a copy of\n        the binary to one process per physical cache directory.\n\n    It is highly recommended to avoid have multiple builder processes\n    sharing a physical cache directory.\n    \"\"\"\n    # TODO: Could simplify interface here and roll\n    #   (jitable, name, params[\"generator\"]) into a single jitobject?\n    # TODO: send/receive doesn't combine well with generate\n    #   triggering additional jit calls for dependencies.\n    #   It's possible that dependencies are hard to determine without\n    #   generate doing some analysis that we want to avoid.\n    #   Drop send/receive? Probably not that useful anyway.\n\n    # Complete params with hardcoded defaults and config file defaults\n    params = validate_params(params)\n\n    # 0) Look for library in memory or disk cache\n    # FIXME: use only name as signature for now\n    # TODO: just remove one of signature or name from API?\n    # signature = jit_signature(name, params)\n    name = as_unicode(name)\n    signature = name\n    cache_params = params[\"cache\"]\n    lib = lookup_lib(signature, cache_params)\n    err_info = None\n\n    if lib is None:\n        # Since we didn't find the library in cache, we must build it.\n\n        if receive and generate:\n            # We're not supposed to generate if we're receiving\n            error(\"Please provide only one of generate or receive.\")\n\n        elif generate:\n            # 1) Generate source code\n            header, source, dependencies = generate(jitable, name, signature, params[\"generator\"])\n            # Ensure we got unicode from generate\n            header = as_unicode(header)\n            source = as_unicode(source)\n            dependencies = [as_unicode(dep) for dep in dependencies]\n\n            # 2) Compile shared library and 3) store in dijitso\n            # inc/src/lib dir on success\n            # NB! 
It's important to not raise exception on compilation\n            # failure, such that we can reach wait() together with\n            # other processes if any.\n            status, output, lib_filename, err_info = \\\n                build_shared_library(signature, header, source, dependencies,\n                                     params)\n\n            # 4a) Send library over network if we have a send function\n            if send:\n                if status == 0:\n                    lib_data = read_library_binary(lib_filename)\n                else:\n                    lib_data = numpy.zeros((1,))\n                send(lib_data)\n\n        elif receive:\n            # 4b) Get library as binary blob from given receive\n            # function and store in cache\n            lib_data = receive()\n            # Empty if compilation failed\n            status = -1 if lib_data.shape == (1,) else 0\n            if status == 0:\n                write_library_binary(lib_data, signature, cache_params)\n\n        else:\n            # Do nothing (we'll be waiting below for other process to\n            # build)\n            if not wait:\n                error(\"Please provide wait if not providing one of generate or receive.\")\n\n        # 5) Notify waiters that we're done / wait for builder to\n        # notify us\n        if wait:\n            wait()\n\n        # Finally load library from disk cache (places in memory\n        # cache)\n        # NB! This returns None if the file does not exist,\n        # i.e. if compilation failed on builder process\n        lib = load_library(signature, cache_params)\n\n    if err_info:\n        # TODO: Parse output to find error(s) for better error messages\n        raise DijitsoError(\"Dijitso JIT compilation failed, see '%s' for details\"\n                           % err_info['fail_dir'], err_info)\n\n    # Return built library and its signature\n    return lib, signature\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, null, 1, 1, 1, 1, 1, 1, 1, null, null, 1, 1, 0, 0, null, null, 1, null, null, null, null, null, null, null, 1, 1, 1, null, null, 1, null, null, null, 0, null, null, 0, null, null, null, null, 0, 0, null, null, null, null, 1, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, null, 1, 1, 1, 1, 1, null, 1, null, null, 0, null, 0, null, 0, null, 0, null, 0, 0, 0, null, null, null, null, null, null, 0, null, null, null, null, 0, 0, 0, null, 0, 0, null, 0, null, null, 0, null, 0, 0, 0, null, null, null, null, 0, 0, null, null, null, 0, 0, null, null, null, null, null, 0, null, 1, null, 0, null, null, null, 1]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/log.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s, Jan Blechta\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your 
option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\n\nimport logging\n\n__all__ = ['set_log_level', 'get_logger', 'get_log_handler', 'set_log_handler']\n\n\n_log = logging.getLogger(\"dijitso\")\n_loghandler = logging.StreamHandler()\n_log.addHandler(_loghandler)\n_log.setLevel(logging.INFO)\n\n\ndef get_log_handler():\n    return _loghandler\n\n\ndef get_logger():\n    return _log\n\n\ndef set_log_handler(handler):\n    global _loghandler\n    _log.removeHandler(_loghandler)\n    _loghandler = handler\n    _log.addHandler(_loghandler)\n\n\ndef set_log_level(level):\n    \"\"\"Set verbosity of logging. Argument is int or one of \"INFO\", \"WARNING\",\n    \"ERROR\", or \"DEBUG\".\n    \"\"\"\n    if isinstance(level, str):\n        level = level.upper()\n        assert level in (\"INFO\", \"WARNING\", \"ERROR\", \"DEBUG\")\n        level = getattr(logging, level)\n    else:\n        assert isinstance(level, int)\n    _log.setLevel(level)\n\n\n# Logging interface for dijitso library\n\ndef debug(*message):\n    _log.debug(*message)\n\n\ndef info(*message):\n    _log.info(*message)\n\n\ndef warning(*message):\n    _log.warning(*message)\n\n\ndef error(*message):\n    _log.error(*message)\n    text = message[0] % message[1:]\n    raise RuntimeError(text)\n\n\ndef dijitso_assert(condition, *message):\n    if not condition:\n        _log.error(*message)\n        text = message[0] % message[1:]\n        raise AssertionError(text)\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, null, null, 1, 1, 1, 1, null, null, 1, 0, null, null, 1, 0, null, null, 1, null, 0, 0, 0, null, null, 1, null, null, null, 0, 0, 0, 0, null, 0, 0, null, null, null, null, 1, 1, null, null, 1, 0, null, null, 1, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/mpi.py", "source": "# -*- coding: utf-8 -*-\n# Copyright (C) 2015-2016 Martin Sandve Aln\u00e6s\n#\n# This file is part of DIJITSO.\n#\n# DIJITSO is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# DIJITSO is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"Utilities for mpi features of dijitso.\"\"\"\n\nimport io\nimport os\nimport uuid\nfrom glob import glob\n\nimport numpy\n\nfrom dijitso.log import info, error\nfrom dijitso.system import try_delete_file\n\n\ndef bcast_uuid(comm):\n    \"Create a unique id shared across all processes in comm.\"\n    guid = numpy.ndarray((1,), dtype=numpy.uint64)\n    if comm.rank == 0:\n        # uuid creates a unique 128 bit id, we just pick the low 64 bits\n        guid[0] = numpy.uint64(uuid.uuid4().int & ((1 << 64) - 1))\n    comm.Bcast(guid, root=0)\n    return int(guid[0])\n\n\ndef discover_path_access_ranks(comm, path):\n    \"\"\"Discover which ranks share access to the same directory.\n\n    This cannot be done by comparing paths, because\n    a path string can represent a local work directory\n    or a network mapped directory, depending on cluster\n    configuration.\n\n    Current approach is that each process touches a\n    filename with its own rank in their given path.\n    By reading in the filelist from the same path,\n    we'll find which ranks have access to the same\n    directory.\n\n    To avoid problems with leftover files from previous\n    program crashes, or collisions between simultaneously\n    running programs, we use a random uuid in the filenames\n    written.\n    \"\"\"\n    # Create a unique basename for rank files of this program\n    guid = bcast_uuid(comm)  # TODO: Run this in an init function and store for program duration?\n    basename = os.path.join(path, \"rank.%d.\" % guid)\n\n    # Write the rank of this process to a filename\n    filename = basename + str(comm.rank)\n    with io.open(filename, \"wb\"):\n        pass\n\n    # Wait for all writes to take place. Don't know how robust this is\n    # with nfs!!!\n    comm.Barrier()\n\n    # Read filelist\n    noderanks = sorted([int(fn.replace(basename, \"\")) for fn in glob(basename + \"*\")])\n\n    # Wait for everyone to finish reading filelist\n    comm.Barrier()\n\n    # Clean up our own rank file. 
If the process is aborted,\n    # this may fail to happen and leave a dangling file!\n    # However the file takes no space, and the guid ensures\n    # it won't be colliding with other filenames.\n    # TODO: Include a gc command in dijitso to clean up this and other stuff.\n    try_delete_file(filename)\n    return noderanks\n\n\ndef gather_global_partitions(comm, partition):\n    \"\"\"Gather an ordered list of unique partition values within comm.\"\"\"\n    global_partitions = numpy.ndarray((comm.size,), dtype=numpy.uint64)\n    local_partition = numpy.ndarray((1,), dtype=numpy.uint64)\n    local_partition[0] = partition\n    comm.Allgather(local_partition, global_partitions)\n    return sorted(set(global_partitions))\n\n\ndef create_subcomm(comm, ranks):\n    \"Create a communicator for a set of ranks.\"\n    group = comm.Get_group()\n    subgroup = group.Incl(ranks)\n    subcomm = comm.Create(subgroup)\n    subgroup.Free()\n    group.Free()\n    return subcomm\n\n\ndef create_node_comm(comm, comm_dir):\n    \"\"\"Create comms for communicating within a node.\"\"\"\n    # Find ranks that share this physical comm_dir (physical dir, not same path string)\n    node_ranks = discover_path_access_ranks(comm, comm_dir)\n\n    # Partition comm into one communicator for each physical comm_dir\n    assert len(node_ranks) >= 1\n    node_root = min(node_ranks)\n    node_comm = comm.Split(node_root, node_ranks.index(comm.rank))\n    return node_comm, node_root\n\n\ndef create_node_roots_comm(comm, node_root):\n    \"\"\"Build comm for communicating among the node roots.\"\"\"\n    unique_global_node_roots = gather_global_partitions(comm, node_root)\n    roots_comm = create_subcomm(comm, unique_global_node_roots)\n    return roots_comm\n\n\ndef create_comms_and_role_root(comm, node_comm, node_root):\n    \"\"\"Approach: global root builds and sends binary to node roots,\n    everyone waits on their node group.\"\"\"\n    copy_comm = create_node_roots_comm(comm, node_root)\n    wait_comm = node_comm\n    if comm.rank == 0:\n        role = \"builder\"\n    elif node_comm.rank == 0:\n        assert comm.rank == node_root\n        role = \"receiver\"\n    else:\n        assert comm.rank != node_root\n        role = \"waiter\"\n    return copy_comm, wait_comm, role\n\n\ndef create_comms_and_role_node(comm, node_comm, node_root):\n    \"\"\"Approach: each node root builds, everyone waits on their node group.\"\"\"\n    copy_comm = None\n    wait_comm = node_comm\n    if node_comm.rank == 0:\n        assert comm.rank == node_root\n        role = \"builder\"\n    else:\n        assert comm.rank != node_root\n        role = \"waiter\"\n    return copy_comm, wait_comm, role\n\n\ndef create_comms_and_role_process(comm, node_comm, node_root):\n    \"\"\"Approach: each process builds its own module, no communication.\n\n    To ensure no race conditions in this case independently of cache dir setup,\n    we include an error check on the size of the autodetected node_comm.\n    This should always be 1, or we provide the user with an informative message.\n    TODO: Append program uid and process rank to basedir instead?\n    \"\"\"\n    if node_comm.size > 1:\n        error(\"Asking for per-process building but processes share cache dir.\"\n              \" Please configure dijitso dirs to be distinct per process.\")\n    copy_comm = None\n    wait_comm = None\n    assert node_comm.rank == 0\n    assert comm.rank == node_root\n    role = \"builder\"\n    return copy_comm, wait_comm, role\n\n\ndef 
create_comms_and_role(comm, comm_dir, buildon):\n    \"\"\"Determine which role each process should take, and create\n    the right copy_comm and wait_comm for the build strategy.\n\n    buildon must be one of \"root\", \"node\", or \"process\".\n\n    Returns (copy_comm, wait_comm, role).\n    \"\"\"\n    # Now assign values to the copy_comm, wait_comm, and role,\n    # depending on buildon strategy chosen.  If we have no comm,\n    # always return the builder role\n    if comm is None:\n        copy_comm, wait_comm, role = None, None, \"builder\"\n    else:\n        node_comm, node_root = create_node_comm(comm, comm_dir)\n        if buildon == \"root\":\n            copy_comm, wait_comm, role = create_comms_and_role_root(comm,\n                                                                    node_comm,\n                                                                    node_root)\n        elif buildon == \"node\":\n            copy_comm, wait_comm, role = create_comms_and_role_node(comm,\n                                                                    node_comm,\n                                                                    node_root)\n        elif buildon == \"process\":\n            copy_comm, wait_comm, role = create_comms_and_role_process(comm,\n                                                                       node_comm,\n                                                                       node_root)\n        else:\n            error(\"Invalid parameter buildon=%s\" % (buildon,))\n    return copy_comm, wait_comm, role\n\n\ndef send_binary(comm, lib_data):\n    \"Send compiled library as binary blob over MPI.\"\n    # TODO: Test this in parallel locally.\n    # TODO: Test this in parallel on clusters.\n    # http://mpi4py.scipy.org/docs/usrman/tutorial.html\n    # Check that we are the root\n    root = 0\n    assert comm.rank == root\n\n    # Send file size\n    lib_size = numpy.ndarray((1,), dtype=numpy.uint32)\n    lib_size[0] = lib_data.shape[0]\n    info(\"rank %d: send size with root=%d.\" % (comm.rank, root))\n    comm.--- Logging error ---
Traceback (most recent call last):
  File "/usr/lib/python3.6/logging/__init__.py", line 994, in emit
    stream.write(msg)
BlockingIOError: [Errno 11] write could not complete without blocking
Call stack:
  File "/usr/local/bin/coveralls", line 11, in <module>
    sys.exit(main())
  File "/usr/local/lib/python3.6/dist-packages/coveralls/cli.py", line 76, in main
    result = coverallz.wear()
  File "/usr/local/lib/python3.6/dist-packages/coveralls/api.py", line 160, in wear
    json_string = self.create_report()
  File "/usr/local/lib/python3.6/dist-packages/coveralls/api.py", line 188, in create_report
    log.debug(log_string)
Message: '{"source_files": [{"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/__init__.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\\n\\nfrom pkg_resources import get_distribution\\n\\n\\"\\"\\"This is dijitso -- a lightweight distributed just-in-time shared\\nlibrary builder.\\"\\"\\"\\n\\n__author__ = \\"Martin Sandve Aln\\u00e6s\\"\\n__version__ = get_distribution(\'fenics-dijitso\').version\\n\\n__all__ = [\\"validate_params\\", \\"jit\\", \\"extract_factory_function\\",\\n           \\"set_log_level\\"]\\n\\nfrom dijitso.params import validate_params\\nfrom dijitso.jit import jit, DijitsoError\\nfrom dijitso.jit import extract_factory_function\\nfrom dijitso.log import set_log_level\\n\\n# Import main function, entry point to script\\nfrom dijitso.__main__ import main\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, 1, 1, null, 1, null, null, 1, 1, 1, 1, null, null, 1]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/__main__.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"This is the commandline interface to dijitso. 
For usage help, run \'dijitso --help\'.\\"\\"\\"\\n\\nimport sys\\nimport argparse\\n\\nfrom dijitso.params import validate_params\\nimport dijitso.cmdline as cmd_namespace\\n\\n\\ndef build_commands(cmd_namespace):\\n    \\"\\"\\"Collects functions called cmd_<basename> from given namespace.\\n\\n    Returns dict {basename: function}.\\n    \\"\\"\\"\\n    commands = {}\\n    cmd_args = {}\\n    for name in list(cmd_namespace.keys()):\\n        if name.startswith(\\"cmd_\\"):\\n            cmd_name = name.replace(\\"cmd_\\", \\"\\")\\n            cmd = cmd_namespace[name]\\n            commands[cmd_name] = cmd\\n            args_name = \\"args_\\" + cmd_name\\n            if args_name in cmd_namespace:\\n                cmd_args[cmd_name] = cmd_namespace.get(args_name)\\n    return commands, cmd_args\\n\\n\\ndef add_top_arguments(parser):\\n    \\"Add arguments to top level parser.\\"\\n    parser.add_argument(\\"--verbose\\", \\"-v\\", default=False,\\n                        help=\\"set logging level\\")\\n    parser.add_argument(\\"--cache-dir\\", \\"-r\\", default=None,\\n                        help=\\"use non-default cache root path\\")\\n    parser.add_argument(\\"--dry-run\\", \\"-n\\", default=False,\\n                        help=\\"only show what would be done, don\'t modify filesystem\\")\\n\\n\\ndef extract_params_from_args(args):\\n    p = {}\\n    p[\\"cache\\"] = {}\\n    if args.cache_dir is not None:\\n        p[\\"cache\\"][\\"cache_dir\\"] = args.cache_dir\\n    return p\\n\\n\\ndef add_common_arguments(parser):\\n    \\"Add arguments to each subparser.\\"\\n    pass\\n\\n\\ndef add_cmd_arguments(cmd, parser, args):\\n    \\"Add arguments specific to a command.\\"\\n    if hasattr(cmd, \\"add_arguments\\"):\\n        cmd.add_arguments(parser)\\n\\n\\ndef build_parsers(commands, args):\\n    \\"\\"\\"Builds a top parser with subparsers for each command.\\"\\"\\"\\n    top_parser = argparse.ArgumentParser()\\n    add_top_arguments(top_parser)\\n\\n    subparsers = top_parser.add_subparsers(help=\\"command description\\", dest=\\"cmd_name\\")\\n    cmd_parsers = {}\\n    for cmd_name, cmd in commands.items():\\n        parser = subparsers.add_parser(cmd_name, help=cmd.__doc__)\\n        add_common_arguments(parser)\\n        if cmd_name in args:\\n            args[cmd_name](parser)\\n        cmd_parsers[cmd_name] = parser\\n\\n    return top_parser, subparsers, cmd_parsers\\n\\n\\ndef main(args=None):\\n    \\"\\"\\"This is the commandline tool for the python module dijitso.\\"\\"\\"\\n\\n    if args is None:\\n        args = sys.argv[1:]\\n\\n    # Build subparsers for each command\\n    commands, cmd_args = build_commands(vars(cmd_namespace))\\n    top_parser, subparsers, cmd_parsers = build_parsers(commands, cmd_args)\\n\\n    # Populate args namespace\\n    args_ns = argparse.Namespace()\\n    top_parser.parse_args(args, namespace=args_ns)\\n\\n    # Extract generic params\\n    params = extract_params_from_args(args_ns)\\n    params = validate_params(params)\\n\\n    # Run the chosen command (argparse doesn\'t allow\\n    # getting to this point with an invalid cmd_name)\\n    assert args_ns.cmd_name in commands\\n    cmd = commands[args_ns.cmd_name]\\n    return cmd(args_ns, params)\\n\\n\\nif __name__ == \\"__main__\\":\\n    sys.exit(main())\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, null, 1, 1, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
null, null, 1, null, 0, null, 0, null, 0, null, null, null, 1, 0, 0, 0, 0, 0, null, null, 1, null, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 1, null, null, 0, 0, null, null, 0, 0, null, null, 0, 0, null, null, 0, 0, null, null, null, 0, 0, 0, null, null, 1, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/build.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"Utilities for building libraries with dijitso.\\"\\"\\"\\n\\nimport tempfile\\nimport os\\nimport sys\\n\\nfrom dijitso.system import get_status_output, lockfree_move_file\\nfrom dijitso.system import make_dirs, make_executable, store_textfile\\nfrom dijitso.log import warning, info, debug\\nfrom dijitso.cache import ensure_dirs, make_lib_dir, make_inc_dir\\nfrom dijitso.cache import create_fail_dir_path\\nfrom dijitso.cache import create_lib_filename, create_lib_basename, create_libname\\nfrom dijitso.cache import create_src_filename, create_src_basename\\nfrom dijitso.cache import create_inc_filename, create_inc_basename\\nfrom dijitso.cache import create_log_filename\\nfrom dijitso.cache import compress_source_code\\n\\n\\ndef make_unique(dirs):\\n    \\"\\"\\"Take a sequence of hashable items and return a tuple including each\\n    only once.\\n\\n    Preserves original ordering.\\n\\n    \\"\\"\\"\\n    udirs = []\\n    found = set()\\n    for d in dirs:\\n        if d not in found:\\n            udirs.append(d)\\n            found.add(d)\\n    return tuple(udirs)\\n\\n\\ndef make_compile_command(src_filename, lib_filename, dependencies,\\n                         build_params, cache_params):\\n    \\"\\"\\"Piece together the compile command from build params.\\n\\n    Returns the command as a list with the command and its arguments.\\n    \\"\\"\\"\\n    # Get dijitso dirs based on cache_params\\n    inc_dir = make_inc_dir(cache_params)\\n    lib_dir = make_lib_dir(cache_params)\\n\\n    # Add dijitso directories to includes, libs, and rpaths\\n    include_dirs = make_unique(build_params[\\"include_dirs\\"] + (inc_dir,))\\n    lib_dirs = make_unique(build_params[\\"lib_dirs\\"] + (lib_dir,))\\n    rpath_dirs = make_unique(build_params[\\"rpath_dirs\\"] + (lib_dir,))\\n\\n    # Make all paths absolute\\n    include_dirs = [os.path.abspath(d) for d in include_dirs]\\n    lib_dirs = [os.path.abspath(d) for d in lib_dirs]\\n    rpath_dirs = [os.path.abspath(d) for d in rpath_dirs]\\n\\n    # Build options (defaults assume gcc compatibility)\\n    cxxflags = list(build_params[\\"cxxflags\\"])\\n    if build_params[\\"debug\\"]:\\n        cxxflags.extend(build_params[\\"cxxflags_debug\\"])\\n    else:\\n        cxxflags.extend(build_params[\\"cxxflags_opt\\"])\\n\\n    # Create library names for all 
dependencies and additional given\\n    # libs\\n    deplibs = [create_libname(depsig, cache_params)\\n               for depsig in dependencies]\\n\\n    deplibs.extend(build_params[\\"libs\\"])\\n\\n    # Get compiler name\\n    args = [build_params[\\"cxx\\"]]\\n\\n    # Compiler args\\n    args.extend(cxxflags)\\n    args.extend(\\"-I\\" + path for path in include_dirs)\\n\\n    # The input source\\n    args.append(src_filename)\\n\\n    # Linker args\\n    args.extend(\\"-L\\" + path for path in lib_dirs)\\n    args.extend(\\"-Wl,-rpath,\\" + path for path in rpath_dirs)\\n    args.extend(\\"-l\\" + lib for lib in deplibs)\\n\\n    # OSX specific:\\n    if sys.platform == \\"darwin\\":\\n        full_lib_filename = os.path.join(cache_params[\\"cache_dir\\"],\\n                                         cache_params[\\"lib_dir\\"],\\n                                         os.path.basename(lib_filename))\\n        args.append(\\"-Wl,-install_name,%s\\" % full_lib_filename)\\n\\n    # The output library\\n    args.append(\\"-o\\" + lib_filename)\\n\\n    return args\\n\\n\\ndef temp_dir(cache_params):\\n    \\"\\"\\"Return a uniquely named temp directory.\\n\\n    Optionally residing under temp_dir_root from cache_params.\\n    \\"\\"\\"\\n    return tempfile.mkdtemp(dir=cache_params[\\"temp_dir_root\\"])\\n\\n\\ndef build_shared_library(signature, header, source, dependencies, params):\\n    \\"\\"\\"Build shared library from a source file and store library in\\n    cache.\\n\\n    \\"\\"\\"\\n    cache_params = params[\\"cache\\"]\\n    build_params = params[\\"build\\"]\\n\\n    # Create basenames\\n    inc_basename = create_inc_basename(signature, cache_params)\\n    src_basename = create_src_basename(signature, cache_params)\\n    lib_basename = create_lib_basename(signature, cache_params)\\n\\n    # Create a temp directory and filenames within it\\n    tmpdir = temp_dir(cache_params)\\n    temp_inc_filename = os.path.join(tmpdir, inc_basename)\\n    temp_src_filename = os.path.join(tmpdir, src_basename)\\n    temp_lib_filename = os.path.join(tmpdir, lib_basename)\\n\\n    # Store source and header in temp dir\\n    if header:\\n        store_textfile(temp_inc_filename, header)\\n    store_textfile(temp_src_filename, source)\\n\\n    # Build final command as list of arguments\\n    cmd = make_compile_command(temp_src_filename, temp_lib_filename,\\n                               dependencies, build_params, cache_params)\\n\\n    # Execute command to compile generated source code to dynamic\\n    # library\\n    status, output = get_status_output(cmd)\\n\\n    # Move files to cache on success or a local dir on failure,\\n    # using safe lockfree move\\n    if status == 0:\\n        # Ensure dirnames exist in cache dirs\\n        ensure_dirs(cache_params)\\n\\n        # Move library first\\n        lib_filename = create_lib_filename(signature, cache_params)\\n        assert os.path.exists(os.path.dirname(lib_filename))\\n        lockfree_move_file(temp_lib_filename, lib_filename)\\n\\n        # Write header only if there is one\\n        if header:\\n            inc_filename = create_inc_filename(signature, cache_params)\\n            assert os.path.exists(os.path.dirname(inc_filename))\\n            lockfree_move_file(temp_inc_filename, inc_filename)\\n        else:\\n            inc_filename = None\\n\\n        # Compress or delete source code based on params\\n        temp_src_filename = compress_source_code(temp_src_filename, cache_params)\\n        if temp_src_filename:\\n        
    src_filename = create_src_filename(signature, cache_params)\\n            if temp_src_filename.endswith(\\".gz\\"):\\n                src_filename = src_filename + \\".gz\\"\\n            assert os.path.exists(os.path.dirname(src_filename))\\n            lockfree_move_file(temp_src_filename, src_filename)\\n        else:\\n            src_filename = None\\n\\n        # Write compiler command and output to log file\\n        if cache_params[\\"enable_build_log\\"]:\\n            # Recreate compiler command without the tempdir\\n            cmd = make_compile_command(src_basename, lib_basename,\\n                                       dependencies, build_params, cache_params)\\n\\n            log_contents = \\"%s\\\\n\\\\n%s\\" % (\\" \\".join(cmd), output)\\n            log_filename = create_log_filename(signature, cache_params)\\n            assert os.path.exists(os.path.dirname(log_filename))\\n            store_textfile(log_filename, log_contents)\\n        else:\\n            log_filename = None\\n\\n        files = set((inc_filename, src_filename, lib_filename, log_filename))\\n        files = files - set((None,))\\n        files = sorted(files)\\n        debug(\\"Compilation succeeded. Files written to cache:\\\\n\\" +\\n              \\"\\\\n\\".join(files))\\n        err_info = None\\n    else:\\n        # Create filenames in a local directory to store files for\\n        # reproducing failure\\n        fail_dir = create_fail_dir_path(signature, cache_params)\\n        make_dirs(fail_dir)\\n\\n        # Library name is returned below\\n        lib_filename = None\\n\\n        # Write header only if there is one\\n        if header:\\n            inc_filename = os.path.join(fail_dir, inc_basename)\\n            lockfree_move_file(temp_inc_filename, inc_filename)\\n\\n        # Always write source for inspection after compilation failure\\n        src_filename = os.path.join(fail_dir, src_basename)\\n        lockfree_move_file(temp_src_filename, src_filename)\\n\\n        # Write compile command to failure dir, adjusted to use local\\n        # source file name so it can be rerun\\n        cmd = make_compile_command(src_basename, lib_basename, dependencies,\\n                                   build_params, cache_params)\\n        cmds = \\" \\".join(cmd)\\n        script = \\"#!/bin/bash\\\\n# Execute this file to recompile locally\\\\n\\" + cmds\\n        cmd_filename = os.path.join(fail_dir, \\"recompile.sh\\")\\n        store_textfile(cmd_filename, script)\\n        make_executable(cmd_filename)\\n\\n        # Write readme file with instructions\\n        readme = \\"Run or source recompile.sh to compile locally and reproduce the build failure.\\\\n\\"\\n        readme_filename = os.path.join(fail_dir, \\"README\\")\\n        store_textfile(readme_filename, readme)\\n\\n        # Write compiler output to failure dir (will refer to temp paths)\\n        log_filename = os.path.join(fail_dir, \\"error.log\\")\\n        store_textfile(log_filename, output)\\n\\n        info(\\"------------------- Start compiler output ------------------------\\")\\n        info(output)\\n        info(\\"-------------------  End compiler output  ------------------------\\")\\n        warning(\\"Compilation failed! 
Sources, command, and \\"\\n                \\"errors have been written to: %s\\" % (fail_dir,))\\n\\n        err_info = {\'src_filename\': src_filename,\\n                    \'cmd_filename\': cmd_filename,\\n                    \'readme_filename\': readme_filename,\\n                    \'fail_dir\': fail_dir,\\n                    \'log_filename\': log_filename}\\n\\n    return status, output, lib_filename, err_info\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, 1, null, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, null, null, 1, null, null, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, null, null, null, null, null, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, null, 0, null, null, null, 0, null, null, 0, null, null, 0, null, null, 0, 0, null, null, 0, null, null, 0, 0, 0, null, null, 0, 0, null, null, 0, null, null, 0, null, 0, null, null, 1, null, null, null, null, 0, null, null, 1, null, null, null, null, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, null, null, null, null, 0, null, null, null, 0, null, 0, null, null, 0, 0, 0, null, null, 0, 0, 0, 0, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 0, null, 0, null, null, 0, 0, 0, 0, null, 0, null, 0, 0, 0, 0, null, 0, null, null, null, 0, 0, null, null, 0, null, null, 0, 0, 0, null, null, 0, 0, null, null, null, 0, null, 0, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 0, 0, null, 0, 0, 0, 0, null, null, 0, null, null, null, null, null, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/cache.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"Utilities for disk cache features of dijitso.\\"\\"\\"\\n\\nfrom glob import glob\\nimport os\\nimport re\\nimport sys\\nimport ctypes\\nfrom dijitso.system import ldd\\nfrom dijitso.system import make_dirs\\nfrom dijitso.system import try_delete_file, try_copy_file\\nfrom dijitso.system import gzip_file, gunzip_file\\nfrom dijitso.system import read_textfile, store_textfile\\nfrom dijitso.log import debug, error, warning\\n\\n\\ndef extract_files(signature, cache_params, prefix=\\"\\", path=os.curdir,\\n                  categories=(\\"inc\\", \\"src\\", \\"lib\\", \\"log\\")):\\n    \\"\\"\\"Make a copy of files stored under this signature.\\n\\n    Target filenames are \'<path>/<prefix>-<signature>.*\'\\n    \\"\\"\\"\\n    path = os.path.join(path, prefix + signature)\\n    make_dirs(path)\\n\\n    if \\"inc\\" in categories:\\n        inc_filename = create_inc_filename(signature, cache_params)\\n        try_copy_file(inc_filename, path)\\n    if \\"src\\" in categories:\\n        src_filename = create_src_filename(signature, cache_params)\\n        if not os.path.exists(src_filename):\\n            src_filename = src_filename + \\".gz\\"\\n        if os.path.exists(src_filename):\\n            try_copy_file(src_filename, path)\\n            if src_filename.endswith(\\".gz\\"):\\n                gunzip_file(os.path.join(path, os.path.basename(src_filename)))\\n    if \\"lib\\" in categories:\\n        lib_filename = create_lib_filename(signature, cache_params)\\n        try_copy_file(lib_filename, path)\\n    if \\"log\\" in categories:\\n        log_filename = create_log_filename(signature, cache_params)\\n        try_copy_file(log_filename, path)\\n\\n    return path\\n\\n\\ndef extract_lib_signatures(cache_params):\\n    \\"Extract signatures from library files in cache.\\"\\n    p = os.path.join(cache_params[\\"cache_dir\\"], cache_params[\\"lib_dir\\"])\\n    filenames = glob(os.path.join(p, \\"*\\"))\\n\\n    r = re.compile(create_lib_filename(\\"(.*)\\", cache_params))\\n    sigs = []\\n    for f in filenames:\\n        m = r.match(f)\\n        if m:\\n            sigs.append(m.group(1))\\n    return sigs\\n\\n\\ndef clean_cache(cache_params, dryrun=True,\\n                categories=(\\"inc\\", \\"src\\", \\"lib\\", \\"log\\")):\\n    \\"Delete files from cache.\\"\\n    gc = glob_cache(cache_params, categories=categories)\\n    for category in gc:\\n        for fn in gc[category]:\\n            if dryrun:\\n                print(\\"rm %s\\" % (fn,))\\n            else:\\n                try_delete_file(fn)\\n\\n\\ndef glob_cache(cache_params, categories=(\\"inc\\", \\"src\\", \\"lib\\", \\"log\\")):\\n    \\"\\"\\"Return dict with contents of cache subdirectories.\\"\\"\\"\\n    g = {}\\n    for foo in categories:\\n        p = os.path.join(cache_params[\\"cache_dir\\"], cache_params[foo + \\"_dir\\"])\\n        g[foo] = glob(os.path.join(p, \\"*\\"))\\n    return g\\n\\n\\ndef grep_cache(regex, cache_params,\\n               linenumbers=False, countonly=False,\\n               signature=None,\\n               categories=(\\"inc\\", \\"src\\", \\"log\\")):\\n    \\"Search through files in cache for a pattern.\\"\\n    allmatches = {}\\n    gc = glob_cache(cache_params, categories=categories)\\n    for category in categories:\\n        for fn in gc.get(category, ()):\\n            # Skip non-matches if specific signature is specified\\n            if signature is not None and signature not in fn:\\n                
continue\\n\\n            if countonly:\\n                matches = 0\\n            else:\\n                matches = []\\n\\n            if category == \\"lib\\":\\n                # If category is \\"lib\\", use ldd\\n                # TODO: on mac need to use otool\\n                libs = ldd(fn)\\n                for k, libpath in sorted(libs.items()):\\n                    if not libpath:\\n                        continue\\n                    m = regex.match(libpath)\\n                    if m:\\n                        if countonly:\\n                            matches += 1\\n                        else:\\n                            line = \\"%s => %s\\" % (k, libpath)\\n                            matches.append(line)\\n            else:\\n                content = read_textfile(fn)\\n                lines = content.splitlines() if content else ()\\n                for i, line in enumerate(lines):\\n                    m = regex.match(line)\\n                    if m:\\n                        if countonly:\\n                            matches += 1\\n                        else:\\n                            line = line.rstrip(\\"\\\\n\\\\r\\")\\n                            if linenumbers:\\n                                line = (i, line)\\n                            matches.append(line)\\n\\n            if matches:\\n                allmatches[fn] = matches\\n    return allmatches\\n\\n\\ndef extract_function(lines):\\n    \\"Extract function code starting at first line of lines.\\"\\n    n = len(lines)\\n\\n    # Function starts at line 0 by assumption\\n    begin = 0\\n\\n    # Worst case body range\\n    body_begin = begin\\n    body_end = n\\n\\n    # Body starts at first {\\n    for i in range(begin, n):\\n        if \\"{\\" in lines[i]:\\n            body_begin = i\\n            break\\n\\n    # Body ends when {} are balanced back to 0\\n    braces = 0\\n    for i in range(body_begin, n):\\n        if \\"{\\" in lines[i]:\\n            braces += 1\\n        if \\"}\\" in lines[i]:\\n            braces -= 1\\n        if braces == 0:\\n            body_end = i\\n            break\\n\\n    # Include the last line in range\\n    end = body_end + 1\\n    sublines = lines[begin:end]\\n    return \\"\\".join(sublines)\\n\\n\\ndef _create_basename(foo, signature, cache_params):\\n    return \\"\\".join((cache_params.get(foo + \\"_prefix\\", \\"\\"),\\n                    cache_params.get(foo + \\"_basename\\", \\"\\"),\\n                    signature,\\n                    cache_params.get(foo + \\"_postfix\\", \\"\\")))\\n\\n\\ndef _create_filename(foo, signature, cache_params):\\n    basename = _create_basename(foo, signature, cache_params)\\n    return os.path.join(cache_params[\\"cache_dir\\"],\\n                        cache_params[foo + \\"_dir\\"], basename)\\n\\n\\ndef create_log_filename(signature, cache_params):\\n    \\"Create log filename based on signature and params.\\"\\n    return _create_filename(\\"log\\", signature, cache_params)\\n\\n\\ndef create_inc_basename(signature, cache_params):\\n    \\"Create header filename based on signature and params.\\"\\n    return _create_basename(\\"inc\\", signature, cache_params)\\n\\n\\ndef create_inc_filename(signature, cache_params):\\n    \\"Create header filename based on signature and params.\\"\\n    return _create_filename(\\"inc\\", signature, cache_params)\\n\\n\\ndef create_src_filename(signature, cache_params):\\n    \\"Create source code filename based on signature and params.\\"\\n    return 
_create_filename(\\"src\\", signature, cache_params)\\n\\n\\ndef create_src_basename(signature, cache_params):\\n    \\"Create source code filename based on signature and params.\\"\\n    return _create_basename(\\"src\\", signature, cache_params)\\n\\n\\ndef create_lib_basename(signature, cache_params):\\n    \\"Create library filename based on signature and params.\\"\\n    return _create_basename(\\"lib\\", signature, cache_params)\\n\\n\\ndef create_lib_filename(signature, cache_params):\\n    \\"Create library filename based on signature and params.\\"\\n    return _create_filename(\\"lib\\", signature, cache_params)\\n\\n\\ndef create_libname(signature, cache_params):\\n    \\"\\"\\"Create library name based on signature and params,\\n    without path, prefix \'lib\', or extension \'.so\'.\\"\\"\\"\\n    return cache_params[\\"lib_basename\\"] + signature\\n\\n\\ndef create_fail_dir_path(signature, cache_params):\\n    \\"Create path name to place files after a module build failure.\\"\\n    fail_root = cache_params[\\"fail_dir_root\\"] or os.curdir\\n    fail_dir = os.path.join(fail_root, \\"jitfailure-\\" + signature)\\n    return os.path.abspath(fail_dir)\\n\\n\\ndef make_inc_dir(cache_params):\\n    d = os.path.join(cache_params[\\"cache_dir\\"], cache_params[\\"inc_dir\\"])\\n    make_dirs(d)\\n    return d\\n\\n\\ndef make_src_dir(cache_params):\\n    d = os.path.join(cache_params[\\"cache_dir\\"], cache_params[\\"src_dir\\"])\\n    make_dirs(d)\\n    return d\\n\\n\\ndef make_lib_dir(cache_params):\\n    d = os.path.join(cache_params[\\"cache_dir\\"], cache_params[\\"lib_dir\\"])\\n    make_dirs(d)\\n    return d\\n\\n\\ndef make_log_dir(cache_params):\\n    d = os.path.join(cache_params[\\"cache_dir\\"], cache_params[\\"log_dir\\"])\\n    make_dirs(d)\\n    return d\\n\\n\\n_ensure_dirs_called = {}\\n\\n\\ndef ensure_dirs(cache_params):\\n    global _ensure_dirs_called\\n    # This ensures directories are created only once during a process\\n    # for each value that cache_dir takes, in case it changes during\\n    # the process lifetime.\\n    c = cache_params[\\"cache_dir\\"]\\n    if c not in _ensure_dirs_called:\\n        make_inc_dir(cache_params)\\n        make_src_dir(cache_params)\\n        make_lib_dir(cache_params)\\n        make_log_dir(cache_params)\\n        _ensure_dirs_called[c] = True\\n\\n\\ndef read_library_binary(lib_filename):\\n    \\"Read compiled shared library as binary blob into a numpy byte array.\\"\\n    import numpy\\n    return numpy.fromfile(lib_filename, dtype=numpy.uint8)\\n\\n\\ndef write_library_binary(lib_data, signature, cache_params):\\n    \\"Store compiled shared library from binary blob in numpy byte array to cache.\\"\\n    make_lib_dir(cache_params)\\n    lib_filename = create_lib_filename(signature, cache_params)\\n    lib_data.tofile(lib_filename)\\n    # TODO: Set permissions?\\n\\n\\ndef analyse_load_error(e, lib_filename, cache_params):\\n    # Try to analyse error further for better error message:\\n    msg = str(e)\\n    r = re.compile(\\"(\\" + create_lib_basename(\\".*\\", cache_params) + \\")\\")\\n    m = r.match(msg)\\n    if m:\\n        # Found libname mentioned in message\\n        mlibname = m.group(1)\\n        mlibname = os.path.join(cache_params[\\"cache_dir\\"],\\n                                cache_params[\\"lib_dir\\"], mlibname)\\n    else:\\n        mlibname = lib_filename\\n\\n    if lib_filename != mlibname:\\n        # Message mentions some other dijitso library,\\n        # double check if this other 
file exists\\n        # (if it does, could be paths or rpath issue)\\n        if os.path.exists(mlibname):\\n            emsg = (\\"dijitso failed to load library:\\\\n\\\\t%s\\\\n\\"\\n                    \\"but dependency file exists:\\\\n\\\\t%s\\\\nerror is:\\\\n\\\\t%s\\" % (\\n                        lib_filename, mlibname, str(e)))\\n        else:\\n            emsg = (\\"dijitso failed to load library:\\\\n\\\\t%s\\\\n\\"\\n                    \\"dependency file missing:\\\\n\\\\t%s\\\\nerror is:\\\\n\\\\t%s\\" % (\\n                        lib_filename, mlibname, str(e)))\\n    else:\\n        # Message doesn\'t mention another dijitso library,\\n        # double check if library file we tried to load exists\\n        # (if it does, could be paths issue)\\n        if os.path.exists(lib_filename):\\n            emsg = (\\"dijitso failed to load existing file:\\\\n\\"\\n                    \\"\\\\t%s\\\\nerror is:\\\\n\\\\t%s\\" % (lib_filename, str(e)))\\n        else:\\n            emsg = (\\"dijitso failed to load missing file:\\\\n\\"\\n                    \\"\\\\t%s\\\\nerror is:\\\\n\\\\t%s\\" % (lib_filename, str(e)))\\n    return emsg\\n\\n\\ndef load_library(signature, cache_params):\\n    \\"\\"\\"Load existing dynamic library from disk.\\n\\n    Returns library module if found, otherwise None.\\n\\n    If found, the module is placed in memory cache for later lookup_lib calls.\\n    \\"\\"\\"\\n    lib_filename = create_lib_filename(signature, cache_params)\\n    if not os.path.exists(lib_filename):\\n        debug(\\"File %s does not exist\\" % (lib_filename,))\\n        return None\\n    debug(\\"Loading %s from %s\\" % (signature, lib_filename))\\n\\n    if cache_params[\\"lib_loader\\"] == \\"ctypes\\":\\n        try:\\n            lib = ctypes.cdll.LoadLibrary(lib_filename)\\n        except os.error as e:\\n            lib = None\\n            emsg = analyse_load_error(e, lib_filename, cache_params)\\n            warning(emsg)\\n        else:\\n            debug(\\"Loaded %s from %s\\" % (signature, lib_filename))\\n    elif cache_params[\\"lib_loader\\"] == \\"import\\":\\n        sys.path.append(os.path.dirname(lib_filename))\\n        # Will raise an exception if it does not load correctly\\n        lib = __import__(signature)\\n        debug(\\"Loaded %s from %s\\" % (signature, lib_filename))\\n    else:\\n        error(\\"Invalid loader: %s\\" % cache_params[\\"lib_loader\\"])\\n\\n    if lib is not None:\\n        # Disk loading succeeded, register loaded library in memory\\n        # cache for next time\\n        _lib_cache[signature] = lib\\n    return lib\\n\\n\\n# A cache is always something to be careful about.  
This one stores\\n# references to loaded jit-compiled libraries, which will stay in\\n# memory unless manually unloaded anyway and should not cause any\\n# trouble.\\n_lib_cache = {}\\n\\n\\ndef lookup_lib(lib_signature, cache_params):\\n    \\"\\"\\"Lookup library in memory cache then in disk cache.\\n\\n    Returns library module if found, otherwise None.\\n    \\"\\"\\"\\n    # Look for already loaded library in memory cache\\n    lib = _lib_cache.get(lib_signature)\\n    if lib is None:\\n        # Cache miss in memory, try looking on disk\\n        lib = load_library(lib_signature, cache_params)\\n    else:\\n        debug(\\"Fetched %s from memory cache\\" % (lib_signature,))\\n    # Return library or None\\n    return lib\\n\\n\\ndef read_src(signature, cache_params):\\n    \\"\\"\\"Lookup source code in disk cache and return file contents or None.\\"\\"\\"\\n    filename = create_src_filename(signature, cache_params)\\n    return read_textfile(filename)\\n\\n\\ndef read_inc(signature, cache_params):\\n    \\"\\"\\"Lookup header file in disk cache and return file contents or None.\\"\\"\\"\\n    filename = create_inc_filename(signature, cache_params)\\n    return read_textfile(filename)\\n\\n\\ndef read_log(signature, cache_params):\\n    \\"\\"\\"Lookup log file in disk cache and return file contents or None.\\"\\"\\"\\n    filename = create_log_filename(signature, cache_params)\\n    return read_textfile(filename)\\n\\n\\ndef store_src(signature, content, cache_params):\\n    \\"Store source code in file within dijitso directories.\\"\\n    make_src_dir(cache_params)\\n    filename = create_src_filename(signature, cache_params)\\n    store_textfile(filename, content)\\n    return filename\\n\\n\\ndef store_inc(signature, content, cache_params):\\n    \\"Store header file within dijitso directories.\\"\\n    make_inc_dir(cache_params)\\n    filename = create_inc_filename(signature, cache_params)\\n    store_textfile(filename, content)\\n    return filename\\n\\n\\ndef store_log(signature, content, cache_params):\\n    \\"Store log file within dijitso directories.\\"\\n    make_log_dir(cache_params)\\n    filename = create_log_filename(signature, cache_params)\\n    store_textfile(filename, content)\\n    return filename\\n\\n\\ndef compress_source_code(src_filename, cache_params):\\n    \\"\\"\\"Keep, delete or compress source code based on value of cache parameter \'src_storage\'.\\n\\n    Can be \\"keep\\", \\"delete\\", or \\"compress\\".\\n    \\"\\"\\"\\n    src_storage = cache_params[\\"src_storage\\"]\\n    if src_storage == \\"keep\\":\\n        filename = src_filename\\n    elif src_storage == \\"delete\\":\\n        try_delete_file(src_filename)\\n        filename = None\\n    elif src_storage == \\"compress\\":\\n        filename = gzip_file(src_filename)\\n        try_delete_file(src_filename)\\n    else:\\n        error(\\"Invalid src_storage parameter. 
Expecting \'keep\', \'delete\', or \'compress\'.\\")\\n    return filename\\n\\n\\ndef get_dijitso_dependencies(libname, cache_params):\\n    \\"Run ldd and filter output to only include dijitso cache entries.\\"\\n    libs = ldd(libname)\\n    dlibs = {}\\n    for k in libs:\\n        if k.startswith(cache_params[\\"lib_prefix\\"]):\\n            dlibs[k] = libs[k]\\n    return dlibs\\n\\n\\n# TODO: Use this in command-line tools?\\ndef check_cache_integrity(cache_params):\\n    \\"Check dijitso cache integrity.\\"\\n    libnames = set(glob(cache_params[\\"lib_prefix\\"] + \\"*\\" + cache_params[\\"lib_postfix\\"]))\\n    dmissing = {}\\n    for libname in libnames:\\n        dlibs = get_dijitso_dependencies(libname, cache_params)\\n        # Missing on file system:\\n        missing = [k for k in dlibs if k not in libnames]\\n        for k in dlibs:\\n            if k not in missing:\\n                # ldd thinks file is missing but it\'s there, linker issue?\\n                pass\\n        if missing:\\n            dmissing[libname] = sorted(missing)\\n    return dmissing\\n\\n\\ndef report_cache_integrity(dmissing, out=warning):\\n    \\"Print cache integrity report.\\"\\n    if dmissing:\\n        out(\\"%d libraries are missing one or more dependencies:\\" % len(dmissing))\\n        for k in sorted(dmissing):\\n            out(\\"\\\\t%s depends on missing libraries:\\" % k)\\n            for m in dmissing[k]:\\n                out(\\"\\\\t\\\\t%s\\" % m)\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, null, null, 1, null, null, null, null, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 1, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, null, 0, 0, 0, 0, 0, null, 0, null, null, 1, null, 0, 0, 0, 0, 0, null, null, 1, null, null, null, null, 0, 0, 0, 0, null, 0, 0, null, 0, 0, null, 0, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, 0, 0, 0, null, null, 1, null, 0, null, null, 0, null, null, 0, 0, null, null, 0, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 0, 0, 0, null, null, 1, 1, null, null, null, null, null, 1, 1, 1, null, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 0, null, null, 1, null, 1, null, null, 1, null, null, 0, null, null, 1, null, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, null, null, 1, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, 0, null, null, null, 1, null, 0, 0, 0, 0, null, 0, 0, null, null, 0, null, 0, null, null, null, 0, 0, null, null, null, 0, null, null, null, null, null, null, 0, 0, null, null, 0, null, 0, null, null, 1, null, null, null, null, null, null, 1, 1, 0, 0, 1, null, 1, 1, 1, 0, 0, 0, 0, null, 1, 0, 0, null, 0, 0, null, 0, null, 1, null, null, 1, 1, null, null, null, null, null, null, 1, null, null, 1, null, null, null, null, null, 1, 1, null, 1, null, 1, null, 1, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, 0, 0, 0, 0, null, null, 1, null, null, null, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, null, null, 1, null, 0, 0, 0, 0, 0, 0, null, null, null, 1, 
null, 0, 0, 0, 0, null, 0, 0, 0, null, 0, 0, 0, 0, null, null, 1, null, 0, 0, 0, 0, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/cmdline.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"This file contains the commands available through command-line dijitso-cache.\\n\\nEach function cmd_<cmdname> becomes a subcommand invoked by::\\n\\n    dijitso-cache cmdname ...args\\n\\nThe docstrings in the cmd_<cmdname> are shown when running::\\n\\n    dijitso-cache cmdname --help\\n\\nThe \'args\' argument to cmd_* is a Namespace object with the commandline arguments.\\n\\n\\"\\"\\"\\n\\nimport os\\nimport re\\n\\nfrom dijitso import __version__\\nfrom dijitso.cache import glob_cache, grep_cache, clean_cache\\nfrom dijitso.cache import extract_lib_signatures\\nfrom dijitso.cache import extract_files, extract_function\\nfrom dijitso.system import read_textfile\\n\\n\\ndef parse_categories(categories):\\n    if categories == \\"all\\":\\n        return (\\"inc\\", \\"src\\", \\"lib\\", \\"log\\")\\n    return categories.split(\\",\\")\\n\\n\\ndef args_version(parser):\\n    pass\\n\\n\\ndef cmd_version(args, params):\\n    \\"print dijitso version\\"\\n    print(__version__)\\n\\n\\ndef args_config(parser):\\n    parser.add_argument(\\"--key\\", default=\\"\\", help=\\"specific key to show (e.g. 
build.cxxflags)\\")\\n\\n\\ndef cmd_config(args, params):\\n    \\"show configuration\\"\\n    # Show single value if asked for\\n    key = args.key\\n    if key:\\n        name = key\\n        value = params\\n        for k in key.split(\\".\\"):\\n            value = value[k]\\n        # Compiler flags etc are more useful in space separated form:\\n        if isinstance(value, tuple):\\n            value = \\" \\".join(value)\\n        print(\\"    %s: %s\\" % (name, value))\\n        return 0\\n\\n    # Pick non-empty categories\\n    categories = sorted(c for c in params if params[c])\\n    print(\\"Showing default flags for dijitso:\\")\\n    for category in categories:\\n        print(\\"%s:\\" % (category,))\\n        for name in sorted(params[category]):\\n            value = params[category][name]\\n            # Compiler flags etc are more useful in space separated form:\\n            if isinstance(value, tuple):\\n                value = \\" \\".join(value)\\n            print(\\"    %s: %s\\" % (name, value))\\n    return 0\\n\\n\\ndef args_show(parser):\\n    parser.add_argument(\\"--categories\\", default=\\"all\\",\\n                        help=\\"comma separated list to enable file types (inc,src,lib,log)\\")\\n    parser.add_argument(\\"--no-summary\\", action=\\"store_true\\",\\n                        help=\\"don\'t show summary\\")\\n    parser.add_argument(\\"--files\\", action=\\"store_true\\",\\n                        help=\\"show file lists\\")\\n    parser.add_argument(\\"--signatures\\", action=\\"store_true\\",\\n                        help=\\"show library signatures\\")\\n\\n\\ndef cmd_show(args, params):\\n    \\"show lists of files in cache\\"\\n    cache_params = params[\\"cache\\"]\\n\\n    summary = not args.no_summary\\n    files = args.files\\n    signatures = args.signatures\\n    categories = parse_categories(args.categories)\\n\\n    gc = glob_cache(cache_params, categories=categories)\\n\\n    if signatures:\\n        sigs = extract_lib_signatures(cache_params)\\n        print(\\"\\\\n\\".join(\\"\\\\t\\" + s for s in sorted(sigs)))\\n    if files:\\n        for cat in categories:\\n            print(\\"\\\\n\\".join(\\"\\\\t\\" + f for f in sorted(gc.get(cat, ()))))\\n    if summary:\\n        print(\\"dijitso cache summary (number of cached files):\\")\\n        for cat in categories:\\n            print(\\"%s: %d\\" % (cat, len(gc.get(cat, ()))))\\n        # TODO: Add summary of file sizes\\n    return 0\\n\\n\\ndef args_clean(parser):\\n    parser.add_argument(\\"--categories\\", default=\\"inc,src,lib,log\\",\\n                        help=\\"comma separated list to enable file types (inc,src,lib,log)\\")\\n\\n\\ndef cmd_clean(args, params):\\n    \\"remove files from cache\\"\\n    cache_params = params[\\"cache\\"]\\n\\n    dryrun = args.dry_run\\n    categories = parse_categories(args.categories)\\n\\n    clean_cache(cache_params, dryrun=dryrun, categories=categories)\\n    return 0\\n\\n\\ndef args_grep(parser):\\n    parser.add_argument(\\"--categories\\", default=\\"inc,src\\",\\n                        help=\\"comma separated list to enable file types (inc,src,lib,log)\\")\\n    parser.add_argument(\\"--pattern\\", default=\\"\\",\\n                        help=\\"line search pattern\\")\\n    parser.add_argument(\\"--regexmode\\", action=\\"store_true\\",\\n                        help=\\"pattern is a regular expression (python style)\\")\\n    parser.add_argument(\\"--linenumbers\\", action=\\"store_true\\",\\n                        
help=\\"show linenumbers on matches\\")\\n    parser.add_argument(\\"--countonly\\", action=\\"store_true\\",\\n                        help=\\"show only match line count for each file\\")\\n    parser.add_argument(\\"--filesonly\\", action=\\"store_true\\",\\n                        help=\\"show only filenames with matches\\")\\n    parser.add_argument(\\"--signature\\", default=\\"\\",\\n                        help=\\"look for module with this signature (default all)\\")\\n\\n\\ndef cmd_grep(args, params):\\n    \\"grep content of header and source file(s) in cache\\"\\n    cache_params = params[\\"cache\\"]\\n\\n    # Get command-line arguments\\n    pattern = args.pattern\\n    signature = args.signature\\n    regexmode = args.regexmode\\n    linenumbers = args.linenumbers\\n    countonly = args.countonly\\n    filesonly = args.filesonly\\n    categories = parse_categories(args.categories)\\n\\n    if not regexmode:\\n        pattern = \\".*(\\" + pattern + \\").*\\"\\n    regex = re.compile(pattern)\\n    allmatches = grep_cache(regex, cache_params,\\n                            linenumbers=linenumbers, countonly=countonly,\\n                            signature=signature,\\n                            categories=categories)\\n    if filesonly:\\n        print(\\"\\\\n\\".join(sorted(allmatches)))\\n    elif countonly:\\n        print(\\"\\\\n\\".join(\\"%s: %d\\" % (k, v) for k, v in sorted(allmatches.items())))\\n    else:\\n        for fn in sorted(allmatches):\\n            print(\\"\\\\nFile \'%s\' matches:\\" % (fn,))\\n            if linenumbers:\\n                print(\\"\\\\n\\".join(\\"%5d:\\\\t%s\\" % line for line in allmatches[fn]))\\n            else:\\n                print(\\"\\\\n\\".join(\\"\\\\t\\" + line for line in allmatches[fn]))\\n    return 0\\n\\n\\ndef args_grepfunction(parser):\\n    parser.add_argument(\\"--categories\\", default=\\"src\\",\\n                        help=\\"comma separated list to enable file types (inc,src,lib,log)\\")\\n    parser.add_argument(\\"--name\\", default=\\"\\",\\n                        help=\\"function name to search for\\")\\n    parser.add_argument(\\"--signature\\", default=\\"\\",\\n                        help=\\"look for module with this signature (default all)\\")\\n    parser.add_argument(\\"--no-body\\", action=\\"store_true\\",\\n                        help=\\"don\'t show function bodies\\")\\n\\n\\ndef cmd_grepfunction(args, params):\\n    \\"search for function name in source files in cache\\"\\n    cache_params = params[\\"cache\\"]\\n\\n    name = args.name\\n    signature = args.signature\\n    categories = parse_categories(args.categories)\\n    no_body = args.no_body\\n\\n    pattern = r\\".*(\\" + name + r\\")[ ]*\\\\((.*)\\"\\n    regex = re.compile(pattern)\\n\\n    allmatches = grep_cache(regex, cache_params,\\n                            linenumbers=True, countonly=False,\\n                            signature=signature,\\n                            categories=categories)\\n    for fn in sorted(allmatches):\\n        if no_body:\\n            # Just print signature lines\\n            print(\\"\\\\nFile \'%s\' matches:\\" % (fn,))\\n            for i, line in allmatches[fn]:\\n                print(\\"%5d: %s\\" % (i, line))\\n        else:\\n            # Print function bodies\\n            content = read_textfile(fn)\\n            lines = content.splitlines() if content else ()\\n            for i, line in allmatches[fn]:\\n                print(\\"%s:%d\\" % (fn, i))\\n                assert 
name in lines[i]\\n                print(extract_function(lines[i:]))\\n                print()\\n    return 0\\n\\n\\ndef args_checkout(parser):\\n    parser.add_argument(\\"--categories\\", default=\\"inc,src,lib,log\\",\\n                        help=\\"comma separated list to enable file types (inc,src,lib,log)\\")\\n    parser.add_argument(\\"--signature\\",\\n                        help=\\"module signature (required)\\")\\n\\n\\ndef cmd_checkout(args, params):\\n    \\"copy files from cache to a directory\\"\\n    cache_params = params[\\"cache\\"]\\n\\n    signature = args.signature\\n    categories = parse_categories(args.categories)\\n\\n    prefix = \\"jitcheckout-\\"\\n    path = os.curdir\\n    path = extract_files(signature, cache_params,\\n                         prefix=prefix, path=path,\\n                         categories=categories)\\n    print(\\"Extracted files to \'%s\'.\\" % (path,))\\n    return 0\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, 1, null, 1, 1, 1, 1, 1, null, null, 1, 0, 0, 0, null, null, 1, 0, null, null, 1, null, 0, null, null, 1, 0, null, null, 1, null, null, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, 0, 0, null, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, null, null, 1, 0, null, null, null, 1, null, 0, null, 0, 0, null, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, null, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, null, null, null, 0, 0, 0, 0, null, 0, 0, 0, 0, null, 0, 0, null, null, 1, 0, null, 0, null, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, 0, 0, null, 0, 0, null, 0, null, null, null, 0, 0, null, 0, 0, 0, null, null, 0, 0, 0, 0, 0, 0, 0, 0, null, null, 1, 0, null, 0, null, null, null, 1, null, 0, null, 0, 0, null, 0, 0, 0, null, null, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/jit.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"This module contains the main jit() function and related utilities.\\"\\"\\"\\n\\nimport ctypes\\nimport numpy\\n\\nfrom dijitso.log import error\\nfrom dijitso.params import validate_params\\nfrom dijitso.str import as_unicode\\nfrom dijitso.cache import lookup_lib, load_library\\nfrom dijitso.cache import write_library_binary, read_library_binary\\nfrom dijitso.build import build_shared_library\\nfrom dijitso.signatures import hash_params\\n\\n\\nclass DijitsoError(RuntimeError):\\n    def __init__(self, message, err_info):\\n        super(DijitsoError, self).__init__(message)\\n        self.err_info = err_info\\n\\n\\ndef extract_factory_function(lib, name):\\n    \\"\\"\\"Extract function from loaded library.\\n\\n    Assuming signature ``(void *)()``, for anything else use look at\\n    ctypes documentation.\\n\\n    Returns the factory function or raises error.\\n    \\"\\"\\"\\n    function = getattr(lib, name)\\n    function.restype = ctypes.c_void_p\\n    return function\\n\\n\\ndef jit_signature(name, params):  # TODO: Unused?\\n    \\"\\"\\"Compute the signature that jit will use for given name and params.\\"\\"\\"\\n\\n    # Validation and completion with defaults for missing parameters\\n    params = validate_params(params)\\n\\n    # Extend provided name of jitable with hash of relevant parameters\\n    signature_params = {\\n        \\"generator\\": params[\\"generator\\"],\\n        \\"build\\": params[\\"build\\"]\\n    }\\n\\n    signature = \\"%s_%s\\" % (name, hash_params(signature_params))\\n    return signature\\n\\n\\n# TODO: send, receive, wait functionality is not currently in use,\\n# decide to use it from dolfin or clean up the code and comments here.\\ndef jit(jitable, name, params, generate=None,\\n        send=None, receive=None, wait=None):\\n    \\"\\"\\"Just-in-time compile and import of a shared library with a cache mechanism.\\n\\n    A signature is computed from the name, params[\\"generator\\"],\\n    and params[\\"build\\"]. The name should be a unique identifier\\n    for the jitable, preferrably produced by a good hash function.\\n\\n    The signature is used to identity if the library has already been\\n    compiled and cached. A two-level memory and disk cache ensures good\\n    performance for repeated lookups within a single program as well as\\n    persistence across program runs.\\n\\n    If no library has been cached, the passed \'generate\' function is\\n    called to generate the source code:\\n\\n        header, source, dependencies = \\\\\\n            generate(jitable, name, signature, params[\\"generator\\"])\\n\\n    It is expected to translate the \'jitable\' object into\\n    C or C++ (default) source code which will subsequently be\\n    compiled as a shared library and stored in the disk cache.\\n    The returned \'dependencies\' should be a tuple of signatures\\n    returned from other completed dijitso.jit calls, and are\\n    linked to when building.\\n\\n    The compiled shared library is then loaded with ctypes and returned.\\n\\n    For use in a parallel (MPI) context, three functions send, receive,\\n    and wait can be provided. 
Each process can take on a different role\\n    depending on whether generate, or receive, or neither is provided.\\n\\n      * Every process that gets a generate function is called a \'builder\',\\n        and will generate and compile code as described above on a cache miss.\\n        If the function send is provided, it will then send the shared library\\n        binary file as a binary blob by calling send(numpy_array).\\n\\n      * Every process that gets a receive function is called a \'receiver\',\\n        and will call \'numpy_array = receive()\' expecting the binary blob\\n        with a compiled binary shared library which will subsequently be\\n        written to file in the local disk cache.\\n\\n      * The rest of the processes are called \'waiters\' and will do nothing.\\n\\n      * If provided, all processes will call wait() before attempting to\\n        load the freshly compiled library from disk cache.\\n\\n    The intention of the above pattern is to be flexible, allowing several\\n    different strategies for sharing build results. The user of dijitso\\n    can determine groups of processes that share a disk cache, and assign\\n    one process per physical disk cache directory to write to that directory,\\n    avoiding multiple processes writing to the same files.\\n\\n    This forms the basis for three main strategies:\\n\\n      * Build on every process.\\n\\n      * Build on one process per physical cache directory.\\n\\n      * Build on a single global root node and send a copy of\\n        the binary to one process per physical cache directory.\\n\\n    It is highly recommended to avoid have multiple builder processes\\n    sharing a physical cache directory.\\n    \\"\\"\\"\\n    # TODO: Could simplify interface here and roll\\n    #   (jitable, name, params[\\"generator\\"]) into a single jitobject?\\n    # TODO: send/receive doesn\'t combine well with generate\\n    #   triggering additional jit calls for dependencies.\\n    #   It\'s possible that dependencies are hard to determine without\\n    #   generate doing some analysis that we want to avoid.\\n    #   Drop send/receive? Probably not that useful anyway.\\n\\n    # Complete params with hardcoded defaults and config file defaults\\n    params = validate_params(params)\\n\\n    # 0) Look for library in memory or disk cache\\n    # FIXME: use only name as signature for now\\n    # TODO: just remove one of signature or name from API?\\n    # signature = jit_signature(name, params)\\n    name = as_unicode(name)\\n    signature = name\\n    cache_params = params[\\"cache\\"]\\n    lib = lookup_lib(signature, cache_params)\\n    err_info = None\\n\\n    if lib is None:\\n        # Since we didn\'t find the library in cache, we must build it.\\n\\n        if receive and generate:\\n            # We\'re not supposed to generate if we\'re receiving\\n            error(\\"Please provide only one of generate or receive.\\")\\n\\n        elif generate:\\n            # 1) Generate source code\\n            header, source, dependencies = generate(jitable, name, signature, params[\\"generator\\"])\\n            # Ensure we got unicode from generate\\n            header = as_unicode(header)\\n            source = as_unicode(source)\\n            dependencies = [as_unicode(dep) for dep in dependencies]\\n\\n            # 2) Compile shared library and 3) store in dijitso\\n            # inc/src/lib dir on success\\n            # NB! 
It\'s important to not raise exception on compilation\\n            # failure, such that we can reach wait() together with\\n            # other processes if any.\\n            status, output, lib_filename, err_info = \\\\\\n                build_shared_library(signature, header, source, dependencies,\\n                                     params)\\n\\n            # 4a) Send library over network if we have a send function\\n            if send:\\n                if status == 0:\\n                    lib_data = read_library_binary(lib_filename)\\n                else:\\n                    lib_data = numpy.zeros((1,))\\n                send(lib_data)\\n\\n        elif receive:\\n            # 4b) Get library as binary blob from given receive\\n            # function and store in cache\\n            lib_data = receive()\\n            # Empty if compilation failed\\n            status = -1 if lib_data.shape == (1,) else 0\\n            if status == 0:\\n                write_library_binary(lib_data, signature, cache_params)\\n\\n        else:\\n            # Do nothing (we\'ll be waiting below for other process to\\n            # build)\\n            if not wait:\\n                error(\\"Please provide wait if not providing one of generate or receive.\\")\\n\\n        # 5) Notify waiters that we\'re done / wait for builder to\\n        # notify us\\n        if wait:\\n            wait()\\n\\n        # Finally load library from disk cache (places in memory\\n        # cache)\\n        # NB! This returns None if the file does not exist,\\n        # i.e. if compilation failed on builder process\\n        lib = load_library(signature, cache_params)\\n\\n    if err_info:\\n        # TODO: Parse output to find error(s) for better error messages\\n        raise DijitsoError(\\"Dijitso JIT compilation failed, see \'%s\' for details\\"\\n                           % err_info[\'fail_dir\'], err_info)\\n\\n    # Return built library and its signature\\n    return lib, signature\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, 1, null, 1, 1, 1, 1, 1, 1, 1, null, null, 1, 1, 0, 0, null, null, 1, null, null, null, null, null, null, null, 1, 1, 1, null, null, 1, null, null, null, 0, null, null, 0, null, null, null, null, 0, 0, null, null, null, null, 1, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, null, null, null, null, 1, 1, 1, 1, 1, null, 1, null, null, 0, null, 0, null, 0, null, 0, null, 0, 0, 0, null, null, null, null, null, null, 0, null, null, null, null, 0, 0, 0, null, 0, 0, null, 0, null, null, 0, null, 0, 0, 0, null, null, null, null, 0, 0, null, null, null, 0, 0, null, null, null, null, null, 0, null, 1, null, 0, null, null, null, 1]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/log.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s, Jan Blechta\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free 
Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. If not, see <http://www.gnu.org/licenses/>.\\n\\nimport logging\\n\\n__all__ = [\'set_log_level\', \'get_logger\', \'get_log_handler\', \'set_log_handler\']\\n\\n\\n_log = logging.getLogger(\\"dijitso\\")\\n_loghandler = logging.StreamHandler()\\n_log.addHandler(_loghandler)\\n_log.setLevel(logging.INFO)\\n\\n\\ndef get_log_handler():\\n    return _loghandler\\n\\n\\ndef get_logger():\\n    return _log\\n\\n\\ndef set_log_handler(handler):\\n    global _loghandler\\n    _log.removeHandler(_loghandler)\\n    _loghandler = handler\\n    _log.addHandler(_loghandler)\\n\\n\\ndef set_log_level(level):\\n    \\"\\"\\"Set verbosity of logging. Argument is int or one of \\"INFO\\", \\"WARNING\\",\\n    \\"ERROR\\", or \\"DEBUG\\".\\n    \\"\\"\\"\\n    if isinstance(level, str):\\n        level = level.upper()\\n        assert level in (\\"INFO\\", \\"WARNING\\", \\"ERROR\\", \\"DEBUG\\")\\n        level = getattr(logging, level)\\n    else:\\n        assert isinstance(level, int)\\n    _log.setLevel(level)\\n\\n\\n# Logging interface for dijitso library\\n\\ndef debug(*message):\\n    _log.debug(*message)\\n\\n\\ndef info(*message):\\n    _log.info(*message)\\n\\n\\ndef warning(*message):\\n    _log.warning(*message)\\n\\n\\ndef error(*message):\\n    _log.error(*message)\\n    text = message[0] % message[1:]\\n    raise RuntimeError(text)\\n\\n\\ndef dijitso_assert(condition, *message):\\n    if not condition:\\n        _log.error(*message)\\n        text = message[0] % message[1:]\\n        raise AssertionError(text)\\n", "coverage": [null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, 1, null, 1, null, null, 1, 1, 1, 1, null, null, 1, 0, null, null, 1, 0, null, null, 1, null, 0, 0, 0, null, null, 1, null, null, null, 0, 0, 0, 0, null, 0, 0, null, null, null, null, 1, 1, null, null, 1, 0, null, null, 1, 0, null, null, 1, 0, 0, 0, null, null, 1, 0, 0, 0, 0]}, {"name": "/home/fenics/local/lib/python3.6/site-packages/dijitso/mpi.py", "source": "# -*- coding: utf-8 -*-\\n# Copyright (C) 2015-2016 Martin Sandve Aln\\u00e6s\\n#\\n# This file is part of DIJITSO.\\n#\\n# DIJITSO is free software: you can redistribute it and/or modify\\n# it under the terms of the GNU Lesser General Public License as published by\\n# the Free Software Foundation, either version 3 of the License, or\\n# (at your option) any later version.\\n#\\n# DIJITSO is distributed in the hope that it will be useful,\\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\\n# GNU Lesser General Public License for more details.\\n#\\n# You should have received a copy of the GNU Lesser General Public License\\n# along with DIJITSO. 
If not, see <http://www.gnu.org/licenses/>.\\n\\n\\"\\"\\"Utilities for mpi features of dijitso.\\"\\"\\"\\n\\nimport io\\nimport os\\nimport uuid\\nfrom glob import glob\\n\\nimport numpy\\n\\nfrom dijitso.log import info, error\\nfrom dijitso.system import try_delete_file\\n\\n\\ndef bcast_uuid(comm):\\n    \\"Create a unique id shared across all processes in comm.\\"\\n    guid = numpy.ndarray((1,), dtype=numpy.uint64)\\n    if comm.rank == 0:\\n        Unable to print the message and arguments - possible formatting error.
Use the traceback above to help find the error.
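The jit.py source captured in the coverage payload above documents the dijitso.jit() contract: on a cache miss the supplied generate callback must return (header, source, dependencies), jit() returns (lib, signature), and factory functions are pulled out of the loaded library with extract_factory_function(). Below is a minimal sketch of that call pattern; the generator body, the module name, and the exported symbol "create" are illustrative assumptions, and a working C++ toolchain plus acceptable default build parameters are presumed.

    import dijitso

    # Verbosity control documented in log.py above: accepts an int or
    # one of "INFO", "WARNING", "ERROR", "DEBUG".
    dijitso.set_log_level("DEBUG")

    def generate(jitable, name, signature, generator_params):
        # Hypothetical generator: translate 'jitable' into C/C++ code and
        # return (header, source, dependencies) as unicode strings;
        # dependencies are signatures from earlier dijitso.jit() calls.
        header = 'extern "C" void *create();\n'
        source = 'extern "C" void *create() { return 0; }\n'
        return header, source, ()

    # Per the docstring, the name doubles as the cache signature, so it
    # should be unique, preferably a hash of whatever 'jitable' represents.
    lib, signature = dijitso.jit(jitable=None,
                                 name="example_module_0123abcdef",
                                 params={},   # missing entries are completed with defaults
                                 generate=generate)

    # Factory functions are extracted with ctypes, assuming a (void *)() signature.
    create = dijitso.extract_factory_function(lib, "create")
    handle = create()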
==
Reporting 12 files
==

/home/fenics/local/lib/python3.6/site-packages/dijitso/__init__.py - 9/36
/home/fenics/local/lib/python3.6/site-packages/dijitso/__main__.py - 13/118
/home/fenics/local/lib/python3.6/site-packages/dijitso/build.py - 18/253
/home/fenics/local/lib/python3.6/site-packages/dijitso/cache.py - 70/494
/home/fenics/local/lib/python3.6/site-packages/dijitso/cmdline.py - 23/259
/home/fenics/local/lib/python3.6/site-packages/dijitso/jit.py - 27/220
/home/fenics/local/lib/python3.6/site-packages/dijitso/log.py - 16/82
/home/fenics/local/lib/python3.6/site-packages/dijitso/mpi.py - 84/271
/home/fenics/local/lib/python3.6/site-packages/dijitso/params.py - 111/284
/home/fenics/local/lib/python3.6/site-packages/dijitso/signatures.py - 8/53
/home/fenics/local/lib/python3.6/site-packages/dijitso/str.py - 4/28
/home/fenics/local/lib/python3.6/site-packages/dijitso/system.py - 52/354
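Each entry above reads as lines executed over total lines in the file: for example, the 82-element coverage array recorded for log.py earlier in this payload contains sixteen 1s, matching the 16/82 reported here. Under that reading (an inference from this log, not something the coveralls client labels), the aggregate for this run works out as follows:

    # (executed, total) line counts copied from the report above.
    counts = {
        "__init__.py":   (9, 36),
        "__main__.py":   (13, 118),
        "build.py":      (18, 253),
        "cache.py":      (70, 494),
        "cmdline.py":    (23, 259),
        "jit.py":        (27, 220),
        "log.py":        (16, 82),
        "mpi.py":        (84, 271),
        "params.py":     (111, 284),
        "signatures.py": (8, 53),
        "str.py":        (4, 28),
        "system.py":     (52, 354),
    }

    executed = sum(e for e, _ in counts.values())
    total = sum(t for _, t in counts.values())
    print("%d/%d lines executed (%.1f%%)" % (executed, total, 100.0 * executed / total))
    # -> 435/2452 lines executed (17.7%)

Note that the denominators count every line of each file, including comments and blank lines (the null entries in the coverage arrays), so the 17.7% figure is a lower bound on statement coverage.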
Coverage submitted!
{'message': 'Job ##88.1', 'url': 'https://coveralls.io/jobs/42454006'}
Job ##88.1
https://coveralls.io/jobs/42454006
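The __main__.py dump at the top of this payload also documents dijitso's cache-maintenance commands (config, show, clean, grep, grepfunction, checkout) and their flags. The sketch below drives a few of them via subprocess; the subcommand names are inferred from the cmd_*/args_* helper pairs, and the "python3 -m dijitso" invocation and the search pattern are assumptions, since the entry-point wiring is cut off in this log.

    import subprocess

    # Summarise and list the files currently held in the JIT cache.
    subprocess.run(["python3", "-m", "dijitso", "show", "--signatures", "--files"])

    # Search cached headers/sources for a pattern (hypothetical pattern "create").
    subprocess.run(["python3", "-m", "dijitso", "grep",
                    "--pattern", "create", "--linenumbers"])

    # Remove cached headers and sources, keeping libraries and logs.
    subprocess.run(["python3", "-m", "dijitso", "clean", "--categories", "inc,src"])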