From 2616c3f5efb378befff77f723e308bbb414bb1f8 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Wed, 8 Nov 2023 13:48:02 +0100 Subject: [PATCH 1/5] A bunch of updates --- .flake8 | 5 - .github/workflows/build_docs.yml | 33 ++- .gitignore | 13 +- .pre-commit-config.yaml | 35 ++-- README.md | 2 +- _config.yml | 22 +- _toc.yml | 2 +- code/demo.ipynb | 193 ------------------ code/demo.py | 107 ++++++++++ code/postprocess.py | 3 +- code/pre_processing.py | 2 +- code/run_fiber_generation.py | 3 +- data/README.md | 2 + docker/Dockerfile | 2 +- docs/reproducing.md | 4 +- index.md | 10 - pyproject.toml | 73 +++++++ requirements-docs.txt | 336 ++++++++++++++++++++++++++++++- requirements.txt | 198 ++++-------------- 19 files changed, 634 insertions(+), 411 deletions(-) delete mode 100644 .flake8 delete mode 100644 code/demo.ipynb create mode 100644 code/demo.py delete mode 100644 index.md create mode 100644 pyproject.toml diff --git a/.flake8 b/.flake8 deleted file mode 100644 index 0b791a0..0000000 --- a/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -# flake8 does not support pyproject.toml, see: -# https://github.com/PyCQA/flake8/issues/234 -[flake8] -exclude = docs,venv -max-line-length = 100 diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml index c59c212..7b8f4c4 100644 --- a/.github/workflows/build_docs.yml +++ b/.github/workflows/build_docs.yml @@ -23,14 +23,14 @@ jobs: build: runs-on: ubuntu-22.04 container: - image: finsberg/fenics-gmsh:latest + image: ghcr.io/scientificcomputing/fenics-gmsh:2023-08-16 env: # Directory that will be published on github pages PUBLISH_DIR: ./_build/html steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install dependencies run: python3 -m pip install -r requirements.txt @@ -38,12 +38,29 @@ jobs: - name: Build docs run: jupyter book build . + + - name: Cache + id: cache + uses: actions/cache@v3 + with: + path: | + ~/.cache/pip + ~/_build + key: cache_v1 + restore-keys: | + cache_v1 + + - name: Install dependencies + run: python3 -m pip install -r requirements-docs.txt + + - name: Build docs + run: jupyter book build . 
+ - name: Upload artifact - uses: actions/upload-pages-artifact@v1 + uses: actions/upload-pages-artifact@v2 with: path: ${{ env.PUBLISH_DIR }} - # Single deploy job since we're just deploying deploy: if: github.ref == 'refs/heads/main' @@ -53,13 +70,15 @@ jobs: url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest + steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Pages - uses: actions/configure-pages@v2 + uses: actions/configure-pages@v3 + - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v1 \ No newline at end of file + uses: actions/deploy-pages@v2 diff --git a/.gitignore b/.gitignore index da65a63..f2b9ba6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,9 @@ -data/data.tar -data/mesh -data/._mesh -data/*.json -data/*.h5 -code/results/*.h5 -code/results/*.xdmf +data.tar +mesh +._mesh +*.json +*.h5 +*.xdmf # Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode # Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 925332f..74159ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.5.0 hooks: - id: check-yaml - id: end-of-file-fixer @@ -8,34 +8,35 @@ repos: - id: check-docstring-first - id: debug-statements - id: requirements-txt-fixer + - id: check-added-large-files + - id: check-toml - - repo: https://github.com/asottile/reorder_python_imports - rev: v3.8.3 + - repo: https://github.com/asottile/add-trailing-comma + rev: v3.1.0 hooks: - - id: reorder-python-imports + - id: add-trailing-comma - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 23.10.1 hooks: - id: black - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.0.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + # Ruff version. + rev: 'v0.1.4' hooks: - - id: flake8 + - id: ruff - - repo: https://github.com/asottile/add-trailing-comma - rev: v2.3.0 - hooks: - - id: add-trailing-comma - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.982 + rev: v1.6.1 hooks: - id: mypy + additional_dependencies: + - types-requests - - repo: https://github.com/asottile/pyupgrade - rev: v3.1.0 + - repo: https://github.com/streetsidesoftware/cspell-cli + rev: v7.3.2 hooks: - - id: pyupgrade - args: [--py38-plus] + - id: cspell + files: docs/(.+).md|README.md|code/demo.py diff --git a/README.md b/README.md index 1302acb..86cb9e4 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ This repository contains supplementary code for the paper > Finsberg, H., Dokken, J. 2022. -> Title of paper, Journal of blabla, volume, page, url +> Title of paper, Journal of ..., volume, page, url ## Abstract diff --git a/_config.yml b/_config.yml index 1987a20..07efaba 100644 --- a/_config.yml +++ b/_config.yml @@ -1,10 +1,11 @@ # Book settings # Learn more at https://jupyterbook.org/customize/config.html -title: Example FEniCS paper -author: Henrik Finsberg +title: Example paper FEniCS +author: Henrik Finsberg and Jørgen Dokken logo: "docs/logo.png" -copyright: "2022" +copyright: "2023" +only_build_toc_files: true # Force re-execution of notebooks on each build. 
# See https://jupyterbook.org/content/execute.html @@ -14,12 +15,11 @@ execute: # Information about where the book exists on the web repository: url: https://github.com/scientificcomputing/example-paper-fenics # Online location of your book - branch: main + branch: main # Which branch of the repository should be used when creating links (optional) launch_buttons: notebook_interface: "jupyterlab" # The interface interactive links will activate ["classic", "jupyterlab"] binderhub_url: "https://mybinder.org" - thebe: true html: use_issues_button: true @@ -31,13 +31,25 @@ parse: - dollarmath - linkify + sphinx: config: bibtex_bibfiles: ["docs/refs.bib"] suppress_warnings: ["bibtex.duplicate_citation"] # If the same paper is cited in multiple files + nb_execution_show_tb: True + html_theme_options: + navigation_with_keys: false + html_last_updated_fmt: "%b %d, %Y" + nb_custom_formats: # https://jupyterbook.org/en/stable/file-types/jupytext.html#file-types-custom + .py: + - jupytext.reads + - fmt: py extra_extensions: - 'sphinx.ext.autodoc' - 'sphinx.ext.napoleon' - 'sphinx.ext.viewcode' - "sphinxcontrib.bibtex" + + +exclude_patterns: [".pytest_cache/*", ".github/*"] diff --git a/_toc.yml b/_toc.yml index 2d53f71..42ce42b 100644 --- a/_toc.yml +++ b/_toc.yml @@ -1,5 +1,5 @@ format: jb-book -root: index +root: README chapters: - file: docs/abstract diff --git a/code/demo.ipynb b/code/demo.ipynb deleted file mode 100644 index 47689be..0000000 --- a/code/demo.ipynb +++ /dev/null @@ -1,193 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "997a012a", - "metadata": {}, - "source": [ - "# Demo\n", - "\n", - "This notebook contains a simple demo on how to work with the code. Before running this code you should also make sure to download the data." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9f95b9ba", - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "!curl -L 'https://www.dropbox.com/s/6bkbw6v269dyfie/data.tar' -o data.tar && tar -xvf data.tar" - ] - }, - { - "cell_type": "markdown", - "id": "a1d51753", - "metadata": {}, - "source": [ - "Just make sure that the meshes are downloaded by listing them" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c434ed20", - "metadata": {}, - "outputs": [], - "source": [ - "!ls mesh" - ] - }, - { - "cell_type": "markdown", - "id": "4610db02", - "metadata": {}, - "source": [ - "In the dataset, there are two meshes. Let us pick the smallest heart which is heart number 1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5967f9e1", - "metadata": {}, - "outputs": [], - "source": [ - "heart_nr = 1" - ] - }, - { - "cell_type": "markdown", - "id": "d93e8561", - "metadata": {}, - "source": [ - "Let us specify the path to gmsh file and make sure ths file exist" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a14a60fa", - "metadata": {}, - "outputs": [], - "source": [ - "from pathlib import Path\n", - "\n", - "msh_file = Path(\"mesh\") / f\"heart{heart_nr:02}.msh\"\n", - "\n", - "assert msh_file.is_file()" - ] - }, - { - "cell_type": "markdown", - "id": "a3ac2010", - "metadata": {}, - "source": [ - "## Pre-processing\n", - "\n", - "First step is to convert the gmsh file to dolfin format. 
We can do this with the `convert_mesh` function from the `pre-processing` module" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d3e73084", - "metadata": {}, - "outputs": [], - "source": [ - "import pre_processing\n", - "\n", - "outfile = Path(f\"heart{heart_nr:02}.h5\")\n", - "pre_processing.convert_mesh(msh_file=msh_file, outfile=outfile)" - ] - }, - { - "cell_type": "markdown", - "id": "0fb08f87", - "metadata": {}, - "source": [ - "## Fiber generation \n", - "\n", - "We can now take the mesh in dolfin format and generate the fiber orientations using the ldrb algorithm." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "54775f6d", - "metadata": {}, - "outputs": [], - "source": [ - "import run_fiber_generation\n", - "\n", - "\n", - "microstructure_path = Path(f\"microstructure{heart_nr:02}.h5\")\n", - "run_fiber_generation.generate_fibers(outfile=outfile, microstructure_path=microstructure_path)" - ] - }, - { - "cell_type": "markdown", - "id": "3c33711c", - "metadata": {}, - "source": [ - "## Post processing\n", - "\n", - "Finally we will run the postprocessing script where we convert the fiber fields to a file we can visualize in paraview, and compare some feautres against the features preseted in an artificial paper\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "911ffb19", - "metadata": {}, - "outputs": [], - "source": [ - "from postprocess import generate_fiber_xdmf_file\n", - "\n", - "fiber_path = Path(f\"fiber_{heart_nr:02}.xdmf\")\n", - "features = generate_fiber_xdmf_file(\n", - " outfile=outfile, \n", - " microstructure_path=microstructure_path, \n", - " fiber_path=fiber_path\n", - ")\n", - "print(features)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5976c9e8", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.6" - }, - "vscode": { - "interpreter": { - "hash": "dd2e87c03715f01502b947bc3422e58c2fbb143b3c580415b94b839f5ea0598b" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/code/demo.py b/code/demo.py new file mode 100644 index 0000000..5ee001c --- /dev/null +++ b/code/demo.py @@ -0,0 +1,107 @@ +# # Demo +# +# This is a simple demo on how to work with the code. +# Before running this code you should also make sure to download the data. +# We will do this using python. 
+# First let us check if we already have the data
+#
+
+from pathlib import Path
+
+
+msh_dir = Path("mesh")
+if not msh_dir.exists():
+    # We need to download the data
+
+    import tarfile
+    from pathlib import Path
+    import requests
+    from tqdm import tqdm
+
+    def download(path, link, desc=None):
+        if desc is None:
+            desc = f"Download data to {path}"
+
+        response = requests.get(link, stream=True)
+        total_size_in_bytes = int(response.headers.get("content-length", 0))
+        progress_bar = tqdm(
+            total=total_size_in_bytes,
+            unit="iB",
+            unit_scale=True,
+            desc=desc,
+        )
+
+        with open(path, "wb") as handle:
+            for data in response.iter_content(chunk_size=1000 * 1024):
+                progress_bar.update(len(data))
+                handle.write(data)
+        progress_bar.close()
+
+    # We download the file
+    download("data.tar", link="https://www.dropbox.com/s/6bkbw6v269dyfie/data.tar?dl=1")
+
+    # and extract it
+    tar = tarfile.open("data.tar")
+    tar.extractall()
+
+# The data should contain a folder called `mesh`, let us see what is inside it
+
+# !ls mesh
+
+# In the dataset, there are two meshes. Let us pick the smallest heart, which
+# is heart number 1
+
+heart_nr = 1
+
+# Let us specify the path to the gmsh file and make sure this file exists
+
+
+msh_file = msh_dir / f"heart{heart_nr:02}.msh"
+
+assert msh_file.is_file()
+
+# ## Pre-processing
+#
+# The first step is to convert the gmsh file to dolfin format. We can do this
+# with the `convert_mesh` function from the `pre-processing` module
+
+# +
+import pre_processing
+
+outfile = Path(f"heart{heart_nr:02}.h5")
+pre_processing.convert_mesh(msh_file=msh_file, outfile=outfile)
+# -
+
+# ## Fiber generation
+#
+# We can now take the mesh in dolfin format and generate the fiber
+# orientations using the ldrb algorithm.
+
+# +
+import run_fiber_generation
+
+
+microstructure_path = Path(f"microstructure{heart_nr:02}.h5")
+run_fiber_generation.generate_fibers(
+    outfile=outfile,
+    microstructure_path=microstructure_path,
+)
+# -
+
+# ## Post-processing
+#
+# Finally, we run the postprocessing script, where we convert the fiber
+# fields to a file we can visualize in Paraview, and compare some
+# features against the features presented in an artificial paper
+#
+
+# +
+from postprocess import generate_fiber_xdmf_file
+
+fiber_path = Path(f"fiber_{heart_nr:02}.xdmf")
+features = generate_fiber_xdmf_file(
+    outfile=outfile,
+    microstructure_path=microstructure_path,
+    fiber_path=fiber_path,
+)
+print(features)
+# -
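Since `_config.yml` registers `.py` files as notebooks through jupytext (`nb_custom_formats` with `jupytext.reads` and `fmt: py`), the new `code/demo.py` above is written in the jupytext light format: markdown lives in `#` comment lines, and `# +`/`# -` mark explicit code cells. A minimal sketch of round-tripping the script into a classic notebook locally, assuming only that the `jupytext` package is installed and that the output filename is arbitrary:

    import jupytext

    # Parse the light-format script into an in-memory notebook object
    nb = jupytext.read("code/demo.py", fmt="py")

    # Write it back out as a regular .ipynb (hypothetical output path)
    jupytext.write(nb, "demo.ipynb")
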
diff --git a/code/postprocess.py b/code/postprocess.py
index 836df04..ca5f208 100644
--- a/code/postprocess.py
+++ b/code/postprocess.py
@@ -16,7 +16,7 @@
 
 logger = logging.Logger(__name__, logging.INFO)
 ch = logging.StreamHandler(sys.stdout)
-FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
+FORMAT = "%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s"
 ch.setFormatter(logging.Formatter(FORMAT))
 logger.addHandler(ch)
 
@@ -26,7 +26,6 @@ def generate_fiber_xdmf_file(
     microstructure_path: Path,
     fiber_path: Path,
 ) -> typing.Dict[str, float]:
-
     geo = Geometry.from_file(outfile, schema_path=outfile.with_suffix(".json"))
 
     f0, _, _ = load_microstructure(
diff --git a/code/pre_processing.py b/code/pre_processing.py
index 6c99f5e..ce57af0 100644
--- a/code/pre_processing.py
+++ b/code/pre_processing.py
@@ -18,7 +18,7 @@
 
 # We set default log level to be info
 logger = logging.Logger(__name__, logging.INFO)
 ch = logging.StreamHandler(sys.stdout)
-FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
+FORMAT = "%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s"
 ch.setFormatter(logging.Formatter(FORMAT))
 logger.addHandler(ch)
diff --git a/code/run_fiber_generation.py b/code/run_fiber_generation.py
index cf775e8..e728c8a 100644
--- a/code/run_fiber_generation.py
+++ b/code/run_fiber_generation.py
@@ -17,13 +17,12 @@
 
 logger = logging.Logger(__name__, logging.INFO)
 ch = logging.StreamHandler(sys.stdout)
-FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
+FORMAT = "%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s"
 ch.setFormatter(logging.Formatter(FORMAT))
 logger.addHandler(ch)
 
 
 def generate_fibers(outfile: Path, microstructure_path: Path) -> None:
-
     geo = Geometry.from_file(outfile, schema_path=outfile.with_suffix(".json"))
 
     # Markers are a dictionary with values [marker, dim]
diff --git a/data/README.md b/data/README.md
index b573501..d9f9358 100644
--- a/data/README.md
+++ b/data/README.md
@@ -12,3 +12,5 @@ Data is available in a dropbox folder. Use the script `download_data.sh` to down
 These meshes are originally taken from , but since the original data is about 26GB we decided to make a smaller dataset for this example.
 
 Eventually when you publish a paper you could put this data on e.g. [Zenodo](https://zenodo.org). That will make sure the data gets its own DOI.
+
+In the [demo](../code/demo.py) there is also a recipe for how to download the data using Python.
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7c53ed1..4ab4025 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM finsberg/fenics-gmsh
+FROM ghcr.io/scientificcomputing/fenics-gmsh:2023-08-16
 
 ARG REPO_BRANCH="main"
diff --git a/docs/reproducing.md b/docs/reproducing.md
index c249067..6297532 100644
--- a/docs/reproducing.md
+++ b/docs/reproducing.md
@@ -31,7 +31,7 @@ In order to reproduce the results you need to first run the pre-processing scrip
 ```
 python3 pre_processing.py
 ```
-This will convert the meshes from Gmsh to a Dolfin format.
+This will convert the meshes from Gmsh to a dolfin format.
 
 ### Fiber generation
 The next step is to run the fiber generation. You can do this by running the script
@@ -49,4 +49,4 @@ This will generate a file for visualizing the fibers in the Paraview (inside `co
 ```{bibliography}
 :filter: docname in docnames
-```
\ No newline at end of file
+```
diff --git a/index.md b/index.md
deleted file mode 100644
index 891ba84..0000000
--- a/index.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Supplementary code for the paper: Title of paper
-
-This repository contains supplementary code for the paper
-> Finsberg, H., Dokken, J. 2022.
-> Title of paper, Journal of blabla, volume, page, url
-
-
-## Contents
-```{tableofcontents}
-```
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..57da309
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,73 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "my-paper"
+classifiers = ["Private :: Do Not Upload"]
+version = "0"
+dependencies = [
+    "h5py==3.9.0",  # Pin to the same version that is already installed in the docker image
+    "cardiac-geometries",
+    "ldrb",
+    "requests",
+    "tqdm",
+]
+
+
+[project.optional-dependencies]
+dev = [
+    "pdbpp",
+    "pre-commit",
+]
+docs = [
+    "jupyter-book",
+    "jupytext",
+    "sphinxcontrib-bibtex",
+]
+
+[tool.ruff]
+# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
+select = ["E", "F"]
+ignore = ["E402", "E741", "E731", "E743"]
+
+# Allow autofix for all enabled rules (when `--fix` is provided).
+fixable = ["A", "B", "C", "D", "E", "F"] +unfixable = [] + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", +] + +# Same as Black. +line-length = 100 + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +# Assume Python 3.10. +target-version = "py310" + +[tool.ruff.mccabe] +# Unlike Flake8, default to a complexity level of 10. +max-complexity = 10 diff --git a/requirements-docs.txt b/requirements-docs.txt index a42f57e..5c4c045 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -1,2 +1,334 @@ -jupyter-book -sphinxcontrib-bibtex +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --extra=docs --output-file=requirements-docs.txt pyproject.toml +# +accessible-pygments==0.0.4 + # via pydata-sphinx-theme +alabaster==0.7.13 + # via sphinx +asttokens==2.4.1 + # via stack-data +attrs==23.1.0 + # via + # jsonschema + # jupyter-cache + # referencing +babel==2.13.1 + # via + # pydata-sphinx-theme + # sphinx +beautifulsoup4==4.12.2 + # via pydata-sphinx-theme +cardiac-geometries==0.11.0 + # via my-paper (pyproject.toml) +certifi==2023.7.22 + # via requests +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # jupyter-book + # jupyter-cache + # rich-click + # sphinx-external-toc +comm==0.2.0 + # via ipykernel +debugpy==1.8.0 + # via ipykernel +decorator==5.1.1 + # via ipython +docutils==0.18.1 + # via + # jupyter-book + # myst-parser + # pybtex-docutils + # pydata-sphinx-theme + # sphinx + # sphinx-togglebutton + # sphinxcontrib-bibtex +exceptiongroup==1.1.3 + # via ipython +executing==2.0.1 + # via stack-data +fastjsonschema==2.18.1 + # via nbformat +greenlet==3.0.1 + # via sqlalchemy +h5py==3.9.0 + # via + # ldrb + # my-paper (pyproject.toml) +idna==3.4 + # via requests +imagesize==1.4.1 + # via sphinx +importlib-metadata==6.8.0 + # via + # jupyter-cache + # myst-nb +ipykernel==6.26.0 + # via myst-nb +ipython==8.17.2 + # via + # ipykernel + # myst-nb +jedi==0.19.1 + # via ipython +jinja2==3.1.2 + # via + # jupyter-book + # myst-parser + # sphinx +jsonschema==4.19.2 + # via + # jupyter-book + # nbformat +jsonschema-specifications==2023.7.1 + # via jsonschema +jupyter-book==0.15.1 + # via my-paper (pyproject.toml) +jupyter-cache==0.6.1 + # via myst-nb +jupyter-client==8.6.0 + # via + # ipykernel + # nbclient +jupyter-core==5.5.0 + # via + # ipykernel + # jupyter-client + # nbclient + # nbformat +jupytext==1.15.2 + # via my-paper (pyproject.toml) +latexcodec==2.0.1 + # via pybtex +ldrb==2023.4.0 + # via my-paper (pyproject.toml) +linkify-it-py==2.0.2 + # via jupyter-book +llvmlite==0.40.1 + # via numba +markdown-it-py==2.2.0 + # via + # jupytext + # mdit-py-plugins + # myst-parser + # rich +markupsafe==2.1.3 + # via jinja2 +matplotlib-inline==0.1.6 + # via + # ipykernel + # ipython +mdit-py-plugins==0.3.5 + # via + # jupytext + # myst-parser +mdurl==0.1.2 + # via markdown-it-py +meshio==5.3.4 + # via cardiac-geometries +myst-nb==0.17.2 + # via jupyter-book +myst-parser==0.18.1 + # via myst-nb +nbclient==0.7.4 + # via + # jupyter-cache + # myst-nb +nbformat==5.9.2 + # via + # jupyter-cache + # jupytext + # myst-nb + # nbclient +nest-asyncio==1.5.8 + # via ipykernel +numba==0.57.1 + # via 
ldrb +numpy==1.24.4 + # via + # cardiac-geometries + # h5py + # ldrb + # meshio + # numba +packaging==23.2 + # via + # ipykernel + # pydata-sphinx-theme + # sphinx +parso==0.8.3 + # via jedi +pexpect==4.8.0 + # via ipython +platformdirs==3.11.0 + # via jupyter-core +prompt-toolkit==3.0.39 + # via ipython +psutil==5.9.6 + # via ipykernel +ptyprocess==0.7.0 + # via pexpect +pure-eval==0.2.2 + # via stack-data +pybtex==0.24.0 + # via + # pybtex-docutils + # sphinxcontrib-bibtex +pybtex-docutils==1.0.3 + # via sphinxcontrib-bibtex +pydata-sphinx-theme==0.14.3 + # via sphinx-book-theme +pygments==2.16.1 + # via + # accessible-pygments + # ipython + # pydata-sphinx-theme + # rich + # sphinx +python-dateutil==2.8.2 + # via jupyter-client +pyyaml==6.0.1 + # via + # jupyter-book + # jupyter-cache + # jupytext + # myst-nb + # myst-parser + # pybtex + # sphinx-external-toc +pyzmq==25.1.1 + # via + # ipykernel + # jupyter-client +referencing==0.30.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 + # via + # my-paper (pyproject.toml) + # sphinx +rich==13.6.0 + # via + # meshio + # rich-click +rich-click==1.7.1 + # via cardiac-geometries +rpds-py==0.12.0 + # via + # jsonschema + # referencing +six==1.16.0 + # via + # asttokens + # latexcodec + # pybtex + # python-dateutil +snowballstemmer==2.2.0 + # via sphinx +soupsieve==2.5 + # via beautifulsoup4 +sphinx==5.0.2 + # via + # jupyter-book + # myst-nb + # myst-parser + # pydata-sphinx-theme + # sphinx-book-theme + # sphinx-comments + # sphinx-copybutton + # sphinx-design + # sphinx-external-toc + # sphinx-jupyterbook-latex + # sphinx-multitoc-numbering + # sphinx-thebe + # sphinx-togglebutton + # sphinxcontrib-applehelp + # sphinxcontrib-bibtex + # sphinxcontrib-devhelp + # sphinxcontrib-htmlhelp + # sphinxcontrib-qthelp + # sphinxcontrib-serializinghtml +sphinx-book-theme==1.0.1 + # via jupyter-book +sphinx-comments==0.0.3 + # via jupyter-book +sphinx-copybutton==0.5.2 + # via jupyter-book +sphinx-design==0.3.0 + # via jupyter-book +sphinx-external-toc==0.3.1 + # via jupyter-book +sphinx-jupyterbook-latex==0.5.2 + # via jupyter-book +sphinx-multitoc-numbering==0.1.3 + # via jupyter-book +sphinx-thebe==0.2.1 + # via jupyter-book +sphinx-togglebutton==0.3.2 + # via jupyter-book +sphinxcontrib-applehelp==1.0.7 + # via sphinx +sphinxcontrib-bibtex==2.5.0 + # via + # jupyter-book + # my-paper (pyproject.toml) +sphinxcontrib-devhelp==1.0.5 + # via sphinx +sphinxcontrib-htmlhelp==2.0.4 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.6 + # via sphinx +sphinxcontrib-serializinghtml==1.1.9 + # via sphinx +sqlalchemy==2.0.23 + # via jupyter-cache +stack-data==0.6.3 + # via ipython +tabulate==0.9.0 + # via jupyter-cache +toml==0.10.2 + # via jupytext +tornado==6.3.3 + # via + # ipykernel + # jupyter-client +tqdm==4.66.1 + # via my-paper (pyproject.toml) +traitlets==5.13.0 + # via + # comm + # ipykernel + # ipython + # jupyter-client + # jupyter-core + # matplotlib-inline + # nbclient + # nbformat +typing-extensions==4.8.0 + # via + # myst-nb + # myst-parser + # pydata-sphinx-theme + # rich-click + # sqlalchemy +uc-micro-py==1.0.2 + # via linkify-it-py +urllib3==2.0.7 + # via requests +wcwidth==0.2.9 + # via prompt-toolkit +wheel==0.41.3 + # via sphinx-togglebutton +zipp==3.17.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements.txt b/requirements.txt index 35e38d7..c6b5542 100644 --- a/requirements.txt +++ 
b/requirements.txt @@ -1,163 +1,51 @@ -alabaster==0.7.12 -anyio==3.6.2 -argon2-cffi==21.3.0 -argon2-cffi-bindings==21.2.0 -asttokens==2.1.0 -attrs==21.4.0 -Babel==2.11.0 -backcall==0.2.0 -beautifulsoup4==4.11.1 -bleach==5.0.1 +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --output-file=requirements.txt pyproject.toml +# cardiac-geometries==0.3.11 -certifi==2022.9.24 -cffi==1.15.1 -charset-normalizer==2.1.1 + # via my-paper (pyproject.toml) +certifi==2023.7.22 + # via requests +charset-normalizer==3.3.2 + # via requests click==8.1.3 -cmake==3.22.5 -colorama==0.4.6 + # via rich-click commonmark==0.9.1 -contourpy==1.0.5 -cycler==0.11.0 -debugpy==1.6.3 -decorator==5.1.1 -defusedxml==0.7.1 -dev-fenics-dijitso==2019.2.1 -dev-fenics-ffc==2019.2.2 -dev-fenics-fiat==2019.2.1 -dev-fenics-ufl==2021.1.1 -docutils==0.17.1 -entrypoints==0.4 -executing==1.2.0 -fancycompleter==0.9.1 -fastjsonschema==2.16.2 -fonttools==4.37.4 -gitdb==4.0.9 -GitPython==3.1.29 -greenlet==2.0.1 -h5py==3.7.0 + # via rich +h5py==3.9.0 + # via + # ldrb + # my-paper (pyproject.toml) idna==3.4 -imagesize==1.4.1 -importlib-metadata==5.0.0 -ipykernel==6.17.1 -ipython==8.6.0 -ipython-genutils==0.2.0 -ipywidgets==7.7.2 -jedi==0.18.1 -Jinja2==3.1.2 -json5==0.9.10 -jsonschema==3.2.0 -jupyter==1.0.0 -jupyter-book==0.13.1 -jupyter-cache==0.4.3 -jupyter-console==6.4.4 -jupyter-server==1.23.2 -jupyter-server-mathjax==0.2.6 -jupyter-sphinx==0.3.2 -jupyter_client==7.4.7 -jupyter_core==5.0.0 -jupyterlab==3.5.0 -jupyterlab-pygments==0.2.2 -jupyterlab-widgets==1.1.1 -jupyterlab_server==2.16.3 -kiwisolver==1.4.4 -latexcodec==2.0.1 -ldrb==2022.5.0 -linkify-it-py==1.0.3 -llvmlite==0.39.1 -lxml==4.9.1 -markdown-it-py==1.1.0 -MarkupSafe==2.1.1 -matplotlib==3.6.1 -matplotlib-inline==0.1.6 -mdit-py-plugins==0.2.8 + # via requests +ldrb==2023.4.0 + # via my-paper (pyproject.toml) +llvmlite==0.40.1 + # via numba meshio==5.3.4 -mistune==0.8.4 -mpi4py==3.1.3 -mpmath==1.2.1 -myst-nb==0.13.2 -myst-parser==0.15.2 -nbclassic==0.4.8 -nbclient==0.5.13 -nbconvert==6.5.4 -nbdime==3.1.1 -nbformat==5.7.0 -nest-asyncio==1.5.6 -notebook==6.5.2 -notebook_shim==0.2.2 -numba==0.56.4 + # via cardiac-geometries +numba==0.57.1 + # via ldrb numpy==1.21.6 -numpy-quaternion==2022.4.2 -packaging==21.3 -pandocfilters==1.5.0 -parso==0.8.3 -pdbpp==0.10.3 -petsc4py==3.17.2 -pexpect==4.8.0 -pickleshare==0.7.5 -Pillow==9.2.0 -pkgconfig==1.5.5 -platformdirs==2.5.4 -prometheus-client==0.15.0 -prompt-toolkit==3.0.32 -psutil==5.9.4 -ptyprocess==0.7.0 -pure-eval==0.2.2 -pybind11==2.9.2 -pybtex==0.24.0 -pybtex-docutils==1.0.2 -pycparser==2.21 -pydata-sphinx-theme==0.8.1 -Pygments==2.13.0 -pyparsing==3.0.9 -pyrepl==0.9.0 -pyrsistent==0.19.2 -python-dateutil==2.8.2 -pytz==2022.6 -PyYAML==6.0 -pyzmq==24.0.1 -qtconsole==5.4.0 -QtPy==2.3.0 -requests==2.28.1 + # via + # cardiac-geometries + # h5py + # ldrb + # meshio + # numba +pygments==2.13.0 + # via rich +requests==2.31.0 + # via my-paper (pyproject.toml) rich==12.6.0 + # via + # meshio + # rich-click rich-click==1.5.2 -scipy==1.9.2 -Send2Trash==1.8.0 -six==1.16.0 -slepc4py==3.17.1 -smmap==5.0.0 -sniffio==1.3.0 -snowballstemmer==2.2.0 -soupsieve==2.3.2.post1 -Sphinx==4.5.0 -sphinx-book-theme==0.3.3 -sphinx-comments==0.0.3 -sphinx-copybutton==0.5.1 -sphinx-external-toc==0.2.4 -sphinx-jupyterbook-latex==0.4.7 -sphinx-multitoc-numbering==0.1.3 -sphinx-thebe==0.1.2 -sphinx-togglebutton==0.3.2 -sphinx_design==0.1.0 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-bibtex==2.5.0 
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-SQLAlchemy==1.4.44
-stack-data==0.6.1
-sympy==1.10.1
-terminado==0.17.0
-tinycss2==1.2.1
-tomli==2.0.1
-tornado==6.2
-traitlets==5.5.0
-uc-micro-py==1.0.1
-urllib3==1.26.12
-wcwidth==0.2.5
-webencodings==0.5.1
-websocket-client==1.4.2
-widgetsnbextension==3.6.1
-wmctrl==0.4
-zipp==3.10.0
+    # via cardiac-geometries
+tqdm==4.66.1
+    # via my-paper (pyproject.toml)
+urllib3==2.0.7
+    # via requests

From 03d231d243fbb02eeb814cf557c163d1bf11c262 Mon Sep 17 00:00:00 2001
From: Henrik Finsberg
Date: Wed, 8 Nov 2023 13:51:31 +0100
Subject: [PATCH 2/5] Use correct requirements when building docs

---
 .github/workflows/build_docs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml
index 7b8f4c4..7223861 100644
--- a/.github/workflows/build_docs.yml
+++ b/.github/workflows/build_docs.yml
@@ -33,7 +33,7 @@ jobs:
     - uses: actions/checkout@v4
 
     - name: Install dependencies
-      run: python3 -m pip install -r requirements.txt
+      run: python3 -m pip install -r requirements-docs.txt
 
     - name: Build docs
       run: jupyter book build .

From 4b845f7d8ed0512c9e5f6347d7acaaa6e31a3116 Mon Sep 17 00:00:00 2001
From: Henrik Finsberg
Date: Wed, 8 Nov 2023 13:51:51 +0100
Subject: [PATCH 3/5] Use correct requirements when building docs and turn
 warnings into errors

---
 .github/workflows/build_docs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml
index 7223861..9f197a6 100644
--- a/.github/workflows/build_docs.yml
+++ b/.github/workflows/build_docs.yml
@@ -36,7 +36,7 @@ jobs:
       run: python3 -m pip install -r requirements-docs.txt
 
     - name: Build docs
-      run: jupyter book build .
+      run: jupyter book build -W .

From f4db65f5a26fa046ec2b0de9bbebe6f2c73b9a79 Mon Sep 17 00:00:00 2001
From: Henrik Finsberg
Date: Wed, 8 Nov 2023 14:10:56 +0100
Subject: [PATCH 4/5] Add workflow for reproducing results and update results
 due to changes in the underlying ldrb package

---
 .github/workflows/reproduce_results.yml | 56 +++++++++++++++++++++++++
 code/postprocess.py                     |  2 +-
 code/results/features_01.json           |  8 ++--
 code/results/features_02.json           |  8 ++--
 data/download_data.sh                   |  3 +-
 5 files changed, 67 insertions(+), 10 deletions(-)
 create mode 100644 .github/workflows/reproduce_results.yml

diff --git a/.github/workflows/reproduce_results.yml b/.github/workflows/reproduce_results.yml
new file mode 100644
index 0000000..3af23d4
--- /dev/null
+++ b/.github/workflows/reproduce_results.yml
@@ -0,0 +1,56 @@
+# Workflow for reproducing the results of the paper
+name: Reproduce results
+
+on:
+  push:
+    branches: ["main"]
+  pull_request:
+    branches: ["main"]
+
+  # Allows you to run this workflow manually from the Actions tab
+  workflow_dispatch:
+  workflow_call:
+
+
+jobs:
+  run:
+    runs-on: ubuntu-22.04
+    container:
+      image: ghcr.io/scientificcomputing/fenics-gmsh:2023-08-16
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Cache
+        id: cache
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/pip
+          key: cache_v1
+          restore-keys: |
+            cache_v1
+
+      - name: Install dependencies
+        run: python3 -m pip install -r requirements.txt
+
+      - name: Download data
+        run: cd data && bash download_data.sh && cd ..
+
+      - name: Run pre-processing
+        run: cd code && python3 pre_processing.py && cd ..
+
+      - name: Run simulation
+        run: cd code && python3 run_fiber_generation.py && cd ..
+
+      - name: Postprocess
+        run: cd code && python3 postprocess.py && cd ..
+
+      - name: Upload artifact
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          path: |
+            ./data/mesh
+            ./code/results
+          if-no-files-found: error
diff --git a/code/postprocess.py b/code/postprocess.py
index ca5f208..4d7dfea 100644
--- a/code/postprocess.py
+++ b/code/postprocess.py
@@ -64,7 +64,7 @@ def check_results(features_path: Path, features: dict):
     # Check each (key, value) pair in the features and check they are
     # within machine precision
     for key in expected_features.keys():
-        if not np.isclose(expected_features[key], features[key]):
+        if not np.isclose(expected_features[key], features[key], rtol=1e-8):
             logger.error(f"{key}: {expected_features[key]}!={features[key]}")
             reproducible = False
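For context on the `rtol=1e-8` added above: `np.isclose` uses `rtol=1e-5` by default, so this change makes the reproducibility check roughly a thousand times stricter, not looser. A small sketch of the difference, reusing one of the old feature values from the results files below:

    import numpy as np

    a = 0.5715906050031196  # the old "std" value from features_01.json
    b = a * (1 + 1e-6)      # perturb it by a relative error of 1e-6

    print(np.isclose(a, b))             # True: the default rtol=1e-5 tolerates this
    print(np.isclose(a, b, rtol=1e-8))  # False: the stricter check rejects it
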
diff --git a/code/results/features_01.json b/code/results/features_01.json
index 2ab514f..22dd63e 100644
--- a/code/results/features_01.json
+++ b/code/results/features_01.json
@@ -1,7 +1,7 @@
 {
   "size": 4713,
-  "min": -0.9986740049294516,
-  "max": 0.9997579018328593,
-  "mean": 0.08134810142530069,
-  "std": 0.5715906050031196
+  "min": -0.9989439046376276,
+  "max": 0.9997658401474652,
+  "mean": -0.001603664479271975,
+  "std": 0.5773480419933641
 }
diff --git a/code/results/features_02.json b/code/results/features_02.json
index 0d5c7f6..8f4089c 100644
--- a/code/results/features_02.json
+++ b/code/results/features_02.json
@@ -1,7 +1,7 @@
 {
   "size": 139263,
-  "min": -0.9999307486841351,
-  "max": 0.99999840073201,
-  "mean": 0.05442689976746109,
-  "std": 0.5747791279396253
+  "min": -0.9999985280667274,
+  "max": 0.9999988537980897,
+  "mean": -0.07252592808131289,
+  "std": 0.57277685148967
 }
diff --git a/data/download_data.sh b/data/download_data.sh
index 07c6259..f3fe135 100644
--- a/data/download_data.sh
+++ b/data/download_data.sh
@@ -1,5 +1,6 @@
 #!/usr/bin/env bash
 
 # Removing the dl=0 from the end of the shared link from dropbox
-curl -L https://www.dropbox.com/s/6bkbw6v269dyfie/data.tar -o data.tar
+# curl -L https://www.dropbox.com/s/6bkbw6v269dyfie/data.tar -o data.tar
+wget https://www.dropbox.com/s/6bkbw6v269dyfie/data.tar
 tar -xvf data.tar

From e1f8277a8f74a6230740b1c5cb4c17d8ad5a22f6 Mon Sep 17 00:00:00 2001
From: Henrik Finsberg
Date: Wed, 8 Nov 2023 14:18:31 +0100
Subject: [PATCH 5/5] Set lower bound on cardiac-geometries

---
 pyproject.toml   | 2 +-
 requirements.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 57da309..8564da9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,7 +8,7 @@ classifiers = ["Private :: Do Not Upload"]
 version = "0"
 dependencies = [
     "h5py==3.9.0",  # Pin to the same version that is already installed in the docker image
-    "cardiac-geometries",
+    "cardiac-geometries>=0.11.0",
     "ldrb",
     "requests",
     "tqdm",
 ]
diff --git a/requirements.txt b/requirements.txt
index c6b5542..ac03bb4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@
 #
 #    pip-compile --output-file=requirements.txt pyproject.toml
 #
-cardiac-geometries==0.3.11
+cardiac-geometries==0.11.0
     # via my-paper (pyproject.toml)
 certifi==2023.7.22
     # via requests
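Patch 5 keeps the `pyproject.toml` lower bound in sync with the version pip-compile resolved into `requirements.txt`. A quick local sanity check of that invariant could look like the sketch below; it assumes Python >= 3.8 and only inspects the one package the patch touches:

    from importlib.metadata import version

    installed = version("cardiac-geometries")

    # Compare the (major, minor) part against the >=0.11.0 bound from pyproject.toml
    major, minor = (int(part) for part in installed.split(".")[:2])
    assert (major, minor) >= (0, 11), f"cardiac-geometries {installed} is too old"
    print(f"cardiac-geometries {installed} satisfies the lower bound")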