diff --git a/SConstruct b/SConstruct
index 3724bda2..11a59a8d 100644
--- a/SConstruct
+++ b/SConstruct
@@ -1,439 +1,446 @@
 # -*- mode:python; coding: utf-8 -*-
 # vim: set ft=python:
 
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 # ------------------------------------------------------------------------------
 # Imports
 # ------------------------------------------------------------------------------
 from __future__ import print_function
 
 import os
 
 import SCons
 
 from os.path import join, abspath
 
 # Import below not strictly necessary, but good for pep8
 from SCons.Script import (
     EnsurePythonVersion,
     EnsureSConsVersion,
     Help,
     Environment,
     Variables,
     EnumVariable,
     PathVariable,
     BoolVariable,
     Split,
     SConscript,
     Export,
     Dir
 )
 
 from version import get_git_subst
 from detect import (
     FindFFTW,
     FindBoost,
     FindThrust,
     FindCuda,
     FindExpolit,
     FindPybind11
 )
 
 # ------------------------------------------------------------------------------
 
 EnsurePythonVersion(2, 7)
 EnsureSConsVersion(2, 4)
 
 # ------------------------------------------------------------------------------
 
 tamaas_info = dict(
     version="2.1.1",
     authors=[
         u'Lucas Frérot',
         'Guillaume Anciaux',
         'Valentine Rey',
         'Son Pham-Ba',
         u'Jean-François Molinari'
     ],
     maintainer=u'Lucas Frérot',
     email='lucas.frerot@epfl.ch',
     copyright=u"Copyright (©) 2016-2020 EPFL "
     + u"(École Polytechnique Fédérale de Lausanne), "
     + u"Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)"
 )
 
 
 # ------------------------------------------------------------------------------
 def detect_dependencies(env):
     """Detect all dependencies"""
     FindFFTW(env, ['omp'], precision=env['real_type'])
     FindBoost(env, ['boost/preprocessor/seq.hpp'])
     FindExpolit(env)
 
     thrust_var = 'THRUST_ROOT'
 
     # Take cuda version of thrust if available
     if 'CUDA_ROOT' in env['ENV']:
         thrust_var = 'CUDA_ROOT'
 
     FindThrust(env, env['backend'], thrust_var)
 
     # Activate cuda if needed
     if env['backend'] == 'cuda':
         FindCuda(env)
 
     if env['build_python']:
         FindPybind11(env)
 
 
 # ------------------------------------------------------------------------------
 # Main compilation
 # ------------------------------------------------------------------------------
 
 # Compilation colors
 colors = {
   'cyan': '\033[96m',
   'purple': '\033[95m',
   'blue': '\033[94m',
   'green': '\033[92m',
   'yellow': '\033[93m',
   'red': '\033[91m',
   'end': '\033[0m'
 }
 
 # Inherit all environment variables (for CXX detection, etc.)
 main_env = Environment(
     ENV=os.environ,
 )
 
 # Set tamaas information
 for k, v in tamaas_info.items():
     main_env[k] = v
 
 main_env['COLOR_DICT'] = colors
 
 # Compiler detection
 compiler_default = os.getenv('CXX', 'g++')
 
 # Build variables
 vars = Variables('build-setup.conf')
 vars.AddVariables(
     EnumVariable('build_type', 'Build type', 'release',
                  allowed_values=('release', 'profiling', 'debug'),
                  ignorecase=2),
     EnumVariable('backend', 'Thrust backend', 'omp',
                  allowed_values=('omp', 'tbb'),
                  # allowed_values=('omp', 'cuda'),
                  ignorecase=2),
     EnumVariable('sanitizer', 'Sanitizer type', 'none',
                  allowed_values=('none', 'memory', 'leaks', 'address'),
                  ignorecase=2),
 
     PathVariable('prefix',
                  'Prefix where to install', '/usr/local'),
 
     # Dependencies paths
     PathVariable('FFTW_ROOT',
                  'FFTW custom path', os.getenv('FFTW_ROOT', ''),
                  PathVariable.PathAccept),
     PathVariable('THRUST_ROOT',
                  'Thrust custom path', os.getenv('THRUST_ROOT', ''),
                  PathVariable.PathAccept),
     PathVariable('BOOST_ROOT',
                  'Boost custom path', os.getenv('BOOST_ROOT', ''),
                  PathVariable.PathAccept),
     PathVariable('CUDA_ROOT',
                  'Cuda custom path', os.getenv('CUDA_ROOT', ''),
                  PathVariable.PathAccept),
     # Dependencies provided as submodule get different default
     PathVariable('GTEST_ROOT',
                  'Googletest custom path',
                  os.getenv('GTEST_ROOT', '#third-party/googletest/googletest'),
                  PathVariable.PathAccept),
     PathVariable('PYBIND11_ROOT',
                  'Pybind11 custom path',
                  os.getenv('PYBIND11_ROOT', '#third-party/pybind11/include'),
                  PathVariable.PathAccept),
     PathVariable('EXPOLIT_ROOT',
                  'Expolit custom path',
                  os.getenv('EXPOLIT_ROOT', '#third-party/expolit/include'),
                  PathVariable.PathAccept),
 
     # Executables
     ('CXX', 'Compiler', compiler_default),
     ('py_exec', 'Python executable', 'python3'),
 
     # Compiler flags
     ('CXXFLAGS', 'C++ compiler flags', os.getenv('CXXFLAGS', "")),
 
     # Compile legacy code
     BoolVariable('legacy_bem', 'Compile legacy BEM code', False),
     # Cosmetic
     BoolVariable('verbose', 'Activate verbosity', False),
     BoolVariable('color', 'Color the non-verbose compilation output', False),
 
     # Tamaas components
     BoolVariable('build_doc', 'Build documentation', False),
     BoolVariable('build_tests', 'Build test suite', False),
     BoolVariable('build_python', 'Build python wrapper', True),
 
     # Dependencies
     BoolVariable('use_googletest', 'Build tests using GTest', False),
 
     # Strip binary of extra info
     BoolVariable('strip_info', 'Strip binary of added information', False),
 
     # Type variables
     EnumVariable('real_type', 'Type for real precision variables', 'double',
                  allowed_values=('double', 'long double')),
     EnumVariable('integer_type', 'Type for integer variables', 'int',
                  allowed_values=('int', 'long')),
 )
 
 # Set variables of environment
 vars.Update(main_env)
 help_text = vars.GenerateHelpText(main_env)
 help_text += """
 Commands:
     scons [build] [options]...                            Compile Tamaas (and additional modules/tests)
     scons install [prefix=/your/prefix] [options]...      Install Tamaas to prefix
     scons dev                                             Install symlink to Tamaas python module (useful for development purposes)
     scons test                                            Run tests with pytest
     scons doc                                             Compile documentation with Doxygen and Sphinx+Breathe
     scons archive                                         Create a gzipped archive from source
 """  # noqa
 Help(help_text)
 
 # Save all options, not just those that differ from default
 with open('build-setup.conf', 'w') as setup:
     for option in vars.options:
         setup.write("# " + option.help + "\n")
         setup.write("{} = '{}'\n".format(option.key, main_env[option.key]))
 
 main_env['should_configure'] = \
     not main_env.GetOption('clean') and not main_env.GetOption('help')
 
 build_type = main_env['build_type']
 build_dir = 'build-' + main_env['build_type']
 main_env['build_dir'] = build_dir
 
 if main_env['should_configure']:
     print("Building in " + build_dir)
 
 verbose = main_env['verbose']
 
 # Remove colors if not set
 if not main_env['color']:
     for key in colors:
         colors[key] = ''
 
 if not verbose:
     main_env['CXXCOMSTR'] = main_env['SHCXXCOMSTR'] = \
         u'{0}[Compiling ($SHCXX)] {1}$SOURCE'.format(colors['green'],
                                                      colors['end'])
     main_env['LINKCOMSTR'] = main_env['SHLINKCOMSTR'] = \
         u'{0}[Linking] {1}$TARGET'.format(colors['purple'],
                                           colors['end'])
     main_env['PRINT_CMD_LINE_FUNC'] = pretty_cmd_print
 
 # Include paths
 main_env.AppendUnique(CPPPATH=['#/src',
                                '#/src/core',
                                '#/src/bem',
                                '#/src/surface',
                                '#/src/python',
                                '#/src/percolation',
                                '#/src/model',
                                '#/src/model/elasto_plastic',
                                '#/src/solvers',
                                '#/src/gpu',
                                '#/python'])
 
 # Changing the shared object extension
 main_env['SHOBJSUFFIX'] = '.o'
 
 # Back to gcc if cuda is activated
 if main_env['backend'] == "cuda" and "g++" not in main_env['CXX']:
     raise SCons.Errors.StopError('GCC should be used when compiling with CUDA')
 
 # OpenMP flags - compiler dependent
 omp_flags = {
     "g++": ["-fopenmp"],
     "clang++": ["-fopenmp"],
     "icpc": ["-qopenmp"]
 }
 
 
 def cxx_alias(cxx):
     for k in omp_flags.keys():
         if k in cxx:
             return k
 
     raise SCons.Errors.StopError('Unsupported compiler: ' + cxx)
 
 
 cxx = cxx_alias(main_env['CXX'])
 main_env['CXXFLAGS'] = Split(main_env['CXXFLAGS'])
 main_env.AppendUnique(CXXFLAGS='-std=c++14 -Wall -Wextra -pedantic'.split())
 
 # Append openmp flags regardless of backend for fftw_omp
 main_env.AppendUnique(CXXFLAGS=omp_flags[cxx])
 main_env.AppendUnique(LINKFLAGS=omp_flags[cxx])
 
 # Correct bug in clang?
 if main_env['backend'] == 'omp' and cxx == "clang++":
     main_env.AppendUnique(LIBS=["atomic"])
 elif main_env['backend'] == 'tbb':
     main_env.AppendUnique(LIBS=['tbb'])
 
 # Force deactivate legacy when using intel
 # Intel does not like old openmp loops
 if cxx == 'icpc' and main_env['legacy_bem']:
     print("Using intel compiler => deactivating legacy code")
     main_env['legacy_bem'] = False
 
 # Flags and options
 if main_env['build_type'] == 'debug':
     main_env.AppendUnique(CPPDEFINES=['TAMAAS_DEBUG'])
 
 # Define the scalar types
 main_env.AppendUnique(CPPDEFINES={'REAL_TYPE': '${real_type}',
                                   'INT_TYPE': '${integer_type}'})
 
 # Compilation flags
 cxxflags_dict = {
     "debug": Split("-g -O0"),
     "profiling": Split("-g -O3 -fno-omit-frame-pointer"),
     "release": Split("-O3")
 }
 
 # Link flags for shared libs
 shlinkflags_dict = {
     "debug": [],
     "profiling": Split("-g -O3 -fno-omit-frame-pointer"),
     "release": []
 }
 
 if main_env['sanitizer'] != 'none':
     if main_env['backend'] == 'cuda':
         raise SCons.Errors.StopError(
             "Sanitizers with cuda are not yet supported!")
     cxxflags_dict[build_type].append('-fsanitize=${sanitizer}')
     shlinkflags_dict[build_type].append('-fsanitize=${sanitizer}')
 
 main_env.AppendUnique(CXXFLAGS=cxxflags_dict[build_type])
 main_env.AppendUnique(SHLINKFLAGS=shlinkflags_dict[build_type])
 main_env.AppendUnique(LINKFLAGS=shlinkflags_dict[build_type])
 
 # Library search paths (build tree and install prefix)
 main_env['LIBPATH'] = [abspath(join(build_dir, 'src')),
                        abspath(join(main_env['prefix'], 'lib'))]
 main_env['RPATH'] = "$LIBPATH"
 
 if main_env['should_configure']:
     detect_dependencies(main_env)
 
 # Writing information file
 main_env.Tool('textfile')
 main_env['SUBST_DICT'] = get_git_subst()
 
 # Empty values if requested
 if main_env['strip_info']:
     for k in main_env['SUBST_DICT']:
         main_env['SUBST_DICT'][k] = ""
 
 # Substitution of environment file
 main_env['SUBST_DICT'].update({
     '@build_type@': '$build_type',
     '@build_dir@': abspath(build_dir),
 })
 
 # Environment file content
 env_content = """export PYTHONPATH=@build_dir@/python:$$PYTHONPATH
 export LD_LIBRARY_PATH=@build_dir@/src:$$LD_LIBRARY_PATH
 """
 
 # Writing environment file
 env_file = main_env.Textfile(join(build_dir, 'tamaas_environment.sh'),
                              env_content)
 
 
 # Building sub-directories
 def subdir(dir):
     return SConscript(join(dir, 'SConscript'),
                       variant_dir=join(build_dir, dir),
                       duplicate=True)
 
 
 # Building Tamaas library
 Export('main_env')
 subdir('src')
 
 build_targets = ['build-cpp', env_file]
 install_targets = ['install-lib']
 
 # Building Tamaas extra components
 for dir in ['python', 'tests']:
     if main_env['build_{}'.format(dir)] and not main_env.GetOption('help'):
         subdir(dir)
         build_targets.append('build-{}'.format(dir))
 
 # Building API + Sphinx documentation if requested
 if main_env['build_doc']:
-    doc = main_env.Command('#.phony_doc', '',
-                           'make -C {}'.format(Dir('#/doc')))
+    doc_env = main_env.Clone()
+    doc = doc_env.Command('#.phony_doc', '',
+                          'make -C {doc} clean && make -C {doc}'
+                          .format(doc=Dir('#/doc')))
+    if doc_env['build_python']:
+        doc_env.PrependENVPath('PYTHONPATH',
+                               doc_env.subst('../${build_dir}/python'))
+
+        doc_env.Depends(doc, 'build-python')
     main_env.Alias('doc', doc)
 else:
     dummy_command(main_env, 'doc', 'Command "doc" does not do anything'
                   ' without documentation activated ("build_doc=True")')
 
 # Define dummy dev command when python is deactivated
 if not main_env['build_python']:
     dummy_command(main_env, 'dev', 'Command "dev" does not do anything'
                   + ' without python activated ("build_python=True")')
 else:
     install_targets.append('install-python')
 
 # Define dummy test command when tests are deactivated
 if not main_env['build_tests']:
     dummy_command(main_env, 'test', 'Command "test" does not do anything'
                   + ' without tests activated ("build_tests=True")')
 
 # Definition of target aliases, a.k.a. sub-commands
 main_env.Alias('build', build_targets)
 
 # Define proper install targets
 main_env.Alias('install', install_targets)
 
 # Default target is to build stuff
 main_env.Default('build')
 
 # Building a tar archive
 archive = main_env.Command(
     '../tamaas-{}.tar.gz'.format(main_env['version']),
     '',
     ('tar --exclude-vcs --exclude-vcs-ignores '
      '--exclude=third-party/googletest '
      '--exclude=third-party/pybind11 '
      '--exclude=joss '
      '-czf $TARGET .'),
 )
 main_env.Alias('archive', archive)
diff --git a/doc/sphinx/source/conf.py b/doc/sphinx/source/conf.py
index dee97b81..a45ab4e0 100644
--- a/doc/sphinx/source/conf.py
+++ b/doc/sphinx/source/conf.py
@@ -1,204 +1,205 @@
 # -*- coding: utf-8 -*-
 #
 # Configuration file for the Sphinx documentation builder.
 #
 # This file does only contain a selection of the most common options. For a
 # full list see the documentation:
 # http://www.sphinx-doc.org/en/master/config
 
 # -- Path setup --------------------------------------------------------------
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
 import os
 # import sys
 # sys.path.insert(0, os.path.abspath('.'))
 import subprocess
 
 
 # -- Project information -----------------------------------------------------
 
 project = 'Tamaas'
 copyright = "2019-2020, EPFL (École Polytechnique Fédérale de Lausanne)," \
     + " Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)"
-author = 'Lucas Frérot, Guillaume Anciaux, Valentine Rey, Son Pham-ba'
+author = 'Lucas Frérot, Guillaume Anciaux, Valentine Rey, Son Pham-Ba, ' \
+    'Jean-François Molinari'
 
 # The short X.Y version
 version = ''
 # The full version, including alpha/beta/rc tags
 release = '1.0.0'
 
 
 # -- General configuration ---------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
 #
 # needs_sphinx = '1.0'
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
     'sphinx.ext.autodoc',
     'sphinx.ext.mathjax',
     'sphinx.ext.viewcode',
     'sphinx.ext.intersphinx',
     'breathe',
 ]
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['.templates']
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
 # source_suffix = ['.rst', '.md']
 source_suffix = '.rst'
 
 # The master toctree document.
 master_doc = 'index'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
 language = None
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
 exclude_patterns = []
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = None
 
 
 # -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
 html_theme = 'sphinx_rtd_theme'
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
 #
 # html_theme_options = {}
 
 html_logo = '../../icon.svg'
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
 html_static_path = ['.static']
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
 #
 # The default sidebars (for documents that don't match any pattern) are
 # defined by theme itself.  Builtin themes are using these templates by
 # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
 # 'searchbox.html']``.
 #
 # html_sidebars = {}
 
 
 # -- Options for HTMLHelp output ---------------------------------------------
 
 # Output file base name for HTML help builder.
 htmlhelp_basename = 'Tamaasdoc'
 
 
 # -- Options for LaTeX output ------------------------------------------------
 latex_engine = 'lualatex'
 
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
 
     # The font size ('10pt', '11pt' or '12pt').
     #
     # 'pointsize': '10pt',
 
     # Additional stuff for the LaTeX preamble.
     #
     # 'preamble': '',
 
     # Latex figure (float) alignment
     #
     # 'figure_align': 'htbp',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
     (master_doc, 'Tamaas.tex', 'Tamaas Documentation',
-     'Lucas Frérot, Guillaume Anciaux, Valentine Rey, Son Pham-ba', 'manual'),
+     author, 'manual'),
 ]
 
 
 # -- Options for manual page output ------------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
     (master_doc, 'tamaas', 'Tamaas Documentation',
      [author], 1)
 ]
 
 
 # -- Options for Texinfo output ----------------------------------------------
 
 # Grouping the document tree into Texinfo files. List of tuples
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
     (master_doc, 'Tamaas', 'Tamaas Documentation',
      author, 'Tamaas', 'One line description of project.',
      'Miscellaneous'),
 ]
 
 
 # -- Options for Epub output -------------------------------------------------
 
 # Bibliographic Dublin Core info.
 epub_title = project
 
 # The unique identifier of the text. This can be a ISBN number
 # or the project homepage.
 #
 # epub_identifier = ''
 
 # A unique identification for the text.
 #
 # epub_uid = ''
 
 # A list of files that should not be packed into the epub file.
 epub_exclude_files = ['search.html']
 
 
 # -- Extension configuration -------------------------------------------------
 
 # If on RTD build, run doxygen
 on_read_the_docs = os.environ.get('READTHEDOCS') == 'True'
 
 if on_read_the_docs:
     subprocess.call('cd ../../; mkdir -p build/doxygen; '
                     + 'doxygen doxygen/Doxyfile', shell=True)
 
 breathe_projects = {
     'tamaas': '../../build/doxygen/xml'
 }
 
 breathe_default_project = 'tamaas'
 
 intersphinx_mapping = {
     'numpy': ('https://docs.scipy.org/doc/numpy/', None),
     'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
 }
diff --git a/doc/sphinx/source/contact.rst b/doc/sphinx/source/contact.rst
index 1178253b..48de2e07 100644
--- a/doc/sphinx/source/contact.rst
+++ b/doc/sphinx/source/contact.rst
@@ -1,133 +1,133 @@
 Solving contact
 ===============
 
 The resolution of contact problems is done with classes that inherit from :cpp:class:`tamaas::ContactSolver`. These usually take as arguments a :cpp:class:`tamaas::Model` object, a surface described by a :cpp:class:`tamaas::Grid` or a 2D numpy array, and a tolerance. We will see the specificities of the different solver objects below.
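 
 The snippets in this section assume that a ``model`` object and a ``surface``
 array already exist. For illustration only, a minimal sketch of such a setup
 could look as follows (the random array is a mere placeholder for a rough
 surface, see :doc:`rough_surfaces`; refer to :doc:`model` for the actual model
 creation)::
 
   import numpy as np
   import tamaas as tm
 
   n = 256
 
   # Boundary ('basic') model on a unit periodic domain, discretized n x n
   model = tm.ModelFactory.createModel(tm.model_type.basic_2d,
                                       [1., 1.], [n, n])
   model.E, model.nu = 1., 0.3  # elastic constants
 
   # Placeholder surface; use a proper rough surface generator in practice
   surface = 1e-3 * np.random.randn(n, n)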
 
 
 Elastic contact
 ---------------
 
 The most common case is normal elastic contact, which is most efficiently solved with :cpp:class:`tamaas::PolonskyKeerRey`. The advantage of this class is that it combines two algorithms into one. By default, it treats the contact pressure field as the unknown and minimizes the complementary energy of the system under the constraint that the mean pressure equal the value supplied by the user, for example::
 
   # ...
   solver = tm.PolonskyKeerRey(model, surface, 1e-12)
   solver.solve(1e-2)
 
 Here the average pressure is ``1e-2``. The solver can also be instantiated by specifying that the constraint should be on the mean gap instead of the mean pressure::
 
   solver = tm.PolonskyKeerRey(model, surface, 1e-12, constraint_type=tm.PolonskyKeerRey.gap)
   solver.solve(1e-2)
 
 The contact problem is now solved for a mean gap of ``1e-2``. Note that the choice of constraint affects the performance of the algorithm.
 
 Contact with adhesion
 ---------------------
 
 The second algorithm hidden in :cpp:class:`tamaas::PolonskyKeerRey` considers the **gap** to be the unknown. The constraint on the mean value can be chosen as above::
 
   solver = tm.PolonskyKeerRey(model, surface, 1e-12,
                               primal_type=tm.PolonskyKeerRey.gap,
                               constraint_type=tm.PolonskyKeerRey.gap)
   solver.solve(1e-2)
 
 The advantage of this formulation is that it can solve adhesion problems (`Rey et al. <https://doi.org/10.1007/s00466-017-1392-5>`_). This is done by adding a term to the potential energy functional that the solver tries to minimize::
 
     adhesion_params = {
 	"rho": 2e-3,
 	"surface_energy": 2e-5
     }
 
     adhesion = tm.ExponentialAdhesionFunctional(surface)
     adhesion.setParameters(adhesion_params)
     solver.addFunctionalTerm(adhesion)
 
     solver.solve(1e-2)
 
 Custom classes can be used in place of the example term here. One has to inherit from :cpp:class:`tamaas::Functional`::
 
   
     class AdhesionPython(tm.Functional):
 	"""
 	Functional class that extends a C++ class and implements the virtual
 	methods
 	"""
 	def __init__(self, rho, gamma):
 	    super().__init__(self)
 	    self.rho = rho
 	    self.gamma = gamma
 
 	def computeF(self, gap, pressure):
 	    return -self.gamma * np.sum(np.exp(-gap / self.rho))
 
 	def computeGradF(self, gap, gradient):
 	    gradient += self.gamma * np.exp(-gap / self.rho) / self.rho
 
 This example is actually equivalent to :cpp:class:`tamaas::functional::ExponentialAdhesionFunctional`.
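 
 As a sketch, the custom class above can be attached to a solver in the same
 way as the built-in functional (reusing the ``model`` and ``surface`` objects
 from the previous examples)::
 
   adhesion = AdhesionPython(2e-3, 2e-5)
   solver = tm.PolonskyKeerRey(model, surface, 1e-12,
                               primal_type=tm.PolonskyKeerRey.gap,
                               constraint_type=tm.PolonskyKeerRey.gap)
   solver.addFunctionalTerm(adhesion)
   solver.solve(1e-2)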
 
 Tangential contact
 ------------------
 
 For frictional contact, several solver classes are available. Among them, :cpp:class:`tamaas::Condat` can solve a coupled normal/tangential contact problem regardless of the material properties. Note however that it solves an associated version of the Coulomb friction law: in general, the exact Coulomb friction law makes the contact problem ill-posed.
 
 Solving a tangential contact problem is not much different from normal contact::
 
   mu = 0.3  # Friction coefficient
   solver = tm.Condat(model, surface, 1e-12, mu)
-  solver.setMaxIterations(5000)  # The default of 1000 may be to little
+  solver.setMaxIterations(5000)  # The default of 1000 may be too little
   solver.solve([1e-2, 0, 1e-2])  # 3D components of applied mean pressure
 
 Elasto-plastic contact
 ----------------------
 
 For elastic-plastic contact, one needs three different solvers: an elastic contact solver like the ones described above, a non-linear solver and a coupling solver. The non-linear solvers available in Tamaas are implemented in python and inherit from the C++ class :cpp:class:`tamaas::EPSolver`. They make use of the non-linear solvers available in scipy:
 
 :py:class:`DFSANESolver <tamaas.nonlinear_solvers.DFSANESolver>`
     Implements an interface to :py:func:`scipy.optimize.root` with the DFSANE method.
 
 :py:class:`NewtonKrylovSolver <tamaas.nonlinear_solvers.NewtonKrylovSolver>`
     Implements an interface to :py:func:`scipy.optimize.newton_krylov`.
 
 These solvers require a residual vector to drive to zero; its creation is handled by :cpp:class:`tamaas::ModelFactory`. Then, an instance of :cpp:class:`tamaas::EPICSolver` is needed to couple the contact and non-linear solvers for the elastic-plastic contact problem::
 
   import tamaas as tm
 
   from tamaas.nonlinear_solvers import DFSANESolver
 
   # Definition of modeled domain
   model_type = tm.model_type.volume_2d
   discretization = [32, 51, 51]  # Order: [z, x, y]
   flat_domain = [1, 1]
   system_size = [0.5] + flat_domain

   # Create the model object (type, physical size, discretization)
   model = tm.ModelFactory.createModel(model_type, system_size,
                                       discretization)
 
   # Setup for plasticity
   residual = tm.ModelFactory.createResidual(model,
                                             sigma_y=0.1 * model.E,
                                             hardening=0.01 * model.E)
   epsolver = DFSANESolver(residual, model)
 
   # ...
 
   csolver = tm.PolonskyKeerRey(model, surface, 1e-12)
 
   epic = tm.EPICSolver(csolver, epsolver, 1e7, relaxation=0.3)
   epic.solve(1e-3)
 
   # or use an accelerated scheme (which can sometimes not converge)
 
   epic.acceleratedSolve(1e-3)
 
 By default, :cpp:func:`tamaas::EPICSolver::solve` uses a relaxed fixed point. It can be tricky to make it converge: you may need to decrease the relaxation parameter passed as an argument to the constructor, but a smaller value also hinders the convergence rate. The function :cpp:func:`tamaas::EPICSolver::acceleratedSolve` does not require tweaking a relaxation parameter, so it can be faster when the relaxation parameter is not optimal. However, it is not guaranteed to converge.
 
 Finally, during the first iterations the fixed-point error is large compared to the error of the non-linear solver. Performance can therefore be improved by letting the tolerance of the non-linear solver (the most expensive step of the fixed-point solve) decrease over the iterations. This can be achieved with the decorator class :py:class:`ToleranceManager <tamaas.nonlinear_solvers.ToleranceManager>`::
 
   from tamaas.nonlinear_solvers import ToleranceManager, DFSANESolver
 
   @ToleranceManager(start=1e-2,
                     end=1e-9,
                     rate=1 / 3)
   class Solver(DFSANESolver):
       pass
 
   # ...
 
   epsolver = Solver(residual, model)
diff --git a/doc/sphinx/source/index.rst b/doc/sphinx/source/index.rst
index d051d2de..7403e84b 100644
--- a/doc/sphinx/source/index.rst
+++ b/doc/sphinx/source/index.rst
@@ -1,185 +1,209 @@
 .. Tamaas documentation master file, created by sphinx-quickstart on Mon Apr 29
    18:08:33 2019. You can adapt this file completely to your liking, but it
    should at least contain the root `toctree` directive.
 
 Tamaas --- A blazingly fast rough contact library
 =================================================
 
 Tamaas (from تماس meaning "contact" in Arabic and Farsi) is a high-performance
 rough-surface periodic contact code based on boundary and volume integral
 equations. The clever mathematical formulation of the underlying numerical
 methods (see e.g. `10.1007/s00466-017-1392-5
 <https://doi.org/10.1007/s00466-017-1392-5>`_ and `arXiv:1811.11558
 <https://arxiv.org/abs/1811.11558>`_) allows the use of the fast-Fourier
 Transform, a great help in achieving peak performance: Tamaas is consistently
 **two orders of magnitude faster** (and lighter) than traditional FEM! Tamaas is
 aimed at researchers and practitioners wishing to compute realistic contact
 solutions for the study of interface phenomena.
 
 Quickstart
 ----------
 
 Here is a quick introduction to get you started with Tamaas.
 
 Installation
 ^^^^^^^^^^^^
 
 First make sure the following dependencies are installed for Tamaas:
 
 - a **C++ compiler** with full **C++14** and **OpenMP** support
 - **SCons** (python build system)
 - **FFTW3** compiled with **OpenMP** support
 - **thrust** (1.9.2+)
 - **boost** (pre-processor)
 - **python 3+** (probably works with python 2, but it is not tested) with
   **numpy**
 - **pybind11** (included as submodule)
 - **expolit** (included as submodule)
 
 Optional dependencies are:
 
 - **scipy** (for nonlinear solvers)
 - **uvw** (for dumpers)
 - **googletest** and **pytest** (for tests)
 - **Doxygen** and **Sphinx** (for documentation)
 
 .. tip:: In an HPC environment, use the following variables to specify where the
    dependencies are located:
 
    - ``FFTW_ROOT``
    - ``THRUST_ROOT``
    - ``BOOST_ROOT``
    - ``PYBIND11_ROOT``
    - ``GTEST_ROOT``
 
 .. note:: You can use the provided Dockerfile to build an image with the
    external dependencies installed.
 
 You should first clone the git submodules that Tamaas depends on
 (`expolit <https://c4science.ch/source/expolit/>`_, `pybind11
 <https://github.com/pybind/pybind11>`_ and `googletest
 <https://github.com/google/googletest>`_)::
 
     git submodule update --init --recursive
 
 The build system uses `SCons <https://scons.org/>`_. In order to compile Tamaas
 with the default options::
 
     scons
 
 After compiling for the first time, you can edit the compilation options in
 the file ``build-setup.conf``, or alternatively supply the options directly on
 the command line::
 
     scons option=value [...]
 
 To get a list of **all** build options and their possible values, you can run
 ``scons -h``. You can run ``scons -H`` to see the SCons-specific options (among
 them ``-j n`` executes the build with ``n`` threads and ``-c`` cleans the
 build). Note that the build is aware of the ``CXX`` and ``CXXFLAGS`` environment
 variables.
 
 Once compiled, you can install the python module with::
 
   scons install prefix=/your/prefix
 
 The above command automatically calls ``pip(3)`` if it is installed, and falls
 back on ``setuptools`` otherwise. If you do not want to install Tamaas, you can
 use the following command::
 
   scons dev
 
 This creates a symlink in ``~/.local/lib/<python version>/site-packages`` and
 is equivalent to ``pip(3) install -e`` or ``./setup.py develop``. You can check
 that everything is working fine with::
 
   python3 -c 'import tamaas; print(tamaas)'
 
 
 Using Docker
 ............
 
 The Tamaas repository provides a ``Dockerfile`` that describes an appropriate
 build environment. You can use it in the following way::
 
   # Build the image, run it and mount the tamaas repository
   docker build -t tamaas_build .
   docker run -v $PWD:/app/tamaas -it tamaas_build bash
 
   # Once in the image shell: compile and install
   cd /app/tamaas
   scons
   scons dev
 
 The image also has some of the dependencies required to run the examples below
 (matplotlib, uvw).
 
 Running the contact pipe tools
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 In the ``examples/pipe_tools`` folder, you will find three scripts that can be
 used to explore the mechanics of elastic rough contact:
 
 - ``surface`` generates randomly rough surfaces (see :doc:`rough_surfaces`)
 - ``contact`` solves a contact problem with a surface read from ``stdin`` (see
   :doc:`contact`)
 - ``plot`` simply plots the surface tractions and displacements read from
   ``stdin``
 
 Here's a sample command line for you to try out::
 
   ./surface --cutoffs 2 4 64 --size 512 512 --hurst 0.8 | ./contact 1 | ./plot
 
 Check out the help of each script for a description of the arguments.
 
 Library overview
 ----------------
 
 Tamaas is mainly composed of three components:
 
 - Random surface generation procedures
 - Model state objects and operators
 - Contact solving procedures
 
 These parts are meant to be independent as well as interoperable. The first one
 provides an interface to several stochastic surface generation algorithms
 described in :doc:`rough_surfaces`. The second provides an interface to a state
 object :py:class:`Model <tamaas._tamaas.Model>` (and C++ class
 :cpp:class:`tamaas::Model`) as well as integral operators based on the state
 object (see :doc:`model`). Finally, the third part provides contact solving
 routines that make use of the integral operators for performance (see
 :doc:`contact`).
 
+Seeking help
+------------
+
+You can ask your questions on `c4science <https://c4science.ch>`_ using this
+`form
+<https://c4science.ch/maniphest/task/edit/?owner=frerot&projectPHIDs=tamaas&view=public>`_.
+If you do not have an account, you can create one `here
+<https://c4science.ch/auth/start/?next=%2F>`_.
+
 Contribution
 ------------
 
 You can contribute to Tamaas by reporting any bugs you find `here
 <https://c4science.ch/maniphest/task/edit/?owner=frerot&projectPHIDs=tamaas&view=public>`_
 if you have an account on `c4science <https://c4science.ch>`_.
 
 To contribute code to Tamaas, you can use `Arcanist
 <https://secure.phabricator.com/book/phabricator/article/arcanist/>`_ to send
 code differentials. To know if you break anything, you can run the tests with::
 
   scons test
 
 Make sure you have `pytest <https://docs.pytest.org/en/latest/>`_ installed.
 
+In a nutshell, the process to contribute is:
+
+1. Create a branch for the modifications you wish to submit
+2. Work on your branch (commits)
+3. ``arc diff`` to send your code for review
+4. Commit any requested changes
+5. ``arc diff`` to send your modifications
+
+For reviewers:
+
+1. Checkout a code differential using ``arc patch D???``
+2. Accept the code differential on `c4science <https://c4science.ch>`_.
+3. ``arc land`` to merge the differential
+4. Profit with ``arc anoid``
+
 
 .. toctree::
    :maxdepth: 2
    :caption: Table of Contents:
 
    ./rough_surfaces
    ./model
    ./contact
    ./api_reference
 
 
 
 Indices and tables
 ==================
 
 * :ref:`genindex`
 * :ref:`modindex`
 * :ref:`search`
diff --git a/joss/paper.bibtex b/joss/paper.bibtex
index c6d96f6c..b374579e 100644
--- a/joss/paper.bibtex
+++ b/joss/paper.bibtex
@@ -1,292 +1,330 @@
 
 @book{bonnet_boundary_1995,
   title = {Boundary Integral Equation Methods for Solids and Fluids},
   author = {Bonnet, Marc},
   year = {1995},
   publisher = {{J. Wiley}},
   address = {{Chichester ; New York}},
   isbn = {978-0-471-97184-9},
   keywords = {Boundary element methods,Fluid mechanics,Mathematics,Mechanics; Applied},
   lccn = {TA347.B69 B6848 1995},
   note = {OCLC: ocm41368652}
 }
 
 @article{frerot_crack_2019,
   title = {Crack {{Nucleation}} in the {{Adhesive Wear}} of an {{Elastic}}-{{Plastic Half}}-{{Space}}},
   author = {Fr{\'e}rot, Lucas and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2019},
   month = oct,
   abstract = {The detachment of material in an adhesive wear process is driven by a fracture mechanism which is controlled by a critical length-scale. Previous efforts in multi-asperity wear modeling have applied this microscopic process to rough elastic contact. However, experimental data shows that the assumption of purely elastic deformation at rough contact interfaces is unrealistic, and that asperities in contact must deform plastically to accommodate the large contact stresses. We therefore investigate the consequences of plastic deformation on the macro-scale wear response. The crack nucleation process in a rough elastic-plastic contact is investigated in a comparative study with a classical \$J\_2\$ plasticity approach and a saturation plasticity model. We show that plastic residual deformations in the \$J\_2\$ model heighten the surface tensile stresses, leading to a higher crack nucleation likelihood for contacts. This effect is shown to be stronger when the material is more ductile. We also show that elastic interactions between contacts can increase the likelihood of individual contacts nucleating cracks, irrespective of the contact constitutive model. This is confirmed by a statistical approach we develop based on a Greenwood--Williamson model modified to take into account the elastic interactions between contacts and the shear strength of the contact junction.},
   archivePrefix = {arXiv},
   eprint = {1910.05163},
   eprinttype = {arxiv},
   journal = {arXiv:1910.05163 [cond-mat]},
   keywords = {Condensed Matter - Soft Condensed Matter},
   primaryClass = {cond-mat}
 }
 
 @article{frerot_fourieraccelerated_2019,
   title = {A {{Fourier}}-Accelerated Volume Integral Method for Elastoplastic Contact},
   author = {Fr{\'e}rot, Lucas and Bonnet, Marc and Molinari, Jean-Fran{\c c}ois and Anciaux, Guillaume},
   year = {2019},
   month = jul,
   volume = {351},
   pages = {951--976},
   issn = {0045-7825},
   doi = {10.1016/j.cma.2019.04.006},
   abstract = {The contact of solids with rough surfaces plays a fundamental role in physical phenomena such as friction, wear, sealing, and thermal transfer. However, its simulation is a challenging problem due to surface asperities covering a wide range of length-scales. In addition, non-linear local processes, such as plasticity, are expected to occur even at the lightest loads. In this context, robust and efficient computational approaches are required. We therefore present a novel numerical method, based on integral equations, capable of handling the large discretization requirements of real rough surfaces as well as the non-linear plastic flow occurring below and at the contacting asperities. This method is based on a new derivation of the Mindlin fundamental solution in Fourier space, which leverages the computational efficiency of the fast Fourier transform. The use of this Mindlin solution allows a dramatic reduction of the memory imprint (as the Fourier coefficients are computed on-the-fly), a reduction of the discretization error, and the exploitation of the structure of the functions to speed up computation of the integral operators. We validate our method against an elastic\textendash{}plastic FEM Hertz normal contact simulation and showcase its ability to simulate contact of rough surfaces with plastic flow.},
   copyright = {All rights reserved},
   journal = {Computer Methods in Applied Mechanics and Engineering},
   keywords = {Condensed Matter - Soft Condensed Matter,Contact,Fourier,Mindlin,Physics - Computational Physics,Plasticity,Rough surface,Volume integral equation}
 }
 
 @article{frerot_mechanistic_2018,
   title = {A Mechanistic Understanding of the Wear Coefficient: {{From}} Single to Multiple Asperities Contact},
   shorttitle = {A Mechanistic Understanding of the Wear Coefficient},
   author = {Fr{\'e}rot, Lucas and Aghababaei, Ramin and Molinari, Jean-Fran{\c c}ois},
   year = {2018},
   month = may,
   volume = {114},
   pages = {172--184},
   issn = {0022-5096},
   doi = {10.1016/j.jmps.2018.02.015},
   abstract = {Sliding contact between solids leads to material detaching from their surfaces in the form of debris particles, a process known as wear. According to the well-known Archard wear model, the wear volume (i.e. the volume of detached particles) is proportional to the load and the sliding distance, while being inversely proportional to the hardness. The influence of other parameters are empirically merged into a factor, referred to as wear coefficient, which does not stem from any theoretical development, thus limiting the predictive capacity of the model. Based on a recent understanding of a critical length-scale controlling wear particle formation, we present two novel derivations of the wear coefficient: one based on Archard's interpretation of the wear coefficient as the probability of wear particle detachment and one that follows naturally from the up-scaling of asperity-level physics into a generic multi-asperity wear model. As a result, the variation of wear rate and wear coefficient are discussed in terms of the properties of the interface, surface roughness parameters and applied load for various rough contact situations. Both new wear interpretations are evaluated analytically and numerically, and recover some key features of wear observed in experiments. This work shines new light on the understanding of wear, potentially opening a pathway for calculating the wear coefficient from first principles.},
   copyright = {All rights reserved},
   journal = {Journal of the Mechanics and Physics of Solids},
   keywords = {Cluster statistics,Contact,Self-affine surface,Wear coefficient}
 }
 
 @article{hu_simulation_1992,
   title = {Simulation of 3-{{D}} Random Rough Surface by 2-{{D}} Digital Filter and Fourier Analysis},
   author = {Hu, Y.Z. and Tonder, K.},
   year = {1992},
   month = feb,
   volume = {32},
   pages = {83--90},
   issn = {08906955},
   doi = {10.1016/0890-6955(92)90064-N},
   journal = {International Journal of Machine Tools and Manufacture},
   language = {en},
   number = {1-2}
 }
 
 @article{mindlin_thermoelastic_1950,
   title = {Thermoelastic {{Stress}} in the {{Semi}}-{{Infinite Solid}}},
   author = {Mindlin, Raymond D. and Cheng, David H.},
   year = {1950},
   month = sep,
   volume = {21},
   pages = {931--933},
   issn = {0021-8979},
   doi = {10.1063/1.1699786},
   journal = {Journal of Applied Physics},
   number = {9}
 }
 
 @article{persson_nature_2005,
   title = {On the Nature of Surface Roughness with Application to Contact Mechanics, Sealing, Rubber Friction and Adhesion},
   author = {Persson, B. N. J. and Albohr, O. and Tartaglino, U. and Volokitin, A. I. and Tosatti, E.},
   year = {2005},
   volume = {17},
   pages = {R1},
   issn = {0953-8984},
   doi = {10.1088/0953-8984/17/1/R01},
   abstract = {Surface roughness has a huge impact on many important phenomena. The most important property of rough surfaces is the surface roughness power spectrum C ( q ). We present surface roughness power spectra of many surfaces of practical importance, obtained from the surface height profile measured using optical methods and the atomic force microscope. We show how the power spectrum determines the contact area between two solids. We also present applications to sealing, rubber friction and adhesion for rough surfaces, where the power spectrum enters as an important input.},
   journal = {Journal of Physics: Condensed Matter},
   language = {en},
   number = {1}
 }
 
 @article{polonsky_numerical_1999,
   title = {A Numerical Method for Solving Rough Contact Problems Based on the Multi-Level Multi-Summation and Conjugate Gradient Techniques},
   author = {Polonsky, I. A. and Keer, L. M.},
   year = {1999},
   month = jul,
   volume = {231},
   pages = {206--219},
   issn = {0043-1648},
   doi = {10.1016/S0043-1648(99)00113-1},
   abstract = {An alternative numerical method for solving contact problems for real rough surfaces is proposed. The real area of contact and the contact pressure distribution are determined using a single-loop iteration scheme based on the conjugate gradient method, which converges for arbitrary rough surfaces. The surface deflections and subsurface stresses are computed using an alternative two-dimensional multi-level multi-summation algorithm, which allows the summation error to be kept under the discretization error for any number of contact points. The proposed method is fast: rough contact problems for surface samples with 105\textendash{}106 data points are solved on a personal computer in a few hours. The numerical algorithms are described in full detail so that an interested reader can implement the new contact solver in a computer code. Numerical examples demonstrating the method advantages are presented. The method is compared with other fast contact solvers that have emerged in the last few years.},
   journal = {Wear},
   keywords = {Conjugate gradient techniques,Multi-level multi-summation,Rough contact problems},
   number = {2}
 }
 
 @article{renard_constant_2013,
   title = {Constant Dimensionality of Fault Roughness from the Scale of Micro-Fractures to the Scale of Continents},
   author = {Renard, Fran{\c c}ois and Candela, Thibault and Bouchaud, Elisabeth},
   year = {2013},
   volume = {40},
   pages = {83--87},
   issn = {1944-8007},
   doi = {10.1029/2012GL054143},
   abstract = {Many faults and fractures in various natural and man-made materials share a remarkable common fractal property in their morphology. We report on the roughness of faults in rocks by analyzing the out-of-plane fluctuations of slip surfaces. They display a statistical power-law relationship with a nearly constant fractal exponent from millimeter scale micro-fractures in fault zones to coastlines measuring thousands of kilometers that have recorded continental breakup. A possible origin of this striking fractal relationship over 11 orders of magnitude of length scales is that all faulting processes in rocks share common characteristics that play a crucial role in the shaping of fault surfaces, such as the effects of elastic long-range stress interactions and stress screening by mechanical heterogeneities during quasi-static fracture growth.},
   copyright = {\textcopyright{}2012. American Geophysical Union. All Rights Reserved.},
   journal = {Geophysical Research Letters},
   language = {en},
   number = {1}
 }
 
 @article{rey_normal_2017,
   title = {Normal Adhesive Contact on Rough Surfaces: Efficient Algorithm for {{FFT}}-Based {{BEM}} Resolution},
   shorttitle = {Normal Adhesive Contact on Rough Surfaces},
   author = {Rey, Valentine and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2017},
   month = mar,
   pages = {1--13},
   issn = {0178-7675, 1432-0924},
   doi = {10.1007/s00466-017-1392-5},
   abstract = {We introduce a numerical methodology to compute the solution of an adhesive normal contact problem on rough surfaces with the Boundary Element Method. Based on the Fast Fourier Transform and the Westergaard's fundamental solution, the proposed algorithm enables to solve efficiently the constrained minimization problem: the numerical solution strictly verifies contact orthogonality and the algorithm takes advantage of the constraints to speed up the minimization. Comparisons with the analytical solution of the Hertz case prove the quality of the numerical computation. The method is also used to compute normal adhesive contact between rough surfaces made of multiple asperities.},
   journal = {Computational Mechanics},
   language = {en}
 }
 
 @article{rey_quantifying_2019,
   title = {Quantifying Uncertainties in Contact Mechanics of Rough Surfaces Using the Multilevel {{Monte Carlo}} Method},
   author = {Rey, V. and Krumscheid, S. and Nobile, F.},
   year = {2019},
   month = may,
   volume = {138},
   pages = {50--64},
   issn = {0020-7225},
   doi = {10.1016/j.ijengsci.2019.02.003},
   abstract = {We quantify the effect of uncertainties on quantities of interest related to contact mechanics of rough surfaces. Specifically, we consider the problem of frictionless non adhesive normal contact between two semi infinite linear elastic solids subject to uncertainties. These uncertainties may for example originate from an incomplete surface description. To account for surface uncertainties, we model a rough surface as a suitable Gaussian random field whose covariance function encodes the surface's roughness, which is experimentally measurable. Then, we introduce the multilevel Monte Carlo method which is a computationally efficient sampling method for the computation of the expectation and higher statistical moments of uncertain system output's, such as those derived from contact simulations. In particular, we consider two different quantities of interest, namely the contact area and the number of contact clusters, and show via numerical experiments that the multilevel Monte Carlo method offers significant computational gains compared to an approximation via a classic Monte Carlo sampling.},
   journal = {International Journal of Engineering Science},
   keywords = {Contact clusters,Frictionless contact,Multilevel Monte Carlo,Random surface generator,Rough surfaces}
 }
 
 @article{rey_stability_2018,
   title = {Stability Analysis of Rough Surfaces in Adhesive Normal Contact},
   author = {Rey, Valentine and Bleyer, Jeremy},
   year = {2018},
   month = mar,
   pages = {1--13},
   issn = {0178-7675, 1432-0924},
   doi = {10.1007/s00466-018-1556-y},
   abstract = {This paper deals with adhesive frictionless normal contact between one elastic flat solid and one stiff solid with rough surface. After computation of the equilibrium solution of the energy minimization principle and respecting the contact constraints, we aim at studying the stability of this equilibrium solution. This study of stability implies solving an eigenvalue problem with inequality constraints. To achieve this goal, we propose a proximal algorithm which enables qualifying the solution as stable or unstable and that gives the instability modes. This method has a low computational cost since no linear system inversion is required and is also suitable for parallel implementation. Illustrations are given for the Hertzian contact and for rough contact.},
   journal = {Computational Mechanics},
   language = {en}
 }
 
 @article{richart_implementation_2015,
   title = {Implementation of a Parallel Finite-Element Library: {{Test}} Case on a Non-Local Continuum Damage Model},
   shorttitle = {Implementation of a Parallel Finite-Element Library},
   author = {Richart, N. and Molinari, J. F.},
   year = {2015},
   month = aug,
   volume = {100},
   pages = {41--46},
   issn = {0168-874X},
   doi = {10.1016/j.finel.2015.02.003},
   abstract = {This paper presents an efficient method to implement a damage law within an explicit time-integration scheme, in an open-source object-oriented finite-element framework. The hybrid object/vector design of the framework and implementation choices are detailed in the special case of non-local continuum damage constitutive laws. The computationally demanding aspect of such constitutive laws requires efficient algorithms, capable of using High Performance Computing (HPC) clusters. The performance of our approach is demonstrated on a numerically and physically challenging 3D dynamic brittle-fragmentation test case. An almost perfect scalability is achieved on parallel computations. The global dynamics and energy terms are in good agreement with classical cohesive models' predictions.},
   journal = {Finite Elements in Analysis and Design},
   keywords = {Continuum damage,Finite element method,Non-local approach,Parallel computing}
 }
 
 @article{stanley_fftbased_1997,
   title = {An {{FFT}}-{{Based Method}} for {{Rough Surface Contact}}},
   author = {Stanley, H. M. and Kato, T.},
   year = {1997},
   month = jul,
   volume = {119},
   pages = {481--485},
   issn = {0742-4787},
   doi = {10.1115/1.2833523},
   abstract = {Elastic contact between a rigid plane and a halfspace whose surface height is described by a bandwidth-limited Fourier series is considered. The surface normal displacements and contact pressures are found by a numerical technique that exploits the structure of the Fast Fourier Transform (FFT) and an exact result in linear elasticity. The multiscale nature of rough surface contact is implicit to the method, and features such as contact agglomeration and asperity interaction\textemdash{}a source of difficulty for asperity-based models\textemdash{}evolve naturally. Both two-dimensional (2-D) and three-dimensional (3-D) contact are handled with equal ease. Finally, the implementation is simple, compact, and fast.},
   journal = {Journal of Tribology},
   number = {3}
 }
 
 @article{telles_application_1979,
   title = {On the Application of the Boundary Element Method to Plasticity},
   author = {Telles, J. C. F. and Brebbia, C. A.},
   year = {1979},
   month = dec,
   volume = {3},
   pages = {466--470},
   issn = {0307-904X},
   doi = {10.1016/S0307-904X(79)80030-X},
   journal = {Applied Mathematical Modelling},
   number = {6}
 }
 
 @article{yastrebov_accurate_2017,
   title = {On the Accurate Computation of the True Contact-Area in Mechanical Contact of Random Rough Surfaces},
   author = {Yastrebov, Vladislav A. and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2017},
   issn = {0301-679X},
   doi = {10.1016/j.triboint.2017.04.023},
   abstract = {We introduce a corrective function to compensate errors in contact area computations coming from mesh discretization. The correction is based on geometrical arguments, and apart from the contact area itself requires only one additional quantity to be computed: the length of contact/non-contact interfaces. The new technique enables to evaluate accurately the true contact area using a very coarse mesh, for which the shortest wavelength in the surface spectrum reaches the grid size. The validity of the approach is demonstrated for surfaces with different fractal dimensions and different spectral content using a properly designed mesh convergence test. In addition, we use a topology preserving smoothing technique to adjust the morphology of contact clusters obtained with a coarse grid.},
   journal = {Tribology International},
   keywords = {Boundary element Method,Contact area,Contact area correction,Simulations,surface roughness}
 }
 
 @article{yastrebov_contact_2012,
   title = {Contact between Representative Rough Surfaces},
   author = {Yastrebov, Vladislav A. and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2012},
   month = sep,
   volume = {86},
   issn = {1539-3755, 1550-2376},
   doi = {10.1103/PhysRevE.86.035601},
   journal = {Physical Review E},
   language = {en},
   number = {3}
 }
 
 @article{yastrebov_contact_2014,
   title = {The {{Contact}} of {{Elastic Regular Wavy Surfaces Revisited}}},
   author = {Yastrebov, Vladislav A. and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2014},
   month = oct,
   volume = {56},
   pages = {171--183},
   issn = {1023-8883, 1573-2711},
   doi = {10.1007/s11249-014-0395-z},
   abstract = {We revisit the classic problem of an elastic solid with a two-dimensional wavy surface squeezed against an elastic flat half-space from infinitesimal to full contact. Through extensive numerical calculations and analytic derivations, we discover previously overlooked transition regimes. These are seen in particular in the evolution with applied load of the contact area and perimeter, the mean pressure and the probability density of contact pressure. These transitions are correlated with the contact area shape, which is affected by long range elastic interactions. Our analysis has implications for general random rough surfaces, as similar local transitions occur continuously at detached areas or coalescing contact zones. We show that the probability density of null contact pressures is nonzero at full contact. This might suggest revisiting the conditions necessary for applying Persson's model at partial contacts and guide the comparisons with numerical simulations. We also address the evaluation of the contact perimeter for discrete geometries and the applicability of Westergaard's solution for three-dimensional geometries.},
   journal = {Tribology Letters},
   language = {en},
   number = {1}
 }
 
 @article{yastrebov_infinitesimal_2015,
   title = {From Infinitesimal to Full Contact between Rough Surfaces: {{Evolution}} of the Contact Area},
   shorttitle = {From Infinitesimal to Full Contact between Rough Surfaces},
   author = {Yastrebov, Vladislav A. and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2015},
   month = jan,
   volume = {52},
   pages = {83--102},
   issn = {00207683},
   doi = {10.1016/j.ijsolstr.2014.09.019},
   journal = {International Journal of Solids and Structures},
   language = {en}
 }
 
 @article{yastrebov_role_2017,
   title = {The Role of the Roughness Spectral Breadth in Elastic Contact of Rough Surfaces},
   author = {Yastrebov, Vladislav A. and Anciaux, Guillaume and Molinari, Jean-Fran{\c c}ois},
   year = {2017},
   month = oct,
   volume = {107},
   pages = {469--493},
   issn = {0022-5096},
   doi = {10.1016/j.jmps.2017.07.016},
   abstract = {We study frictionless and non-adhesive contact between elastic half-spaces with self-affine surfaces. Using a recently suggested corrective technique, we ensure an unprecedented accuracy in computation of the true contact area evolution under increasing pressure. This accuracy enables us to draw conclusions on the role of the surface's spectrum breadth (Nayak parameter) in the contact area evolution. We show that for a given normalized pressure, the contact area decreases logarithmically with the Nayak parameter. By linking the Nayak parameter with the Hurst exponent (or fractal dimension), we show the effect of the latter on the true contact area. This effect, undetectable for surfaces with poor spectral content, is quite strong for surfaces with rich spectra. Numerical results are compared with analytical models and other available numerical results. A phenomenological equation for the contact area growth is suggested with coefficients depending on the Nayak parameter. Using this equation, the pressure-dependent friction coefficient is deduced based on the adhesive theory of friction. Some observations on Persson's model of rough contact, whose prediction does not depend on Nayak parameter, are reported. Overall, the paper provides a unifying picture of rough elastic contact and clarifies discrepancies between preceding results.},
   journal = {Journal of the Mechanics and Physics of Solids},
   keywords = {Contact area,Hurst exponent,Nayak parameter,Pressure-dependent friction,Roughness,Spectrum breadth}
 }
 
 @article{brink_parameter_2020,
   title = {A Parameter-Free Mechanistic Model of the Adhesive Wear Process of Rough Surfaces in Sliding Contact},
   author = {Brink, Tobias and Fr{\'e}rot, Lucas and Molinari, Jean-Fran{\c c}ois},
   year = {2020},
   month = apr,
   archivePrefix = {arXiv},
   eprint = {2004.00559},
   eprinttype = {arxiv},
   journal = {arXiv:2004.00559 [physics]},
   keywords = {Physics - Applied Physics},
   primaryClass = {physics}
 }
 
+@article{archard_elastic_1957,
+  title = {Elastic Deformation and the Laws of Friction},
+  author = {Archard, J. F.},
+  year = {1957},
+  volume = {243},
+  pages = {190--205},
+  journal = {Proceedings of the Royal Society of London. Series A. Mathematical and Physical Sciences},
+  number = {1233},
+  publisher = {The Royal Society}
+}
+
+@article{condat_primal_2012,
+  title = {A {{Primal}}\textendash{{Dual Splitting Method}} for {{Convex Optimization Involving Lipschitzian}}, {{Proximable}} and {{Linear Composite Terms}}},
+  author = {Condat, Laurent},
+  year = {2012},
+  month = dec,
+  volume = {158},
+  pages = {460--479},
+  issn = {0022-3239, 1573-2878},
+  doi = {10.1007/s10957-012-0245-9},
+  journal = {Journal of Optimization Theory and Applications},
+  language = {en},
+  number = {2}
+}
+
+@article{almqvist_dry_2007,
+  title = {On the Dry Elasto-Plastic Contact of Nominally Flat Surfaces},
+  author = {Almqvist, A. and Sahlin, F. and Larsson, R. and Glavatskih, S.},
+  year = {2007},
+  month = apr,
+  volume = {40},
+  pages = {574--579},
+  issn = {0301679X},
+  doi = {10.1016/j.triboint.2005.11.008},
+  journal = {Tribology International},
+  language = {en},
+  number = {4}
+}
diff --git a/joss/paper.md b/joss/paper.md
index 18af7676..d47a550e 100644
--- a/joss/paper.md
+++ b/joss/paper.md
@@ -1,150 +1,159 @@
 ---
 title: 'Tamaas: a library for elastic-plastic contact of periodic rough surfaces'
 tags:
   - C++
   - Python
   - contact
   - rough surface
   - plasticity
 authors:
   - name: Lucas Frérot
     orcid: 0000-0002-4138-1052
     affiliation: 1
   - name: Guillaume Anciaux
     orcid: 0000-0002-9624-5621
     affiliation: 1
   - name: Valentine Rey
     orcid: 0000-0003-1019-1819
     affiliation: 2
   - name: Son Pham-Ba
     orcid: 0000-0003-3451-7297
     affiliation: 1
   - name: Jean-François Molinari
     orcid: 0000-0002-1728-1844
     affiliation: 1
 affiliations:
  - name: Civil Engineering Institute, École Polytechnique Fédérale de Lausanne, Switzerland
    index: 1
  - name: Université de Nantes Sciences et Techniques, Nantes, France
    index: 2
 
 date: 13 December 2019
 bibliography: paper.bibtex
 ---
 
 # Summary
 
 Physical phenomena that happen at solid contact interfaces, such as
 friction and wear, are largely entwined with the roughness of the
 surfaces in contact. For example, the fact that the friction force
 between two solids in contact is independent of their apparent contact
 area is due to roughness, as the solids are only in contact over a
-smaller "true contact area" which only depends on the normal force.
-Roughness occurs on most man-made and natural surfaces
+smaller "true contact area", which depends only on the normal force
+[@archard_elastic_1957]. Roughness occurs on most man-made and natural surfaces
 [@persson_nature_2005] and can span many orders of magnitude, from the
 nanometer scale to the kilometer scale [@renard_constant_2013]. This
 poses a serious challenge to conventional numerical approaches in
 solid mechanics such as the finite-element method (FEM).
 
 Boundary integral methods [@bonnet_boundary_1995] are commonly
 employed in place of the FEM for rough elastic contact because of an
 inherent dimensionality reduction: the computational effort is focused
 on the contact interface whereas the FEM requires discretization of
 the volume of the solids in contact. In addition, the use of a
 half-space geometry provides a translational invariance: the
 computation of periodic equilibrium solutions can then be accelerated
 with the fast-Fourier Transform [@stanley_fftbased_1997].
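+
+For a periodic half-space, the elastic surface operator is diagonal in Fourier
+space: the spectrum of the normal surface displacement follows from the
+pressure spectrum as $\tilde{u}(\mathbf{q}) = 2\,\tilde{p}(\mathbf{q})/(E^*|\mathbf{q}|)$,
+where $E^*$ is the contact modulus and $\mathbf{q}$ the wavevector. The short
+`numpy` sketch below illustrates this idea (it is not the internal ``Tamaas``
+implementation, and the function name is purely illustrative):
+
+```python
+import numpy as np
+
+
+def half_space_displacement(pressure, domain, e_star):
+    """Periodic surface displacement due to a normal pressure field."""
+    nx, ny = pressure.shape
+    qx = 2 * np.pi * np.fft.fftfreq(nx, d=domain[0] / nx)
+    qy = 2 * np.pi * np.fft.fftfreq(ny, d=domain[1] / ny)
+    q = np.sqrt(qx[:, None]**2 + qy[None, :]**2)
+    q[0, 0] = 1  # avoid division by zero for the zero-frequency mode
+    u_hat = 2 * np.fft.fft2(pressure) / (e_star * q)
+    u_hat[0, 0] = 0  # set the mean displacement to zero
+    return np.real(np.fft.ifft2(u_hat))
+```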
 
 However, because of the roughness, the total contact load is
 distributed over a small area and local contact pressures are expected
 to cause non-linear material behavior, such as plasticity. In this
 case, volume integral methods can be employed to account for plastic
 deformation [@telles_application_1979]. These enjoy properties
 analogous to boundary integral methods and can also be accelerated
 with a Fourier approach [@frerot_fourieraccelerated_2019].
 
 Taking plasticity into account is necessary for an accurate
 description of contact interfaces and the understanding of friction
 and wear. Moreover, high-performance implementations are needed to
 model realistic rough surfaces with roughness spanning many orders of
 magnitude in scale.
 
 ``Tamaas`` is a C++ library with a Python interface, developed in the
 [Computational Solid Mechanics
 Laboratory](https://www.epfl.ch/labs/lsms) at EPFL, that implements a
 unique Fourier-accelerated volume integral formulation of equilibrium
 [@frerot_fourieraccelerated_2019] for the solution of elastic-plastic
 rough contact problems. The use of C++ allows for a particular focus
 on performance: most loops are parallelized using ``Thrust/OpenMP`` and
 the fast-Fourier transforms are computed with ``FFTW3/OpenMP``. Thanks
 to this, it can handle simulations with upwards of 100 million degrees
 of freedom on a single compute node [@frerot_fourieraccelerated_2019].
 ``Tamaas`` is aimed at researchers and practitioners wishing to compute
 realistic contact solutions for the study of interface phenomena.
 
 # Features and Performance
 
 ``Tamaas`` provides access in its Python API to random rough surface
 generation procedures (e.g. @hu_simulation_1992), statistical tools
 (e.g. autocorrelation and power spectrum computations) and a variety
 of contact algorithms:
 
 - Normal and adhesive contact schemes based on the conjugate gradient
-  [@polonsky_numerical_1999; @rey_normal_2017];
-- Frictional contact;
-- Elastic-plastic contact [@frerot_fourieraccelerated_2019].
+  [@polonsky_numerical_1999; @rey_normal_2017], formulated with the boundary
+  integral method;
+- Associated frictional contact using proximal algorithms [@condat_primal_2012];
+- Elastic-plastic contact using the Fourier-accelerated volume integral method
+  [@frerot_fourieraccelerated_2019] and saturated surface pressure
+  [@almqvist_dry_2007].
+
+We are not aware of any public software package that implements all of the
+above methods, although the web-based platform
+[contact.engineering](https://contact.engineering/) also provides elastic
+normal contact solutions based on a boundary integral method.
 
 ``Tamaas`` also exposes in its Python API the accelerated linear
 operators it uses to compute equilibrium solutions, which makes
 prototyping new algorithms convenient.
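+
+As a minimal illustration (a sketch in the spirit of the accompanying test
+suite, with a deterministic cosine profile standing in for a randomly rough
+surface), an elastic normal contact problem can be set up and solved from
+Python as follows:
+
+```python
+import numpy as np
+import tamaas as tm
+
+tm.initialize()
+
+# Periodic elastic half-space discretized on a 128 x 128 surface grid
+model = tm.ModelFactory.createModel(tm.model_type.basic_2d,
+                                    [1., 1.], [128, 128])
+model.setElasticity(1., 0.3)  # Young's modulus and Poisson's ratio
+
+# Deterministic cosine profile used here in place of a rough surface
+x = np.linspace(0, 1, 128, endpoint=False, dtype=tm.dtype)
+x, y = np.meshgrid(x, x, indexing='ij')
+surface = 0.1 * np.cos(2 * np.pi * x) * np.cos(2 * np.pi * y)
+
+# Normal contact solve with the conjugate-gradient scheme
+solver = tm.PolonskyKeerRey(model, surface, 1e-12)
+solver.solve(0.01)  # mean applied pressure
+
+tm.finalize()
+```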
 
 We compare in figure 1 the scaling properties of ``Tamaas`` to a
 reference high-performance C++ FEM implementation named
 [``Akantu``](https://gitlab.com/akantu/akantu)
 [@richart_implementation_2015] which uses the direct solver
 [MUMPS](http://mumps.enseeiht.fr/). The reference problem is the
 elastic equilibrium of a half-space with an isotropic spherical
 inclusion [@mindlin_thermoelastic_1950], which is computed in serial
 for both implementations. $N$ represents the number of points in the
 computational domain. For large $N$, ``Tamaas`` is two orders of
 magnitude faster than ``Akantu``.
 
 ![Scaling comparison between the accelerated volume integral method
 implemented in ``Tamaas`` and a finite-element method with a direct
 solver for the solution of the equilibrium of a half-space with a
 spherical isotropic inclusion. $N$ is the number of points in the
 computational domain. When $N=2^{18}$ ``Tamaas`` is 200 times faster
 than the FEM implementation ``Akantu``.](complexity.pdf)
 
 Figure 2 shows the sub-surface plastic zones in a rough contact
 simulation, with color indicating their depth. The Fourier-accelerated
 approach allows an unprecedented level of detail on the topography of
 the zones, which can have an influence on friction and wear
 [@frerot_crack_2019].
 
 ![Sub-surface plastic zones in an elastic-plastic rough contact
 simulation. Lighter shades are zones deeper below the contact
 interface. The simulation used to produce this picture had more than
 100 million degrees of freedom and ran on a single compute node (2
 $\times$ 14 Intel Broadwell cores + 128GB RAM).](plastic_zones.png)
 
 The following publications have been made possible with ``Tamaas``:
 
 - @yastrebov_contact_2012
 - @yastrebov_contact_2014
 - @yastrebov_infinitesimal_2015
 - @yastrebov_accurate_2017
 - @yastrebov_role_2017
 - @rey_normal_2017
 - @rey_stability_2018
 - @rey_quantifying_2019
 - @frerot_mechanistic_2018
 - @frerot_fourieraccelerated_2019
 - @frerot_crack_2019
 - @brink_parameter_2020
 
 # Acknowledgements
 
-We acknowledge the financial support of the Swiss National Science Foundation (grant #162569 "Contact mechanics of rough surfaces").
+We acknowledge the financial support of the Swiss National Science Foundation
+(grant #162569 "Contact mechanics of rough surfaces").
 
 # References
diff --git a/tests/conftest.py b/tests/conftest.py
index c83ff625..255ce71b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,238 +1,267 @@
 # -*- coding: utf-8 -*-
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 from __future__ import division, print_function
 
+import subprocess
+import sys
+
 import numpy as np
-import tamaas as tm
 import pytest
 
 from numpy.linalg import norm
 
+try:
+    import tamaas as tm
+except ImportError as e:
+    print(e, file=sys.stderr)
+    print("Use 'scons test' to run tests", file=sys.stderr)
+    sys.exit(1)
+
+
+def get_libtamaas_file():
+    """Locate the libTamaas shared object linked by the Python module."""
+    try:
+        ldd_out = subprocess.check_output(['ldd', tm._tamaas.__file__])
+        for line in filter(lambda x: 'Tamaas' in x,
+                           ldd_out.decode().split('\n')):
+            path = line.split(' => ')[1]
+            return path.split(' ')[0]
+
+    except (subprocess.CalledProcessError, OSError):
+        pass
+
+    return "N/A"
+
+
+def pytest_report_header(config):
+    """Show the Tamaas build type and library paths in the pytest header."""
+    return ('tamaas build type: {}\n'
+            'module file: {}\n'
+            'wrapper: {}\n'
+            'libTamaas: {}').format(tm.TamaasInfo.build_type,
+                                    tm.__file__,
+                                    tm._tamaas.__file__,
+                                    get_libtamaas_file())
+
 
 def profile(func, size, mode, amplitude):
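+    """Separable periodic profile built from func on the unit cell."""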
     x = np.linspace(0, 1, size[0], endpoint=False, dtype=tm.dtype)
     y = np.linspace(0, 1, size[1], endpoint=False, dtype=tm.dtype)
     x, y = np.meshgrid(x, y, indexing='ij')
     surface = amplitude * func(2*np.pi*x*mode[0]) * func(2*np.pi*y*mode[1])
     return surface.copy()
 
 
 @pytest.fixture(scope="session")
 def tamaas_fixture():
     tm.initialize()
     yield None
     tm.finalize()
 
 
 class HertzFixture:
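+    """Analytical Hertz solution for a parabolic indenter on a half-space."""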
     def __init__(self, n, load):
         self.domain_size = 1
         self.n = n
         self.load = load
         self.curvature = 0.1
         self.radius = 1. / self.curvature
         self.e_star = 1.
         self.a = (3 * load / (4 * self.curvature * self.e_star))**(1. / 3.)
 
         self.x = np.linspace(-self.domain_size / 2.,
                              self.domain_size / 2.,
                              self.n, dtype=tm.dtype)
         self.y = self.x.copy()
         self.x, self.y = np.meshgrid(self.x, self.y)
 
         self._computeSurface()
         self._computePressure()
         self._computeDisplacement()
 
     def _computeDisplacement(self):
         r = np.sqrt(self.x**2 + self.y**2)
         self.displacement = np.zeros_like(r)
         contact = r < self.a
         self.displacement[contact] = self.surface[contact]
         self.displacement[~contact] = \
             (self.surface[~contact]
              + self.a / (np.pi * self.radius) * np.sqrt(r[~contact]**2
                                                         - self.a**2)
              + (r[~contact]**2 - 2 * self.a**2) / (np.pi * self.radius)
              * np.arccos(self.a / r[~contact]))
 
     def _computePressure(self):
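+        # Hertz: p(r) = p0 sqrt(1 - (r/a)^2) with p0 = 2 e_star a / (pi R)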
         r = np.sqrt(self.x**2 + self.y**2)
         self.pressure = np.zeros_like(r)
         contact = np.where(r < self.a)
         self.pressure[contact] = \
             2 * self.e_star / (np.pi * self.radius) \
             * np.sqrt(self.a**2 - r[contact]**2)
 
     def _computeSurface(self):
         self.surface = -1. / (2 * self.radius) * (self.x**2 + self.y**2)
 
 
 @pytest.fixture(scope="module")
 def hertz(tamaas_fixture):
     return HertzFixture(1024, 0.00001)
 
 
 @pytest.fixture(scope="module")
 def hertz_coarse(tamaas_fixture):
     return HertzFixture(512, 0.0001)
 
 @pytest.fixture(scope="module",
                 params=[tm.PolonskyKeerRey.pressure])
 def pkr(hertz, request):
     model = tm.ModelFactory.createModel(tm.model_type.basic_2d,
                                         [hertz.domain_size, hertz.domain_size],
                                         [hertz.n, hertz.n])
     model.setElasticity(hertz.e_star, 0)
     solver = tm.PolonskyKeerRey(model, hertz.surface, 1e-12,
                                 request.param,
                                 request.param)
     solver.solve(hertz.load)
 
     return model, hertz
 
 
 class WestergaardFixture:
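+    """Westergaard solution for a 1D sinusoidal profile in periodic contact."""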
     def __init__(self, n, load):
         self.domain_size = 1.
         self.lamda = 1.
         self.delta = 0.1
         self.e_star = 1.
 
         self.n = n
 
         self.p_star = np.pi * self.e_star * self.delta / self.lamda
 
         self.load = load * self.p_star
 
         self.a = self.lamda / np.pi \
             * np.arcsin(np.sqrt(self.load / self.p_star))
 
         self.x = np.linspace(-self.domain_size / 2.,
                              self.domain_size / 2.,
                              self.n, endpoint=False, dtype=tm.dtype)
 
         self._computeSurface()
         self._computePressure()
         self._computeDisplacement()
 
     def _computeSurface(self):
         self.surface = self.delta * np.cos(2 * np.pi * self.x / self.lamda)
 
     def _computePressure(self):
         self.pressure = np.zeros_like(self.surface)
         contact = np.where(np.abs(self.x) < self.a)
         self.pressure[contact] = 2 * self.load \
             * (np.cos(np.pi * self.x[contact] / self.lamda)
                / np.sin(np.pi * self.a / self.lamda)**2) \
             * np.sqrt(np.sin(np.pi * self.a / self.lamda)**2 -
                       np.sin(np.pi * self.x[contact] / self.lamda)**2)
 
     def _computeDisplacement(self):
         psi = np.pi * np.abs(self.x) / self.lamda
         psi_a = np.pi * self.a / self.lamda
 
         with np.errstate(invalid='ignore'):  # get some warnings out of the way
             self.displacement = (np.cos(2*psi) + 2 * np.sin(psi)
                                  * np.sqrt(np.sin(psi)**2 - np.sin(psi_a)**2)
                                  - 2 * np.sin(psi_a)**2 *
                                  np.log((np.sin(psi) +
                                          np.sqrt(np.sin(psi)**2
                                                  - np.sin(psi_a)**2))
                                         / np.sin(psi_a)))
 
         contact = np.where(np.abs(self.x) < self.a)
         self.displacement[contact] = np.cos(2*psi[contact])
         self.displacement *= self.load * self.lamda / (np.pi * self.e_star
                                                        * np.sin(psi_a)**2)
 
 
 @pytest.fixture(scope="module")
 def westergaard(tamaas_fixture):
     return WestergaardFixture(19683, 0.1)
 
 
 class PatchWestergaard:
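+    """Patch test: cosine pressure mode and its closed-form displacement."""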
     def __init__(self, model_type, domain, modes, size):
         self.E = 3.
         self.nu = 0.
         self.e_star = self.E / (1 - self.nu**2)
         self.n = size
         self.domain = domain
         self.model = tm.ModelFactory.createModel(model_type, domain,
                                                  size)
         self.model.setElasticity(self.E, self.nu)
 
         self.pressure = profile(np.cos, size, modes, 1)
         self.solution = profile(np.cos, size, modes,
                                 1 / (np.pi * self.e_star * norm(modes)))
 
 
 @pytest.fixture(scope="module", params=[tm.model_type.basic_2d])
 def patch_westergaard(tamaas_fixture, request):
     return PatchWestergaard(request.param, [1., 1.], [3, 1], [6, 6])
 
 
 class UniformPlasticity:
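+    """Spatially uniform plasticity patch test with linear hardening."""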
     def __init__(self, model_type, domain, sizes):
         self.n = sizes
         self.domain = domain
         self.model = tm.ModelFactory.createModel(model_type, domain,
                                                  sizes)
         self.E_h = 0.1
         self.sigma_y = 0.01
         self.residual = tm.ModelFactory.createResidual(self.model,
                                                        sigma_y=self.sigma_y,
                                                        hardening=self.E_h)
         self.model.E = 1.
         self.model.nu = 0.
 
     def solution(self, p):
         E, nu = self.model.E, self.model.nu
         E_h, sigma_y = self.E_h, self.sigma_y
         mu = E / (2 * (1 + nu))
 
         strain = -1 / (mu + E_h) * (p * (3 * mu + E_h) / (2 * mu)
                                     - np.sign(p) * sigma_y)
 
         dep = (2 * mu * np.abs(strain) - sigma_y) / (3 * mu + E_h)
         plastic_strain = np.sign(strain) / 2 * dep * np.array([
             -1, -1, 2, 0, 0, 0,
         ], dtype=tm.dtype)
 
         stress = 2 * mu * (np.array([
             0, 0, strain, 0, 0, 0
         ], dtype=tm.dtype) - plastic_strain)
 
         return {
             "stress": stress,
             "plastic_strain": plastic_strain,
             "cumulated_plastic_strain": dep
         }, {
             "stress": mu,
             "plastic_strain": 1,
             "cumulated_plastic_strain": 1
         }
 
 
 @pytest.fixture(scope="module", params=[tm.model_type.volume_2d])
 def patch_isotropic_plasticity(tamaas_fixture, request):
     return UniformPlasticity(request.param, [1., 1., 1.], [4, 4, 4])
diff --git a/tests/test_dumper.py b/tests/test_dumper.py
index 1e4a5a82..f76fe1bc 100644
--- a/tests/test_dumper.py
+++ b/tests/test_dumper.py
@@ -1,137 +1,139 @@
 # -*- coding: utf-8 -*-
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 from __future__ import division
 
 import os
 import shutil
 import tamaas as tm
 import numpy as np
+import warnings
+warnings.filterwarnings('ignore')
 
 from tamaas.dumpers import NumpyDumper
 
 
 class Dumper(tm.ModelDumper):
     """Simple numpy dumper"""
 
     def __init__(self):
         tm.ModelDumper.__init__(self)
 
     def dump(self, model):
         np.savetxt('tractions.txt', np.ravel(model.getField('traction')))
         np.savetxt('displacement.txt',
                    np.ravel(model.getField('displacement')))
 
 
 def cleanup():
     for name in ['tractions.txt',
                  'displacement.txt',
                  'numpys',
                  'hdf5',
                  'netcdf']:
         if os.path.exists(name) and os.path.isdir(name):
             shutil.rmtree(name)
         elif os.path.exists(name):
             os.remove(name)
 
 
 def test_dumpers(tamaas_fixture):
     model = tm.ModelFactory.createModel(tm.model_type.volume_2d, [1., 1., 1.],
                                         [16, 4, 8])
     dumper = Dumper()
     np_dumper = NumpyDumper('test_dump', 'traction', 'displacement')
     model.addDumper(np_dumper)
     model.dump()
     model.dump()
 
     dumper << model
 
     ref_t = model.getTraction()
     ref_d = model.getDisplacement()
 
     tractions = np.loadtxt('tractions.txt')
     displacements = np.loadtxt('displacement.txt')
 
     assert np.all(tractions.reshape(ref_t.shape) == ref_t)
     assert np.all(displacements.reshape(ref_d.shape) == ref_d)
 
     with np.load('numpys/test_dump_0000.npz') as npfile:
         tractions = npfile['traction']
         displacements = npfile['displacement']
         attributes = npfile['attrs'].item()
 
         assert np.all(tractions == ref_t)
         assert np.all(displacements == ref_d)
         assert str(model.type) == attributes['model_type']
 
     assert os.path.isfile('numpys/test_dump_0001.npz')
     cleanup()
 
 
 # Protecting test
 try:
     from tamaas.dumpers import H5Dumper
     import h5py
 
     def test_h5dumper(tamaas_fixture):
         model = tm.ModelFactory.createModel(tm.model_type.volume_2d,
                                             [1., 1., 1.],
                                             [16, 4, 8])
         model.getDisplacement()[...] = 3.1415
         dumper = H5Dumper('test_hdf5', 'traction', 'displacement')
         dumper << model
 
         assert os.path.isfile('hdf5/test_hdf5_0000.h5')
 
         fh = h5py.File('hdf5/test_hdf5_0000.h5', 'r')
         disp = np.array(fh['displacement'], dtype=tm.dtype)
         assert np.all(np.abs(disp - 3.1415) < 1e-15)
         assert list(fh.attrs['discretization']) == [16, 4, 8]
         assert fh.attrs['model_type'] == str(model.type)
         fh.close()
         cleanup()
 
 except ImportError:
     pass
 
 try:
     from tamaas.dumpers import NetCDFDumper
     import netCDF4
 
     def test_netcdfdumper(tamaas_fixture):
         model = tm.ModelFactory.createModel(tm.model_type.volume_2d,
                                             [1., 1., 1.],
                                             [16, 4, 8])
         model.getDisplacement()[...] = 3.1415
         dumper = NetCDFDumper('test_netcdf', 'traction', 'displacement')
         dumper << model
 
         assert os.path.isfile('netcdf/test_netcdf_0000.nc')
 
         fh = netCDF4.Dataset('netcdf/test_netcdf_0000.nc', 'r')
         disp = fh['displacement'][:]
         assert np.all(np.abs(disp - 3.1415) < 1e-15)
         assert disp.shape == (16, 4, 8, 3)
 
         fh.close()
         cleanup()
 
 except ImportError:
     pass
diff --git a/tests/test_epic.py b/tests/test_epic.py
index cf0aa08f..761143b6 100644
--- a/tests/test_epic.py
+++ b/tests/test_epic.py
@@ -1,57 +1,72 @@
 # -*- coding: utf-8 -*-
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 from __future__ import division, print_function
 
+import pytest
 import tamaas as tm
 
 from numpy.linalg import norm
 from tamaas.nonlinear_solvers import DFSANESolver
 
+
+@pytest.fixture(scope="module",
+                params=[tm.PolonskyKeerRey.pressure])
+def pkr_coarse(hertz_coarse, request):
+    model = tm.ModelFactory.createModel(tm.model_type.basic_2d,
+                                        [hertz_coarse.domain_size,
+                                         hertz_coarse.domain_size],
+                                        [hertz_coarse.n, hertz_coarse.n])
+    model.setElasticity(hertz_coarse.e_star, 0)
+    solver = tm.PolonskyKeerRey(model, hertz_coarse.surface, 1e-12,
+                                request.param,
+                                request.param)
+    solver.solve(hertz_coarse.load)
 
-def test_epic(pkr):
+    return model, hertz_coarse
+
+
+def test_epic(pkr_coarse):
     """Test the full elastic-plastic solve step in elasticity"""
     # We use computed values from basic PKR to test
-    model_elastic, hertz = pkr
+    model_elastic, hertz = pkr_coarse
 
     model = tm.ModelFactory.createModel(
         tm.model_type.volume_2d,
         [0.001, hertz.domain_size, hertz.domain_size],
-        [3, hertz.n, hertz.n]
+        [2, hertz.n, hertz.n]
     )
 
     model.setElasticity(hertz.e_star, 0)
     residual = tm.ModelFactory.createResidual(
         model,
         sigma_y=1e2 * model.E,
         hardening=0
     )
 
     epsolver = DFSANESolver(residual, model)
     csolver = tm.PolonskyKeerRey(model, hertz.surface, 1e-12)
 
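+    # Couple the elastic contact solver with the nonlinear plasticity solver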
     epic = tm.EPICSolver(csolver, epsolver, 1e-12)
     epic.solve(hertz.load)
 
     error = norm((model['traction'][..., 2] - model_elastic['traction'])
                  * (model['displacement'][0, ..., 2]
                     - model_elastic['displacement']))
     error /= norm(hertz.pressure * hertz.displacement)
     assert error < 1e-16