diff --git a/python/tamaas/dumpers/__init__.py b/python/tamaas/dumpers/__init__.py
index 5ac0b624..6bf3ab6a 100644
--- a/python/tamaas/dumpers/__init__.py
+++ b/python/tamaas/dumpers/__init__.py
@@ -1,267 +1,285 @@
 # -*- mode:python; coding: utf-8 -*-
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 """
 Dumpers for the class tamaas.Model
 """
 from __future__ import print_function
 from sys import stderr
 from os import makedirs
 import os.path
 
 import numpy as np
 
 from .. import ModelDumper, model_type
 from ._helper import make_periodic, step_dump, directory_dump
 
 
 class FieldDumper(ModelDumper):
     """Abstract dumper for python classes using fields"""
     postfix = ''
     extension = ''
     name_format = "{basename}{postfix}.{extension}"
 
     def __init__(self, basename, *fields, **kwargs):
         """Construct with desired fields"""
         super(FieldDumper, self).__init__()
         self.basename = basename
         self.fields = list(fields)
         self.all_fields = kwargs.get('all_fields', False)
 
     def add_field(self, field):
         """Add another field to the dump"""
         if field not in self.fields:
             self.fields.append(field)
 
     def dump_to_file(self, fd, model):
         """Dump to a file (name or handle)"""
         pass
 
     def get_fields(self, model):
         """Get the desired fields"""
         if not self.all_fields:
             requested_fields = self.fields
         else:
             requested_fields = model.getFields()
 
         return {field: model.getField(field) for field in requested_fields}
 
     def get_attributes(self, model):
         """Get model attributes"""
         return {
             'model_type': str(model.type),
             'system_size': model.getSystemSize(),
             'discretization': model.getDiscretization()
         }
 
     def dump(self, model):
         self.dump_to_file(self.file_path, model)
 
     @property
     def file_path(self):
         """Get the default filename"""
         return self.name_format.format(basename=self.basename,
                                        postfix=self.postfix,
                                        extension=self.extension)
 
 
 @directory_dump('numpys')
 @step_dump
 class NumpyDumper(FieldDumper):
     """Dumper to compressed numpy files"""
     extension = 'npz'
 
     def dump_to_file(self, fd, model):
         """Saving to compressed multi-field Numpy format"""
         np.savez_compressed(fd, attrs=self.get_attributes(model),
                             **self.get_fields(model))
 
 
 try:
     import h5py
 
     @directory_dump('hdf5')
     @step_dump
     class H5Dumper(FieldDumper):
         """Dumper to HDF5 file format"""
         extension = 'h5'
 
         def dump_to_file(self, fd, model):
             """Saving to HDF5 with metadata about the model"""
             with h5py.File(fd, 'w') as fh:
                 # Writing data
                 for name, field in self.get_fields(model).items():
                     dset = fh.create_dataset(name, field.shape, field.dtype,
                                              compression='gzip',
                                              compression_opts=7)
                     dset[:] = field
 
                 # Writing metadata
                 for name, attr in self.get_attributes(model).items():
                     fh.attrs[name] = attr
 except ImportError:
     pass
 
 try:
     import uvw  # noqa
 
     @directory_dump('paraview')
     @step_dump
     class UVWDumper(FieldDumper):
         """Dumper to VTK files for elasto-plastic calculations"""
         extension = 'vtr'
         forbidden_fields = ['traction', 'gap']  # TODO make generic
 
         def dump_to_file(self, fd, model):
             """Dump displacements, plastic deformations and stresses"""
             discretization = model.getDiscretization().copy()
 
             # Because we make fields periodic
             discretization[1] += 1
             discretization[2] += 1
 
             # Space coordinates
             coordinates = [np.linspace(0, L, N)
                            for L, N in zip(model.getSystemSize(),
                                            discretization)]
 
             # Correct order of coordinate dimensions
             dimension_indices = [1, 2, 0]
 
             # Creating rectilinear grid with correct order for components
             grid = uvw.RectilinearGrid(
                 fd, (coordinates[i] for i in dimension_indices),
                 compression=True,
             )
 
             # Iterator over fields we want to dump
             # Avoid 2D fields (TODO make generic)
             fields_it = filter(lambda t: t[0] not in self.forbidden_fields,
                                self.get_fields(model).items())
 
             # We make fields periodic for visualization
             for name, field in fields_it:
                 array = uvw.DataArray(np.array(make_periodic[name](field),
                                                dtype=np.double),
                                       dimension_indices, name)
                 grid.addPointData(array)
 
             grid.write()
 
     @directory_dump('paraview')
     class UVWGroupDumper(FieldDumper):
         """Dumper to ParaViewData (.pvd) files"""
         extension = 'pvd'
 
         def __init__(self, basename, *fields, **kwargs):
             """Construct with desired fields"""
             super(UVWGroupDumper, self).__init__(basename, *fields, **kwargs)
 
             subdir = os.path.join('paraview', basename + '-VTR')
             if not os.path.exists(subdir):
                 makedirs(subdir)
 
             self.uvw_dumper = UVWDumper(
                 os.path.join(basename + '-VTR', basename), *fields, **kwargs
             )
 
             self.group = uvw.ParaViewData(self.file_path, compression=True)
 
         def dump_to_file(self, fd, model):
             self.group.addFile(
                 self.uvw_dumper.file_path.replace('paraview/', ''),
                 timestep=self.uvw_dumper.count
             )
             self.group.write()
             self.uvw_dumper.dump(model)
 except ImportError as error:
     print(error, file=stderr)
 
 
 try:
     from netCDF4 import Dataset
 
     @directory_dump('netcdf')
-    @step_dump
     class NetCDFDumper(FieldDumper):
         """Dumper to netCDF4 files"""
 
         extension = "nc"
         boundary_fields = ['traction', 'gap']
 
-        def dump_to_file(self, fd, model):
-            with Dataset(fd, 'w', format='NETCDF4_CLASSIC') as rootgrp:
-                model_dim = len(model.getDiscretization())
-                self._vec = rootgrp.createDimension('spatial', model_dim)
-                self._tens = rootgrp.createDimension('Voigt', 2*model_dim)
-
-                self._dump_boundary(rootgrp, model)
+        def _file_setup(self, grp, model):
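+            """Create dimensions and variables for a fresh dump file"""
+            # Unlimited record dimension: one entry is appended per dump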
+            grp.createDimension('frame', None)
 
-                if model.type in {model_type.volume_1d, model_type.volume_2d}:
-                    self._dump_volume(rootgrp, model)
+            # Local dimensions
+            model_dim = len(model.getDiscretization())
+            self._vec = grp.createDimension('spatial', model_dim)
+            self._tens = grp.createDimension('Voigt', 2*model_dim)
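+            # Remember the model layout to reject mismatched models later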
+            self.model_info = model.getDiscretization(), model.type
 
-        def _dump_boundary(self, grp, model):
             # Create boundary dimensions
             it = zip("xy", model.getBoundaryDiscretization(),
-                     model.getSystemSize())
+                     model.getBoundarySystemSize())
 
             for label, size, length in it:
                 grp.createDimension(label, size)
                 coord = grp.createVariable(label, 'f8', (label,))
                 coord[:] = np.linspace(0, length, size, endpoint=False)
 
-            self._dump_generic(grp, model,
-                               lambda f: f[0] in self.boundary_fields,
-                               'boundary',
-                               model.getBoundaryDiscretization(),
-                               "xy")
+            self._create_variables(
+                grp, model,
+                lambda f: f[0] in self.boundary_fields,
+                model.getBoundaryDiscretization(), "xy"
+            )
 
-        def _dump_volume(self, grp, model):
             # Create volume dimension
-            size = model.getDiscretization()[0]
-            grp.createDimension("z", size)
-            coord = grp.createVariable("z", 'f8', ("z",))
-            coord[:] = np.linspace(0, model.getSystemSize()[0], size)
+            if model.type in {model_type.volume_1d, model_type.volume_2d}:
+                size = model.getDiscretization()[0]
+                grp.createDimension("z", size)
+                coord = grp.createVariable("z", 'f8', ("z",))
+                coord[:] = np.linspace(0, model.getSystemSize()[0], size)
+
+                self._create_variables(
+                    grp, model,
+                    lambda f: f[0] not in self.boundary_fields,
+                    model.getDiscretization(), "zxy"
+                )
+
+        def dump_to_file(self, fd, model):
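+            # Append to an existing file so successive dumps share it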
+            mode = 'a' if os.path.isfile(fd) else 'w'
 
-            self._dump_generic(grp, model,
-                               lambda f: f[0] not in self.boundary_fields,
-                               'volume', model.getDiscretization(), "zxy")
+            with Dataset(fd, mode, format='NETCDF4_CLASSIC') as rootgrp:
+                if not rootgrp.dimensions:
+                    self._file_setup(rootgrp, model)
 
-        def _dump_generic(self, grp, model, predicate,
-                          group_name, shape, dimensions):
+                if self.model_info != (model.getDiscretization(), model.type):
+                    raise ValueError("Unexpected model: {}".format(model))
+
+                self._dump_generic(rootgrp, model)
+
+        def _create_variables(self, grp, model, predicate,
+                              shape, dimensions):
             field_dim = len(shape)
             fields = filter(predicate, self.get_fields(model).items())
             dim_labels = list(dimensions[:field_dim])
 
             for label, data in fields:
                 dims = list(dim_labels)  # fresh copy for each field
 
                 # If we have an extra component
                 if data.ndim > field_dim:
                     if data.shape[-1] == self._tens.size:
                         dims.append(self._tens.name)
                     elif data.shape[-1] == self._vec.size:
                         dims.append(self._vec.name)
 
-                var = grp.createVariable(label, 'f8', dims)
-                var[:] = np.array(data, dtype=np.double)
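+                # Leading 'frame' dimension holds one slice per dump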
+                grp.createVariable(label, 'f8', ['frame'] + dims, zlib=True)
+
+        def _dump_generic(self, grp, model):
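+            """Append the current model fields as a new frame"""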
+            fields = self.get_fields(model).items()
+
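+            # The next frame index is the current size of 'frame'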
+            new_frame = grp.dimensions['frame'].size
+            for label, data in fields:
+                var = grp[label]
+                var[new_frame, :] = np.array(data, dtype=np.double)
+
 
 except ImportError:
     pass
diff --git a/tests/test_dumper.py b/tests/test_dumper.py
index 393d66a1..907a580d 100644
--- a/tests/test_dumper.py
+++ b/tests/test_dumper.py
@@ -1,139 +1,148 @@
 # -*- coding: utf-8 -*-
 # @file
 # @section LICENSE
 #
 # Copyright (©) 2016-2020 EPFL (École Polytechnique Fédérale de Lausanne),
 # Laboratory (LSMS - Laboratoire de Simulation en Mécanique des Solides)
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU Affero General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 from __future__ import division
 
 import os
 import shutil
 import tamaas as tm
 import numpy as np
 import warnings
 warnings.filterwarnings('ignore')
 
+import pytest
 from tamaas.dumpers import NumpyDumper
 
 
 class Dumper(tm.ModelDumper):
     """Simple numpy dumper"""
 
     def __init__(self):
         tm.ModelDumper.__init__(self)
 
     def dump(self, model):
         np.savetxt('tractions.txt', np.ravel(model.getField('traction')))
         np.savetxt('displacement.txt',
                    np.ravel(model.getField('displacement')))
 
 
 def cleanup():
     for name in ['tractions.txt',
                  'displacement.txt',
                  'numpys',
                  'hdf5',
                  'netcdf']:
         if os.path.exists(name) and os.path.isdir(name):
             shutil.rmtree(name)
         elif os.path.exists(name):
             os.remove(name)
 
 
 def test_dumpers(tamaas_fixture):
     model = tm.ModelFactory.createModel(tm.model_type.volume_2d, [1., 1., 1.],
                                         [16, 4, 8])
     dumper = Dumper()
     np_dumper = NumpyDumper('test_dump', 'traction', 'displacement')
     model.addDumper(np_dumper)
     model.dump()
     model.dump()
 
     dumper << model
 
     ref_t = model.getTraction()
     ref_d = model.getDisplacement()
 
     tractions = np.loadtxt('tractions.txt')
     displacements = np.loadtxt('displacement.txt')
 
     assert np.all(tractions.reshape(ref_t.shape) == ref_t)
     assert np.all(displacements.reshape(ref_d.shape) == ref_d)
 
     with np.load('numpys/test_dump_0000.npz', allow_pickle=True) as npfile:
         tractions = npfile['traction']
         displacements = npfile['displacement']
         attributes = npfile['attrs'].item()
 
         assert np.all(tractions == ref_t)
         assert np.all(displacements == ref_d)
         assert str(model.type) == attributes['model_type']
 
     assert os.path.isfile('numpys/test_dump_0001.npz')
     cleanup()
 
 
 # Protect tests that need optional dependencies
 try:
     from tamaas.dumpers import H5Dumper
     import h5py
 
     def test_h5dumper(tamaas_fixture):
         model = tm.ModelFactory.createModel(tm.model_type.volume_2d,
                                             [1., 1., 1.],
                                             [16, 4, 8])
         model.getDisplacement()[...] = 3.1415
         dumper = H5Dumper('test_hdf5', 'traction', 'displacement')
         dumper << model
 
         assert os.path.isfile('hdf5/test_hdf5_0000.h5')
 
         fh = h5py.File('hdf5/test_hdf5_0000.h5', 'r')
         disp = np.array(fh['displacement'], dtype=tm.dtype)
         assert np.all(np.abs(disp - 3.1415) < 1e-15)
         assert list(fh.attrs['discretization']) == [16, 4, 8]
         assert fh.attrs['model_type'] == str(model.type)
         fh.close()
         cleanup()
 
 except ImportError:
     pass
 
 try:
     from tamaas.dumpers import NetCDFDumper
-    import netCDF4
+    from netCDF4 import Dataset
 
     def test_netcdfdumper(tamaas_fixture):
         model = tm.ModelFactory.createModel(tm.model_type.volume_2d,
                                             [1., 1., 1.],
                                             [16, 4, 8])
         model.getDisplacement()[...] = 3.1415
         dumper = NetCDFDumper('test_netcdf', 'traction', 'displacement')
+        # Dumping two frames
+        dumper << model
         dumper << model
 
-        assert os.path.isfile('netcdf/test_netcdf_0000.nc')
+        assert os.path.isfile('netcdf/test_netcdf.nc')
 
-        fh = netCDF4.Dataset('netcdf/test_netcdf_0000.nc', 'r')
-        disp = fh['displacement'][:]
-        assert np.all(np.abs(disp - 3.1415) < 1e-15)
-        assert disp.shape == (16, 4, 8, 3)
+        with Dataset('netcdf/test_netcdf.nc', 'r') as netcdf_file:
+            disp = netcdf_file['displacement'][:]
+            assert np.all(np.abs(disp - 3.1415) < 1e-15)
+            assert disp.shape[1:] == (16, 4, 8, 3)
+            assert disp.shape[0] == 2
+
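+        # Dumping a model with a different discretization must fail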
+        model = tm.ModelFactory.createModel(tm.model_type.volume_2d,
+                                            [1., 1., 1.],
+                                            [15, 4, 8])
+        with pytest.raises(Exception):
+            dumper << model
 
-        fh.close()
         cleanup()
 
 except ImportError:
     pass