diff --git a/exercises/week14/particle-pybind/sujet.pdf b/exercises/week14/particle-pybind/sujet.pdf index f41d0c6..eba97c1 100644 Binary files a/exercises/week14/particle-pybind/sujet.pdf and b/exercises/week14/particle-pybind/sujet.pdf differ diff --git a/work/week14/particle-pybind/starting_point/CMakeLists.txt b/work/week14/particle-pybind/starting_point/CMakeLists.txt index 243d2e3..cb26244 100644 --- a/work/week14/particle-pybind/starting_point/CMakeLists.txt +++ b/work/week14/particle-pybind/starting_point/CMakeLists.txt @@ -1,98 +1,103 @@ cmake_minimum_required (VERSION 3.1) project (Particles) cmake_policy(VERSION 3.3) set(CMAKE_EXPORT_COMPILE_COMMANDS 1) set(CMAKE_CXX_STANDARD 14) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${PROJECT_SOURCE_DIR}/cmake") ################################################################ # FFTW ################################################################ set(FFTW_LIBRARY_PATH CACHE PATH "library where to search FFTW") find_library (FFTW_LIBRARY fftw3 /usr/include/ ${FFTW_LIBRARY_PATH}) ################################################################ # libpart ################################################################ add_library(part SHARED compute_boundary.cc compute_verlet_integration.cc particle.cc planet.cc compute_gravity.cc csv_reader.cc particles_factory_interface.cc planets_factory.cc compute_contact.cc compute_kinetic_energy.cc csv_writer.cc system.cc compute_energy.cc compute_potential_energy.cc ping_pong_ball.cc material_point.cc system_evolution.cc ping_pong_balls_factory.cc compute_interaction.cc compute_temperature.cc material_points_factory.cc ) target_link_libraries(part ${FFTW_LIBRARY}) add_executable(particles main.cc) target_link_libraries(particles part) ################################################################ # pybind11 ################################################################ -# find_package(pybind11 REQUIRED) # ---> for Flavio & Tristan -add_subdirectory(pybind11) # ---> for 
Theo +find_package(pybind11 REQUIRED) # ---> for Flavio & Tristan +#add_subdirectory(pybind11) # ---> for Theo add_library(pypart MODULE pypart_pybind.cpp) target_link_libraries(pypart PRIVATE pybind11::module part) set_target_properties(pypart PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}" SUFFIX "${PYTHON_MODULE_EXTENSION}") file( COPY ${CMAKE_CURRENT_SOURCE_DIR}/main.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/ ) +file( + COPY ${CMAKE_CURRENT_SOURCE_DIR}/generate_input.py + DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/ + ) + ################################################################ # Google test ################################################################ include(GoogleTest) enable_testing() find_package(GTest) if (GTEST_FOUND) include_directories(${GTEST_INCLUDE_DIRS}) add_executable(test_kepler test_kepler.cc) add_executable(test_fft test_fft.cc) target_link_libraries(test_kepler part ${GTEST_BOTH_LIBRARIES} pthread) target_link_libraries(test_fft part ${GTEST_BOTH_LIBRARIES} ${FFTW_LIBRARY} pthread) gtest_discover_tests(test_kepler) gtest_discover_tests(test_fft) endif() ################################################################ # Doxygen ################################################################ find_package(Doxygen) if (DOXYGEN_FOUND) # to set other options, read: https://cmake.org/cmake/help/v3.9/module/FindDoxygen.html doxygen_add_docs( doxygen ${PROJECT_SOURCE_DIR} COMMENT "Generate html pages" ) add_custom_target(doc DEPENDS doxygen) endif(DOXYGEN_FOUND) diff --git a/work/week14/particle-pybind/starting_point/README.md b/work/week14/particle-pybind/starting_point/README.md index 28537c1..234e7c2 100644 --- a/work/week14/particle-pybind/starting_point/README.md +++ b/work/week14/particle-pybind/starting_point/README.md @@ -1,63 +1,74 @@ ### How to run the code We assume here that you have a Linux distribution with all the necessary packages (eigen, fftw3, pybind11) installed, and a Python3 distribution with the necessary packages as well 
(argparse, pybind11, etc.). Once you've cloned the repository, go to the source code directory. From the repo's root folder: `cd work/week14/particle-pybind/starting_point` Then create a build directory and move to it: ``` mkdir build cd build ``` Now, configure the compilation utilities using Cmake: `ccmake ..` -This will prompt a GUI in the terminal. Hit `c` to configure (i.e. find the FFTW and PyBind libraries) and then `g` to generate the makefiles. +This will prompt a GUI in the terminal. Hit `c` to configure (i.e. find the FFTW and PyBind libraries) and then `g` to generate the makefiles. It will also generate a copied version of the `main.py` and `generate_input.py` files from the parent source directory in the `build` folder. Compile the C++ code: `make` -This will create a ... +Then, using the `generate_input.py` file, create an init.csv input file containing the particles' parameters in the folder using the following command: + +`python generate_input.py -n 262144 -H circular -T random -R 0.3 -o init.csv` + +or with any parameters specified (use `python generate_input.py -h` to see the help associated with the `generate_input.py` file).
+ +To run a material point simulation of 10 steps with a timestep of 0.1s and a dump frequency of 1, use: + +`python ./main.py 10 1 init.csv material_point 0.1` + +The output files, following a pattern of `step-XXXXX.csv`, will be created in the `build` folder. + ### Overloaded *createSimulation* method The original *createSimulation* method of the *MaterialPointsFactory* class had 2 main roles: - create unique *System* and *SystemEvolution* objects, and fill the System object with particles parsed from an input file - create specific *Compute* object(s) that are going to be called at each iteration of our simulation The main drawback of this method is that the creation and registration of *Compute* objects for this factory is **hard-coded**, meaning that the user has no flexibility in the type / number of *Compute* objects to be called at runtime. We would now like to be able to register those *Compute* objects from the `main.py` python script, such that the user can possibly decide which one(s) to consider by simply modifying this python script, or even better, based on command line arguments. This is done by overloading the *createSimulation* method with another declaration inside the class *MaterialPointsFactory*, taking a functor as an extra parameter. This functor object is registered as a method of the class, called *createComputes*, in charge of creating the *Compute* objects. The overloaded *createSimulation* method then calls the "original" one, which in turn calls the *createComputes* method previously defined. This mechanism allows the user to define which *Compute* objects are created and registered through the custom *createComputes* method **defined in Python** thanks to the *pybind11* functionalities. ### Management of *Compute* objects in python *Compute* objects are created inside a function `createComputes` in our python script. If no holder type template argument is provided upon binding, these objects are created as `std::unique_ptr` by default.
Unfortunately, such objects cannot then be used as function arguments, since that would imply that python needs to give up ownership of an object that is passed to this function, which is not possible. This is an issue in our case, since we want to add these objects to the System Evolution by calling the addCompute() method. Trying to pass such objects results in a runtime error: `RuntimeError: Unable to load a custom holder type from a default-holder instance` Moreover, using `std::unique_ptr` references means that the objects are deallocated when python’s reference count goes to zero, i.e. at the end of the `createComputes` function, which might affect their existence in the C++ world. Hence, we define a `std::shared_ptr` holder upon binding of the *Compute* classes, such that the objects can be passed as function arguments. ### Management of *Factory* objects in python *Factory* objects are accessed in the python script via their `getInstance` method, which returns singleton objects and thus does not allow returning copies. Hence, we specify a reference return policy when binding these functions (`py::return_value_policy::reference`), which creates references to these objects, without python taking any ownership. The C++ side is responsible for managing the object’s lifetime and deallocating it when it is no longer used. This scheme is acceptable in our case since *Factory* objects are just used once, in order to create a *SystemEvolution* object that is then completely independent from them. Hence, when the C++ side deletes those objects, they will no longer be used in python. A similar strategy is used to bind the `createSimulation` methods of the different factories, to match their C++ implementation that specifies a reference return.
diff --git a/work/week14/particle-pybind/starting_point/generate_input.py b/work/week14/particle-pybind/starting_point/generate_input.py index e793655..861bf4f 100644 --- a/work/week14/particle-pybind/starting_point/generate_input.py +++ b/work/week14/particle-pybind/starting_point/generate_input.py @@ -1,168 +1,165 @@ import os import numpy as np import math import argparse import matplotlib.pyplot as plt def isSquare(n): ''' Check if an integer value is a perfect square or not. ''' x = n // 2 seen = set([x]) while x * x != n: x = (x + (n // x)) // 2 if x in seen: return False seen.add(x) return True def plotDistributions(hv2d, temp2d): ''' Plot 2D heat and temperature distributions. ''' fig, axes = plt.subplots(1, 2, figsize=(12, 5)) ax = axes[0] ax.set_title('Heat source distribution') ax.set_xlabel('Y') ax.set_ylabel('X') ax.imshow(hv2d, extent=[-1, 1, -1, 1]) ax = axes[1] ax.set_title('Temperature distribution') ax.set_xlabel('Y') ax.set_ylabel('X') ax.imshow(temp2d, extent=[-1, 1, -1, 1]) return fig def main(): parser = argparse.ArgumentParser() parser.add_argument('-n', '--number', type=int, default=None, help='Number of particles') parser.add_argument('-o', '--outfile', type=str, default=None, help='Name of output file') parser.add_argument('-H', '--heatfield', type=str, default='circular', help='Heat distribution type ("null", "line" or "circular")') parser.add_argument('-T', '--tempfield', type=str, default=None, help='Initial temperature field ("random" or "homogeneous"),\ only used for non-test heatfields') parser.add_argument('-t', '--test', default=False, action='store_true', help='Test configuration (temperature field automtcially generated') parser.add_argument('-R', '--radius', type=float, default=1 / 3, help='Heat source radius (if heatfield is set to "circular")') parser.add_argument('-p', '--plot', default=False, action='store_true', help='Plot heat and temperature distribution') parser.add_argument('--hr', type=float, default=1, help='Specific 
heat rate') args = parser.parse_args() # Check for input validity test_error_str = 'cannot be specified for test configurations' if args.test: if args.outfile is not None: raise ValueError('output file {}'.format(test_error_str)) if args.tempfield is not None: raise ValueError('temperature field {}'.format(test_error_str)) if args.number is not None: raise ValueError('number of particles {}'.format(test_error_str)) # Get number of particles and square root of it N = int(args.number) if args.number is not None else 16**2 if not(isSquare(N)): raise ValueError('number of particles must be square') sqrtN = int(math.sqrt(N)) # Set initial positions on a 2D rectilinear grid (z = 0) x = np.linspace(-1, 1, sqrtN) pos2d = np.array(np.meshgrid(x, x)).T # Set heat volumetric source according to user input hv2d = np.zeros((sqrtN, sqrtN)) if args.heatfield == 'null': if args.test: # Test configuration: set constant temperature field theta2d = np.ones((sqrtN, sqrtN)) outfile = 'testnull.csv' elif args.heatfield == 'line': # Set punctual (dirac) heat distribution hv2d[int(sqrtN / 4), :] = -sqrtN hv2d[int(3 * sqrtN / 4), :] = sqrtN # Test configuration: set corresponding temperature field from analytical solution if args.test: outfile = 'testline.csv' L = 2 freqs = np.fft.fftfreq(sqrtN) * 2 * np.pi / L * sqrtN freqs_2d = np.einsum('i,j->ij', np.ones(sqrtN), freqs) freqs = freqs_2d**2 + freqs_2d.T**2 freqs[0, 0] = 1. T = np.fft.fft2(hv2d) / freqs T[0, 0] = 0. 
T = np.fft.ifft2(T) theta2d = T.real elif args.heatfield == 'circular': if args.test: raise ValueError('no test configuration available for "circular" heat field') # Set circular heat distribution R = args.radius if R < 0: raise ValueError('radius must be positive') for i in range(sqrtN): for j in range(sqrtN): if (x[i]**2 + x[j]**2 < R): hv2d[i, j] = sqrtN else: raise ValueError('heatfield must be "null", "line" or "circular"') if not args.test: # Set temperature field according to user input if args.tempfield == 'random' or args.tempfield is None: # Random field (between zero and 2) theta2d = np.random.random((sqrtN, sqrtN)) * 2 elif args.tempfield == 'homogeneous': # Homogeneous field (all equal to one) theta2d = np.ones((sqrtN, sqrtN)) else: raise ValueError('tempfield must be "random" or "homogeneous"') # Set output file according to user input outfile = args.outfile if args.outfile is not None else 'step-00000.csv' # Reshape matrices into serialized vectors positions = np.hstack((pos2d.reshape(N, 2), np.zeros((N, 1)))) hv = np.reshape(hv2d, (hv2d.size, 1)) theta = np.reshape(theta2d, (theta2d.size, 1)) # Generate serialized vectors for other, constant quantities hr = args.hr * np.ones((N, 1)) velocities = np.zeros((N, 3)) forces = np.zeros((N, 3)) masses = np.ones((N, 1)) # Save data into csv file file_data = np.hstack((positions, velocities, forces, masses, theta, hr, hv)) - outdir = os.path.join(os.getcwd(), 'build', 'dumps') - if not os.path.isdir(outdir): - os.makedirs(outdir) - filepath = os.path.join(os.getcwd(), 'build', 'dumps', outfile) + filepath = os.path.join(os.getcwd(), outfile) np.savetxt(filepath, file_data, delimiter=" ") # Plot the heat and temperature fields if args.plot: plotDistributions(hv2d, theta2d) plt.show() if __name__ == '__main__': try: main() except ValueError as e: print('Error:', e) quit()