diff --git a/src/model/adhesion_functional.cpp b/src/model/adhesion_functional.cpp
index 08400e2..b5ef113 100644
--- a/src/model/adhesion_functional.cpp
+++ b/src/model/adhesion_functional.cpp
@@ -1,113 +1,113 @@
 /**
  * @file
  *
  * @author Lucas Frérot
  * @author Valentine Rey
  *
  * @section LICENSE
  *
  * Copyright (©) 2017 EPFL (Ecole Polytechnique Fédérale de
  * Lausanne) Laboratory (LSMS - Laboratoire de Simulation en Mécanique des
  * Solides)
  *
  * Tamaas is free software: you can redistribute it and/or modify it under the
  * terms of the GNU Lesser General Public License as published by the Free
  * Software Foundation, either version 3 of the License, or (at your option) any
  * later version.
  *
  * Tamaas is distributed in the hope that it will be useful, but WITHOUT ANY
  * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
  * A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
  * details.
  *
  * You should have received a copy of the GNU Lesser General Public License
  * along with Tamaas. If not, see <http://www.gnu.org/licenses/>.
  *
  */
 /* -------------------------------------------------------------------------- */
 #include "adhesion_functional.hh"
 /* -------------------------------------------------------------------------- */
 
 __BEGIN_TAMAAS__
 
 namespace functional {
 
 Real ExponentialAdhesionFunctional::computeF(
     GridBase<Real>& gap, GridBase<Real>& /*pressure*/) const {
   auto rho_inv = 1. / getParameters().at("rho");
   auto gamma = getParameters().at("surface_energy");
   return -gamma *
          Loop::reduce(
              [rho_inv] CUDA_LAMBDA(const Real& g) {
                return std::exp(-g * rho_inv);
              },
              gap);
 }
 
 void ExponentialAdhesionFunctional::computeGradF(
     GridBase<Real>& gap, GridBase<Real>& gradient) const {
   auto rho_inv = 1. / getParameters().at("rho");
   auto gamma = getParameters().at("surface_energy");
   Loop::loop(
       [rho_inv, gamma] CUDA_LAMBDA(const Real& g, Real& grad) {
         grad += gamma * std::exp(-g * rho_inv) * rho_inv;
       },
       gap, gradient);
 }
 
 Real MaugisAdhesionFunctional::computeF(GridBase<Real>& gap,
                                         GridBase<Real>& /*pressure*/) const {
   auto rho = getParameters().at("rho");
   auto rho_inv = 1. / rho;
   auto gamma = getParameters().at("surface_energy");
   return -gamma *
          Loop::reduce(
-             [rho, rho_inv](const Real& g) {
+             [rho, rho_inv] CUDA_LAMBDA(const Real& g) {
                return (g > rho) ? 0 : 1 - g * rho_inv;
              },
              gap);
 }
 
 void MaugisAdhesionFunctional::computeGradF(GridBase<Real>& gap,
                                             GridBase<Real>& gradient) const {
   auto rho = getParameters().at("rho");
   auto rho_inv = 1. / rho;
   auto gamma = getParameters().at("surface_energy");
   Loop::loop(
       [rho, rho_inv, gamma] CUDA_LAMBDA(const Real& g, Real& grad) {
         grad += (g > rho) ? 0 : gamma * rho_inv;
       },
       gap, gradient);
 }
 
 Real SquaredExponentialAdhesionFunctional::computeF(
     GridBase<Real>& gap, GridBase<Real>& /*pressure*/) const {
   auto rho_inv = 1. / getParameters().at("rho");
   auto rho_inv_2 = rho_inv * rho_inv;
   auto gamma = getParameters().at("surface_energy");
   return -gamma *
          Loop::reduce(
              [rho_inv_2] CUDA_LAMBDA(const Real& g) {
                return std::exp(-0.5 * g * g * rho_inv_2);
              },
              gap);
 }
 
 void SquaredExponentialAdhesionFunctional::computeGradF(
     GridBase<Real>& gap, GridBase<Real>& gradient) const {
   auto rho_inv = 1. / getParameters().at("rho");
   auto rho_inv_2 = rho_inv * rho_inv;
   auto gamma = getParameters().at("surface_energy");
   Loop::loop(
       [rho_inv_2, gamma] CUDA_LAMBDA(const Real& g, Real& grad) {
         grad += g * rho_inv_2 * gamma * std::exp(-0.5 * g * g * rho_inv_2);
       },
       gap, gradient);
 }
 
 } // namespace functional
 
 __END_TAMAAS__