generic_approximant.py

# Copyright (C) 2018 by the RROMPy authors
#
# This file is part of RROMPy.
#
# RROMPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RROMPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with RROMPy. If not, see <http://www.gnu.org/licenses/>.
#
from abc import abstractmethod
import numpy as np
from itertools import product as iterprod
from copy import deepcopy as copy
from os import remove as osrm
from rrompy.sampling.linear_problem import (SamplingEngineLinear,
SamplingEngineLinearPOD)
from rrompy.utilities.base.types import (Np1D, DictAny, HFEng, List, Tuple,
ListAny, strLst, paramVal, paramList,
sampList)
from rrompy.utilities.base import purgeDict, verbosityDepth, getNewFilename
from rrompy.utilities.exception_manager import (RROMPyException, RROMPyAssert,
RROMPy_READY, RROMPy_FRAGILE)
from rrompy.utilities.base import pickleDump, pickleLoad
from rrompy.parameter import (emptyParameterList, checkParameter,
checkParameterList)
from rrompy.sampling import sampleList, emptySampleList
__all__ = ['GenericApproximant']
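# The following helpers are method factories: each builds a thin wrapper
# around the corresponding get* method (getHF, getRHS, getApprox, getRes,
# getErr) and attaches it to the class during __init__, producing the
# norm*/plot*/outParaview*/outParaviewTimeDomain* families of methods.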
def addNormFieldToClass(self, fieldName):
def objFunc(self, mu:paramList, homogeneized : bool = False) -> Np1D:
uV = getattr(self.__class__, "get" + fieldName)(self, mu, homogeneized)
val = self.HFEngine.norm(uV)
return val
setattr(self.__class__, "norm" + fieldName, objFunc)
def addPlotFieldToClass(self, fieldName):
def objFunc(self, mu:paramList, *args, homogeneized : bool = False,
**kwargs):
uV = getattr(self.__class__, "get" + fieldName)(self, mu, homogeneized)
kwargsCopy = copy(kwargs)
for j, u in enumerate(uV):
if "name" in kwargs.keys():
kwargsCopy["name"] = kwargs["name"] + str(j)
self.HFEngine.plot(u, *args, **kwargsCopy)
setattr(self.__class__, "plot" + fieldName, objFunc)
def addOutParaviewFieldToClass(self, fieldName):
def objFunc(self, mu:paramVal, *args, homogeneized : bool = False,
**kwargs):
if not hasattr(self.HFEngine, "outParaview"):
raise RROMPyException(("High fidelity engine cannot output to "
"Paraview."))
uV = getattr(self.__class__, "get" + fieldName)(self, mu, homogeneized)
kwargsCopy = copy(kwargs)
for j, u in enumerate(uV):
if "name" in kwargs.keys():
kwargsCopy["name"] = kwargs["name"] + str(j)
self.HFEngine.outParaview(u, *args, **kwargsCopy)
setattr(self.__class__, "outParaview" + fieldName, objFunc)
def addOutParaviewTimeDomainFieldToClass(self, fieldName):
def objFunc(self, mu:paramVal, *args,
homogeneized : bool = False, **kwargs):
if not hasattr(self.HFEngine, "outParaviewTimeDomain"):
raise RROMPyException(("High fidelity engine cannot output to "
"Paraview."))
uV = getattr(self.__class__, "get" + fieldName)(self, mu, homogeneized)
args = list(args)
omega = args.pop(0) if len(args) > 0 else np.real(mu)
kwargsCopy = copy(kwargs)
for j, u in enumerate(uV):
if "name" in kwargs.keys():
kwargsCopy["name"] = kwargs["name"] + str(j)
self.HFEngine.outParaviewTimeDomain(u, omega, *args,
**kwargsCopy)
setattr(self.__class__, "outParaviewTimeDomain" + fieldName, objFunc)
class GenericApproximant:
"""
ABSTRACT
ROM approximant computation for parametric problems.
Args:
HFEngine: HF problem solver.
mu0(optional): Default parameter. Defaults to HFEngine.mu0 if
available.
approxParameters(optional): Dictionary containing values for main
parameters of approximant. Recognized keys are:
- 'POD': whether to compute POD of snapshots; defaults to True;
- 'S': total number of samples current approximant relies upon.
Defaults to empty dict.
homogeneized(optional): Whether to homogeneize Dirichlet BCs. Defaults
to False.
verbosity(optional): Verbosity level. Defaults to 10.
Attributes:
HFEngine: HF problem solver.
trainedModel: Trained model evaluator.
mu0: Default parameter.
homogeneized: Whether to homogeneize Dirichlet BCs.
approxParameters: Dictionary containing values for main parameters of
approximant. Recognized keys are in parameterList{Soft,Critical}.
parameterListSoft: Recognized keys of soft approximant parameters:
- 'POD': whether to compute POD of snapshots.
parameterListCritical: Recognized keys of critical approximant
parameters:
- 'S': total number of samples current approximant relies upon.
verbosity: Verbosity level.
POD: Whether to compute POD of snapshots.
S: Number of solution snapshots current approximant is based upon.
samplingEngine: Sampling engine.
uHF: High fidelity solution(s) with parameter(s) lastSolvedHF as
sampleList.
lastSolvedHF: Parameter(s) corresponding to last computed high fidelity
solution(s) as parameterList.
uApproxReduced: Reduced approximate solution(s) with parameter(s)
lastSolvedApproxReduced as sampleList.
lastSolvedApproxReduced: Parameter(s) corresponding to last computed
reduced approximate solution(s) as parameterList.
uApprox: Approximate solution(s) with parameter(s) lastSolvedApprox as
sampleList.
lastSolvedApprox: Parameter(s) corresponding to last computed
approximate solution(s) as parameterList.
"""
__all__ += [ftype + dtype for ftype, dtype in iterprod(
["norm", "plot", "outParaview", "outParaviewTimeDomain"],
["HF", "RHS", "Approx", "Res", "Err"])]
def __init__(self, HFEngine:HFEng, mu0 : paramVal = None,
approxParameters : DictAny = {}, homogeneized : bool = False,
verbosity : int = 10, timestamp : bool = True):
self._preInit()
self._mode = RROMPy_READY
self.verbosity = verbosity
self.timestamp = timestamp
if self.verbosity >= 10:
verbosityDepth("INIT", ("Initializing approximant engine of "
"type {}.").format(self.name()),
timestamp = self.timestamp)
self._HFEngine = HFEngine
self.trainedModel = None
self.lastSolvedHF = emptyParameterList()
self.uHF = emptySampleList()
self._addParametersToList(["POD"], [True], ["S"], [[1]])
if mu0 is None:
if hasattr(self.HFEngine, "mu0"):
self.mu0 = checkParameter(self.HFEngine.mu0)
else:
raise RROMPyException(("Center of approximation cannot be "
"inferred from HF engine. Parameter "
"required"))
else:
self.mu0 = checkParameter(mu0, self.HFEngine.npar)
self.resetSamples()
self.homogeneized = homogeneized
self.approxParameters = approxParameters
self._postInit()
### add norm{HF,RHS,Approx,Res,Err} methods
"""
Compute norm of * at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
Target norm of *.
"""
for objName in ["HF", "RHS", "Err"]:
addNormFieldToClass(self, objName)
def objFunc(self, mu:paramList, homogeneized : bool = False) -> Np1D:
# uV = getattr(self.__class__, "getRes")(self, mu, homogeneized,
# duality = False)
uV = self.getRes(mu, homogeneized, duality = False)
val = self.HFEngine.norm(uV, dual = True, duality = False)
return val
setattr(self.__class__, "normRes", objFunc)
if not hasattr(self, "normApprox"):
addNormFieldToClass(self, "Approx")
### add plot{HF,RHS,Approx,Res,Err} methods
"""
Do some nice plots of * at arbitrary parameter.
Args:
mu: Target parameter.
name(optional): Name to be shown as title of the plots. Defaults to
'u'.
what(optional): Which plots to do. If list, can contain 'ABS',
'PHASE', 'REAL', 'IMAG'. If str, same plus wildcard 'ALL'.
Defaults to 'ALL'.
save(optional): Where to save plot(s). Defaults to None, i.e. no
saving.
saveFormat(optional): Format for saved plot(s). Defaults to "eps".
saveDPI(optional): DPI for saved plot(s). Defaults to 100.
show(optional): Whether to show figure. Defaults to True.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
figspecs(optional key args): Optional arguments for matplotlib
figure creation.
"""
for objName in ["HF", "RHS", "Approx", "Res", "Err"]:
addPlotFieldToClass(self, objName)
### add outParaview{HF,RHS,Approx,Res,Err} methods
"""
Output * to ParaView file.
Args:
mu: Target parameter.
name(optional): Base name to be used for data output.
filename(optional): Name of output file.
time(optional): Timestamp.
what(optional): Which plots to do. If list, can contain 'MESH',
'ABS', 'PHASE', 'REAL', 'IMAG'. If str, same plus wildcard
'ALL'. Defaults to 'ALL'.
forceNewFile(optional): Whether to create new output file.
filePW(optional): Fenics File entity (for time series).
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
"""
for objName in ["HF", "RHS", "Approx", "Res", "Err"]:
addOutParaviewFieldToClass(self, objName)
### add outParaviewTimeDomain{HF,RHS,Approx,Res,Err} methods
"""
Output * to ParaView file, converted to time domain.
Args:
mu: Target parameter.
omega(optional): frequency.
timeFinal(optional): final time of simulation.
periodResolution(optional): number of time steps per period.
name(optional): Base name to be used for data output.
filename(optional): Name of output file.
forceNewFile(optional): Whether to create new output file.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
"""
for objName in ["HF", "RHS", "Approx", "Res", "Err"]:
addOutParaviewTimeDomainFieldToClass(self, objName)
def _preInit(self):
if not hasattr(self, "depth"): self.depth = 0
else: self.depth += 1
@property
def parameterList(self):
"""Value of parameterListSoft + parameterListCritical."""
return self.parameterListSoft + self.parameterListCritical
def _addParametersToList(self, whatSoft:strLst, defaultSoft:ListAny,
whatCritical : strLst = [],
defaultCritical : ListAny = [],
toBeExcluded : strLst = []):
if not hasattr(self, "parameterToBeExcluded"):
self.parameterToBeExcluded = []
self.parameterToBeExcluded += toBeExcluded
if not hasattr(self, "parameterListSoft"):
self.parameterListSoft = []
if not hasattr(self, "parameterDefaultSoft"):
self.parameterDefaultSoft = {}
if not hasattr(self, "parameterListCritical"):
self.parameterListCritical = []
if not hasattr(self, "parameterDefaultCritical"):
self.parameterDefaultCritical = {}
for j, what in enumerate(whatSoft):
if what not in self.parameterToBeExcluded:
self.parameterListSoft += [what]
self.parameterDefaultSoft[what] = defaultSoft[j]
for j, what in enumerate(whatCritical):
if what not in self.parameterToBeExcluded:
self.parameterListCritical += [what]
self.parameterDefaultCritical[what] = defaultCritical[j]
def _postInit(self):
if self.depth == 0:
if self.verbosity >= 10:
verbosityDepth("DEL", "Done initializing.",
timestamp = self.timestamp)
del self.depth
else: self.depth -= 1
def name(self) -> str:
return self.__class__.__name__
def __str__(self) -> str:
return self.name()
def __repr__(self) -> str:
return self.__str__() + " at " + hex(id(self))
def setupSampling(self):
"""Setup sampling engine."""
RROMPyAssert(self._mode, message = "Cannot setup sampling engine.")
if not hasattr(self, "_POD") or self._POD is None: return
if self.POD:
SamplingEngine = SamplingEngineLinearPOD
else:
SamplingEngine = SamplingEngineLinear
self.samplingEngine = SamplingEngine(self.HFEngine,
verbosity = self.verbosity,
allowRepeatedSamples = True)
@property
def HFEngine(self):
"""Value of HFEngine."""
return self._HFEngine
@HFEngine.setter
def HFEngine(self, HFEngine):
raise RROMPyException("Cannot change HFEngine.")
@property
def mu0(self):
"""Value of mu0."""
return self._mu0
@mu0.setter
def mu0(self, mu0):
mu0 = checkParameter(mu0)
if not hasattr(self, "_mu0") or mu0 != self.mu0:
self.resetSamples()
self._mu0 = mu0
@property
def npar(self):
"""Number of parameters."""
return self.mu0.shape[1]
@property
def approxParameters(self):
"""Value of approximant parameters."""
return self._approxParameters
@approxParameters.setter
def approxParameters(self, approxParams):
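# Assignment proceeds in stages: unknown keys are purged, critical
# parameters (e.g. 'S') are applied through their property setters,
# then soft parameters (e.g. 'POD') follow. Keys being reassigned are
# first reset to their defaults; if a critical parameter still
# resolves to None, the approximant is flagged as RROMPy_FRAGILE.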
if not hasattr(self, "approxParameters"):
self._approxParameters = {}
approxParameters = purgeDict(approxParams, self.parameterList,
dictname = self.name() + ".approxParameters",
baselevel = 1)
keyList = list(approxParameters.keys())
for key in self.parameterListCritical:
if key in keyList:
setattr(self, "_" + key, self.parameterDefaultCritical[key])
for key in self.parameterListSoft:
if key in keyList:
setattr(self, "_" + key, self.parameterDefaultSoft[key])
fragile = False
for key in self.parameterListCritical:
if key in keyList:
val = approxParameters[key]
else:
val = getattr(self, "_" + key, None)
if val is None:
val = self.parameterDefaultCritical[key]
getattr(self.__class__, key, None).fset(self, val)
fragile = fragile or val is None
for key in self.parameterListSoft:
if key in keyList:
val = approxParameters[key]
else:
val = getattr(self, "_" + key, None)
if val is None:
val = self.parameterDefaultSoft[key]
getattr(self.__class__, key, None).fset(self, val)
if fragile:
self._mode = RROMPy_FRAGILE
@property
def POD(self):
"""Value of POD."""
return self._POD
@POD.setter
def POD(self, POD):
if hasattr(self, "_POD"): PODold = self.POD
else: PODold = -1
self._POD = POD
self._approxParameters["POD"] = self.POD
if PODold != self.POD:
self.samplingEngine = None
self.resetSamples()
@property
def S(self):
"""Value of S."""
return self._S
@S.setter
def S(self, S):
if not hasattr(S, "__len__"): S = [S]
if any([s <= 0 for s in S]):
raise RROMPyException("S must be positive.")
if hasattr(self, "_S") and self._S is not None: Sold = tuple(self.S)
else: Sold = -1
self._S = S
self._approxParameters["S"] = self.S
if Sold != tuple(self.S):
self.resetSamples()
@property
def homogeneized(self):
"""Value of homogeneized."""
return self._homogeneized
@homogeneized.setter
def homogeneized(self, homogeneized):
if not hasattr(self, "_homogeneized"):
self._homogeneized = None
if homogeneized != self.homogeneized:
self._homogeneized = homogeneized
self.resetSamples()
@property
def trainedModel(self):
"""Value of trainedModel."""
return self._trainedModel
@trainedModel.setter
def trainedModel(self, trainedModel):
self._trainedModel = trainedModel
if self._trainedModel is not None:
self._trainedModel.lastSolvedApproxReduced = emptyParameterList()
self._trainedModel.lastSolvedApprox = emptyParameterList()
self.lastSolvedApproxReduced = emptyParameterList()
self.lastSolvedApprox = emptyParameterList()
self.uApproxReduced = emptySampleList()
self.uApprox = emptySampleList()
def resetSamples(self):
if hasattr(self, "samplingEngine") and self.samplingEngine is not None:
self.samplingEngine.resetHistory()
else:
self.setupSampling()
self._mode = RROMPy_READY
def plotSamples(self, name : str = "u", save : str = None,
what : strLst = 'all', saveFormat : str = "eps",
saveDPI : int = 100, **figspecs):
"""
Do some nice plots of the samples.
Args:
name(optional): Name to be shown as title of the plots. Defaults to
'u'.
what(optional): Which plots to do. If list, can contain 'ABS',
'PHASE', 'REAL', 'IMAG'. If str, same plus wildcard 'ALL'.
Defaults to 'ALL'.
save(optional): Where to save plot(s). Defaults to None, i.e. no
saving.
saveFormat(optional): Format for saved plot(s). Defaults to "eps".
saveDPI(optional): DPI for saved plot(s). Defaults to 100.
figspecs(optional key args): Optional arguments for matplotlib
figure creation.
"""
RROMPyAssert(self._mode, message = "Cannot plot samples.")
self.samplingEngine.plotSamples(name = name, save = save, what = what,
saveFormat = saveFormat,
saveDPI = saveDPI,
**figspecs)
def outParaviewSamples(self, name : str = "u", filename : str = "out",
times : Np1D = None, what : strLst = 'all',
forceNewFile : bool = True, folders : bool = False,
filePW = None):
"""
Output samples to ParaView file.
Args:
name(optional): Base name to be used for data output.
filename(optional): Name of output file.
times(optional): Timestamps.
what(optional): Which plots to do. If list, can contain 'MESH',
'ABS', 'PHASE', 'REAL', 'IMAG'. If str, same plus wildcard
'ALL'. Defaults to 'ALL'.
forceNewFile(optional): Whether to create new output file.
folders(optional): Whether to split output in folders.
filePW(optional): Fenics File entity (for time series).
"""
RROMPyAssert(self._mode, message = "Cannot output samples.")
self.samplingEngine.outParaviewSamples(name = name,
filename = filename,
times = times, what = what,
forceNewFile = forceNewFile,
folders = folders,
filePW = filePW)
def outParaviewTimeDomainSamples(self, omegas : Np1D = None,
timeFinal : Np1D = None,
periodResolution : int = 20,
name : str = "u",
filename : str = "out",
forceNewFile : bool = True,
folders : bool = False):
"""
Output samples to ParaView file, converted to time domain.
Args:
omegas(optional): frequencies.
timeFinal(optional): final time of simulation.
periodResolution(optional): number of time steps per period.
name(optional): Base name to be used for data output.
filename(optional): Name of output file.
forceNewFile(optional): Whether to create new output file.
folders(optional): Whether to split output in folders.
"""
RROMPyAssert(self._mode, message = "Cannot output samples.")
self.samplingEngine.outParaviewTimeDomainSamples(omegas = omegas,
timeFinal = timeFinal,
periodResolution = periodResolution,
name = name, filename = filename,
forceNewFile = forceNewFile,
folders = folders)
def setSamples(self, samplingEngine):
"""Copy samplingEngine and samples."""
if self.verbosity >= 10:
verbosityDepth("INIT", "Transfering samples.",
timestamp = self.timestamp)
self.samplingEngine = copy(samplingEngine)
if self.verbosity >= 10:
verbosityDepth("DEL", "Done transfering samples.",
timestamp = self.timestamp)
def setTrainedModel(self, model):
"""Deepcopy approximation from trained model."""
if hasattr(model, "storeTrainedModel"):
verb = model.verbosity
model.verbosity = 0
fileOut = model.storeTrainedModel()
model.verbosity = verb
else:
try:
fileOut = getNewFilename("trained_model", "pkl")
pickleDump(model.data.__dict__, fileOut)
except Exception:
raise RROMPyException(("Failed to store model data. Parameter "
"model must have either "
"storeTrainedModel or "
"data.__dict__ properties."))
self.loadTrainedModel(fileOut)
osrm(fileOut)
@abstractmethod
def setupApprox(self):
"""
Setup approximant. (ABSTRACT)
Any specialization should include something like
if self.checkComputedApprox():
return
RROMPyAssert(self._mode, message = "Cannot setup approximant.")
...
self.trainedModel = ...
self.trainedModel.data = ...
self.trainedModel.data.approxParameters = copy(
self.approxParameters)
"""
pass
def checkComputedApprox(self) -> bool:
"""
Check if setup of new approximant is not needed.
Returns:
True if new setup is not needed. False otherwise.
"""
return self._mode == RROMPy_FRAGILE or (self.trainedModel is not None
and self.trainedModel.data.approxParameters == self.approxParameters)
def _pruneBeforeEval(self, mu:paramList, field:str, append:bool,
prune:bool) -> Tuple[paramList, Np1D, Np1D, bool]:
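# Split the requested parameters into those whose solutions are already
# stored (looked up in lastSolved<field> and reused through idx) and
# those still to be computed (muExtra, flagged by jExtra). Returns
# (muExtra, jExtra, idx, append), where append may be promoted to True
# once the kept solutions have been re-stored.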
mu, _ = checkParameterList(mu, self.npar)
idx = np.empty(len(mu), dtype = int)
if prune:
jExtra = np.zeros(len(mu), dtype = bool)
muExtra = emptyParameterList()
lastSolvedMus = getattr(self, "lastSolved" + field)
if (len(mu) > 0 and len(mu) == len(lastSolvedMus)
and mu == lastSolvedMus):
idx = np.arange(len(mu), dtype = int)
return muExtra, jExtra, idx, True
muKeep = copy(muExtra)
for j in range(len(mu)):
jPos = lastSolvedMus.find(mu[j])
if jPos is not None:
idx[j] = jPos
muKeep.append(mu[j])
else:
jExtra[j] = True
muExtra.append(mu[j])
if len(muKeep) > 0 and not append:
lastSolvedu = getattr(self, "u" + field)
idx[~jExtra] = getattr(self.__class__, "set" + field)(self,
muKeep, lastSolvedu[idx[~jExtra]], append)
append = True
else:
jExtra = np.ones(len(mu), dtype = bool)
muExtra = mu
return muExtra, jExtra, idx, append
def _setObject(self, mu:paramList, field:str, object:sampList,
append:bool) -> List[int]:
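# Store (or append) the given parameters and samples under
# lastSolved<field> and u<field>, returning the indices of the newly
# stored entries.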
newMus, _ = checkParameterList(mu, self.npar)
newObj = sampleList(object)
if append:
getattr(self, "lastSolved" + field).append(newMus)
getattr(self, "u" + field).append(newObj)
Ltot = len(getattr(self, "u" + field))
return list(range(Ltot - len(newObj), Ltot))
setattr(self, "lastSolved" + field, copy(newMus))
setattr(self, "u" + field, copy(newObj))
return list(range(len(getattr(self, "u" + field))))
def setHF(self, muHF:paramList, uHF:sampleList,
append : bool = False) -> List[int]:
"""Assign high fidelity solution."""
return self._setObject(muHF, "HF", uHF, append)
def evalHF(self, mu:paramList, append : bool = False,
prune : bool = True) -> List[int]:
"""
Compute high fidelity solution at arbitrary parameter.
Args:
mu: Target parameter.
append(optional): Whether to append new HF solutions to old ones.
prune(optional): Whether to remove duplicates of already appearing
HF solutions.
"""
muExtra, jExtra, idx, append = self._pruneBeforeEval(mu, "HF", append,
prune)
if len(muExtra) > 0:
newuHFs = self.samplingEngine.solveLS(muExtra,
homogeneized = self.homogeneized)
idx[jExtra] = self.setHF(muExtra, newuHFs, append)
return list(idx)
def setApproxReduced(self, muApproxR:paramList, uApproxR:sampleList,
append : bool = False) -> List[int]:
"""Assign high fidelity solution."""
return self._setObject(muApproxR, "ApproxReduced", uApproxR, append)
def evalApproxReduced(self, mu:paramList, append : bool = False,
prune : bool = True) -> List[int]:
"""
Evaluate reduced representation of approximant at arbitrary parameter.
Args:
mu: Target parameter.
append(optional): Whether to append new reduced solutions to old
ones.
prune(optional): Whether to remove duplicates of already appearing
reduced solutions.
"""
self.setupApprox()
muExtra, jExtra, idx, append = self._pruneBeforeEval(mu,
"ApproxReduced",
append, prune)
if len(muExtra) > 0:
newuApproxs = self.trainedModel.getApproxReduced(muExtra)
idx[jExtra] = self.setApproxReduced(muExtra, newuApproxs, append)
return list(idx)
def setApprox(self, muApprox:paramList, uApprox:sampleList,
append : bool = False) -> List[int]:
"""Assign high fidelity solution."""
return self._setObject(muApprox, "Approx", uApprox, append)
def evalApprox(self, mu:paramList, append : bool = False,
prune : bool = True) -> List[int]:
"""
Evaluate approximant at arbitrary parameter.
Args:
mu: Target parameter.
append(optional): Whether to append new approximate solutions to old
ones.
prune(optional): Whether to remove duplicates of already appearing
approximate solutions.
"""
self.setupApprox()
muExtra, jExtra, idx, append = self._pruneBeforeEval(mu, "Approx",
append, prune)
if len(muExtra) > 0:
newuApproxs = self.trainedModel.getApprox(muExtra)
idx[jExtra] = self.setApprox(muExtra, newuApproxs, append)
return list(idx)
def getHF(self, mu:paramList, homogeneized : bool = False,
append : bool = False, prune : bool = True) -> sampList:
"""
Get HF solution at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
HF solution.
"""
mu, _ = checkParameterList(mu, self.npar)
idx = self.evalHF(mu, append = append, prune = prune)
uHFs = self.uHF(idx)
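# Stored solutions follow self.homogeneized; add or subtract the lifted
# Dirichlet data so that the returned solutions match the requested
# homogeneized flag.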
if self.homogeneized and not homogeneized:
for j, m in enumerate(mu):
uHFs[j] += self.HFEngine.liftDirichletData(m)
if not self.homogeneized and homogeneized:
for j, m in enumerate(mu):
uHFs[j] -= self.HFEngine.liftDirichletData(m)
return uHFs
def getRHS(self, mu:paramList, homogeneized : bool = False,
duality : bool = True) -> sampList:
"""
Get linear system RHS at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
Linear system RHS.
"""
return self.HFEngine.residual(None, mu, homogeneized = homogeneized,
duality = duality)
def getApproxReduced(self, mu:paramList, append : bool = False,
prune : bool = True) -> sampList:
"""
Get reduced approximant at arbitrary parameter.
Args:
mu: Target parameter.
Returns:
Reduced approximant.
"""
mu, _ = checkParameterList(mu, self.npar)
idx = self.evalApproxReduced(mu, append = append, prune = prune)
uApproxRs = self.uApproxReduced(idx)
return uApproxRs
def getApprox(self, mu:paramList, homogeneized : bool = False,
append : bool = False, prune : bool = True) -> sampList:
"""
Get approximant at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
Approximant.
"""
mu, _ = checkParameterList(mu, self.npar)
idx = self.evalApprox(mu, append = append, prune = prune)
uApproxs = self.uApprox(idx)
if self.homogeneized and not homogeneized:
for j, m in enumerate(mu):
uApproxs[j] += self.HFEngine.liftDirichletData(m)
if not self.homogeneized and homogeneized:
for j, m in enumerate(mu):
uApproxs[j] -= self.HFEngine.liftDirichletData(m)
return uApproxs
def getRes(self, mu:paramList, homogeneized : bool = False,
duality : bool = True) -> sampList:
"""
Get residual at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
Approximant residual.
"""
return self.HFEngine.residual(self.getApprox(mu, homogeneized), mu,
homogeneized = homogeneized,
duality = duality)
def getErr(self, mu:paramList, homogeneized : bool = False,
append : bool = False, prune : bool = True) -> sampList:
"""
Get error at arbitrary parameter.
Args:
mu: Target parameter.
homogeneized(optional): Whether to remove Dirichlet BC. Defaults to
False.
Returns:
Approximant error.
"""
return (self.getApprox(mu, homogeneized, append = append, prune = prune)
- self.getHF(mu, homogeneized, append = append, prune = prune))
def getPoles(self) -> Np1D:
"""
Obtain approximant poles.
Returns:
Numpy complex vector of poles.
"""
self.setupApprox()
if self.verbosity >= 20:
verbosityDepth("INIT", "Computing poles of model.",
timestamp = self.timestamp)
poles = self.trainedModel.getPoles()
if self.verbosity >= 20:
verbosityDepth("DEL", "Done computing poles.",
timestamp = self.timestamp)
return poles
def storeTrainedModel(self, filenameBase : str = "trained_model",
forceNewFile : bool = True) -> str:
"""Store trained reduced model to file."""
self.setupApprox()
if self.verbosity >= 20:
verbosityDepth("INIT", "Storing trained model to file.",
timestamp = self.timestamp)
if forceNewFile:
filename = getNewFilename(filenameBase, "pkl")
else:
filename = "{}.pkl".format(filenameBase)
pickleDump(self.trainedModel.data.__dict__, filename)
if self.verbosity >= 20:
verbosityDepth("DEL", "Done storing trained model.",
timestamp = self.timestamp)
return filename
def loadTrainedModel(self, filename:str):
"""Load trained reduced model from file."""
if self.verbosity >= 20:
verbosityDepth("INIT", "Loading pre-trained model from file.",
timestamp = self.timestamp)
datadict = pickleLoad(filename)
name = datadict.pop("name")
if name == "TrainedModelPade":
from rrompy.reduction_methods.trained_model import \
TrainedModelPade as tModel
elif name == "TrainedModelRB":
from rrompy.reduction_methods.trained_model import \
TrainedModelRB as tModel
else:
raise RROMPyException(("Trained model name not recognized. "
"Loading failed."))
self.mu0 = datadict.pop("mu0")
from rrompy.reduction_methods.trained_model import TrainedModelData
trainedModel = tModel()
trainedModel.verbosity = self.verbosity
trainedModel.timestamp = self.timestamp
data = TrainedModelData(name, self.mu0, datadict.pop("projMat"),
datadict.pop("rescalingExp"))
if "mus" in datadict:
data.mus = datadict.pop("mus")
approxParameters = datadict.pop("approxParameters")
data.approxParameters = copy(approxParameters)
if "sampler" in approxParameters:
self._approxParameters["sampler"] = approxParameters.pop("sampler")
self.approxParameters = copy(approxParameters)
if "mus" in data.__dict__:
self.mus = copy(data.mus)
if name == "TrainedModelPade":
self.scaleFactor = datadict.pop("scaleFactor")
data.scaleFactor = self.scaleFactor
for key in datadict:
setattr(data, key, datadict[key])
trainedModel.data = data
self.trainedModel = trainedModel
self._mode = RROMPy_FRAGILE
if self.verbosity >= 20:
verbosityDepth("DEL", "Done loading pre-trained model.",
timestamp = self.timestamp)
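# Illustrative usage sketch (MyHFEngine and SomeApproximant are hypothetical
# placeholders; GenericApproximant is abstract and must be used through a
# concrete subclass such as a Pade or RB approximant):
#
#     engine = MyHFEngine(...)
#     approx = SomeApproximant(engine, mu0 = 1.,
#                              approxParameters = {"POD": True, "S": 10})
#     approx.setupApprox()                # build the reduced model
#     uApp = approx.getApprox([1.05])     # evaluate the approximant
#     errNorm = approx.normErr([1.05])    # error norm against the HF solution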
