# exponential
## DiminishingExponential
Bases: OneToOneInversableDerivableTransformer
Transforms a series by applying the exponential diminishing return equation (same as in Masster).
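The applied transformation, as given in the class docstring, is:

```math
1 - \exp\left(\frac{-serie \times 100}{\gamma \times max}\right)
```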
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `gamma` | `float` | Strictly positive parameter of the exponential diminishing function. The concavity of the response curve decreases when `gamma` increases. | *required* |
| `max` | `float` | Symbolic maximum: the value above which the variable is in the saturation area. If `None`, it is set to the maximum value of the training series at fit time. | `None` |
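A minimal usage sketch, assuming the class is importable from the module path shown below and that the base-class `fit`/`transform` hooks pass the numpy array through unchanged:

```python
import numpy as np

from eki_mmo_equations.one_to_one_transformations.diminishing_return.exponential import (
    DiminishingExponential,
)

# Example input series; max is left to None so fit() infers it from the data.
serie = np.array([0.0, 50.0, 100.0, 200.0, 400.0])

transformer = DiminishingExponential(gamma=30.0)
transformer.fit(serie)  # sets transformer.max to serie.max() == 400.0

saturated = transformer.transform(serie)              # values in [0, 1), rising with the input
recovered = transformer.inverse_transform(saturated)  # round-trips back to the original serie
marginal = transformer.derivative_transform(serie)    # slope of the response curve at each point
```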
Source code in eki_mmo_equations/one_to_one_transformations/diminishing_return/exponential.py
class DiminishingExponential(OneToOneInversableDerivableTransformer):
    """Transform series by applying the Exponential diminishing return equation (same as in Masster).

    ```math
    1 - \\exp(\\frac{-serie \\times 100 }{\\gamma \\times max})
    ```

    Args:
        - gamma (float): Strictly positive parameter of the exponential diminishing function.
            The concavity of the response curve decreases when gamma increases.
        - max (float): Symbolic maximum, which represents the value above which the variable is in the area
            of saturation (default to None and then set to the maximum value of the training serie).
    """

    def __init__(self, gamma, max=None) -> None:
        self.gamma = gamma
        self.max = max

    @property
    def parameters(self) -> Dict[str, float]:
        return self.__dict__

    # ------- METHODS -------
    def fit(self, serie: np.ndarray, y=None):
        if self.max is None:
            if np.any(serie[serie > 0]):
                self.max = serie.max()
            else:
                self.max = serie.min() or 1
        return super().fit(serie, y)

    def transform(self, serie: np.ndarray, copy=False) -> np.ndarray:
        serie = super().transform(serie, copy)
        return self._transformer(serie, self.gamma, self.max)

    def inverse_transform(self, serie: np.ndarray, copy=False) -> np.ndarray:
        serie = super().inverse_transform(serie, copy)
        return self._inverse_transformer(serie, self.gamma, self.max)

    def derivative_transform(self, serie: np.ndarray, copy=False) -> np.ndarray:
        serie = super().derivative_transform(serie, copy)
        return self._derivative_transformer(serie, self.gamma, self.max)

    # ------- TRANSFORMERS -------
    @staticmethod
    def _transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        scale = max / 100
        d = serie / scale
        return 1 - np.exp(-d / gamma)

    @staticmethod
    def _inverse_transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        return -(gamma * max / 100) * np.log(1 - serie)

    @staticmethod
    def _derivative_transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        scale = max / 100
        d = serie / scale
        return (100 / (gamma * max)) * np.exp(-d / gamma)

    # ------- CHECKERS -------
    def check_params(self, serie: np.ndarray):
        """Check if parameters respect their application scope."""
        if self.gamma <= 0:
            raise ParameterScopeException(f"Parameter gamma must be strictly positive, not {self.gamma}.")
        if self.max <= 0:
            raise ParameterScopeException(f"Parameter max must be strictly positive, not {self.max}.")
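As a sanity check on the static transformers above, the inverse and derivative formulas can be verified with plain numpy (a standalone sketch that re-implements the equations rather than importing the library):

```python
import numpy as np

gamma, max_ = 30.0, 400.0
x = np.linspace(0.0, 400.0, 9)

forward = 1 - np.exp(-x * 100 / (gamma * max_))
inverse = -(gamma * max_ / 100) * np.log(1 - forward)
analytic = (100 / (gamma * max_)) * np.exp(-x * 100 / (gamma * max_))

# _inverse_transformer undoes _transformer.
assert np.allclose(inverse, x)

# _derivative_transformer matches a finite-difference slope of the forward curve.
eps = 1e-6
finite_diff = ((1 - np.exp(-(x + eps) * 100 / (gamma * max_))) - forward) / eps
assert np.allclose(analytic, finite_diff, atol=1e-6)
```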
### check_params(serie)
Check if parameters respect their application scope.
Source code in eki_mmo_equations/one_to_one_transformations/diminishing_return/exponential.py
def check_params(self, serie: np.ndarray):
    """Check if parameters respect their application scope."""
    if self.gamma <= 0:
        raise ParameterScopeException(f"Parameter gamma must be strictly positive, not {self.gamma}.")
    if self.max <= 0:
        raise ParameterScopeException(f"Parameter max must be strictly positive, not {self.max}.")
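A short sketch of what `check_params` guards against; the exception is caught generically here because the import path of `ParameterScopeException` is not shown on this page:

```python
import numpy as np

from eki_mmo_equations.one_to_one_transformations.diminishing_return.exponential import (
    DiminishingExponential,
)

serie = np.array([1.0, 2.0, 3.0])

bad = DiminishingExponential(gamma=-1.0, max=100.0)
try:
    bad.check_params(serie)
except Exception as exc:  # ParameterScopeException in practice
    print(exc)  # Parameter gamma must be strictly positive, not -1.0.
```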
## DiminishingExponentialUnscale
Bases: DiminishingExponential
Transforms a series by applying the exponential diminishing return equation (same as in Masster), multiplied by the `max` parameter.
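The unscaled variant returns the same curve multiplied by `max`, as given in the class docstring:

```math
max \times \left(1 - \exp\left(\frac{-serie \times 100}{\gamma \times max}\right)\right)
```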
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `gamma` | `float` | Strictly positive parameter of the exponential diminishing function. The concavity of the response curve decreases when `gamma` increases. | *required* |
| `max` | `float` | Symbolic maximum: the value above which the variable is in the saturation area. If `None`, it is set to the maximum value of the training series at fit time. | `None` |
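A minimal sketch contrasting the scaled and unscaled variants, assuming both classes are importable from the module path shown below:

```python
import numpy as np

from eki_mmo_equations.one_to_one_transformations.diminishing_return.exponential import (
    DiminishingExponential,
    DiminishingExponentialUnscale,
)

serie = np.array([0.0, 100.0, 200.0, 400.0])

scaled = DiminishingExponential(gamma=30.0, max=400.0)
unscaled = DiminishingExponentialUnscale(gamma=30.0, max=400.0)
scaled.fit(serie)
unscaled.fit(serie)

scaled.transform(serie)    # saturates towards 1
unscaled.transform(serie)  # same curve multiplied by max, so it saturates towards 400
```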
Source code in eki_mmo_equations/one_to_one_transformations/diminishing_return/exponential.py
class DiminishingExponentialUnscale(DiminishingExponential):
    """Transform series by applying the Exponential diminishing return equation (same as in Masster)
    multiply by the max parameter.

    ```math
    max \\times (1 - \\exp(\\frac{-serie \\times 100 }{\\gamma \\times max}))
    ```

    Args:
        - gamma (float): Strictly positive parameter of the exponential diminishing function.
            The concavity of the response curve decreases when gamma increases.
        - max (float): Symbolic maximum, which represents the value above which the variable is in the area
            of saturation (default to None and then set to the maximum value of the training serie).
    """

    @staticmethod
    def _transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        return max * super(DiminishingExponentialUnscale, DiminishingExponentialUnscale)._transformer(
            serie=serie, gamma=gamma, max=max
        )

    @staticmethod
    def _inverse_transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        return -(gamma * max / 100) * np.log(1 - serie / max)

    @staticmethod
    def _derivative_transformer(serie: np.ndarray, gamma, max) -> np.ndarray:
        return max * super(DiminishingExponentialUnscale, DiminishingExponentialUnscale)._derivative_transformer(
            serie=serie, gamma=gamma, max=max
        )

    # ------- CHECKERS -------
    def check_params(self, serie: np.ndarray):
        """Check if parameters respect their application scope."""
        if self.gamma <= 0:
            raise ParameterScopeException(f"Parameter gamma must be strictly positive, not {self.gamma}.")
        if self.max <= 0:
            raise ParameterScopeException(f"Parameter max must be strictly positive, not {self.max}.")
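The `_transformer` and `_derivative_transformer` overrides above reach the parent's staticmethods through two-argument `super`; a self-contained toy illustration of that pattern (the classes here are hypothetical, not part of the library):

```python
class Base:
    @staticmethod
    def _transformer(x, factor):
        return x * factor


class Unscaled(Base):
    @staticmethod
    def _transformer(x, factor):
        # Inside a staticmethod there is no instance or class argument for
        # zero-argument super() to pick up, so super(Unscaled, Unscaled) is used
        # to resolve the attribute on the parent class explicitly.
        return 10 * super(Unscaled, Unscaled)._transformer(x, factor)


print(Unscaled._transformer(2, 3))  # 10 * (2 * 3) == 60
```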
### check_params(serie)
Check if parameters respect their application scope.
Source code in eki_mmo_equations/one_to_one_transformations/diminishing_return/exponential.py
def check_params(self, serie: np.ndarray):
    """Check if parameters respect their application scope."""
    if self.gamma <= 0:
        raise ParameterScopeException(f"Parameter gamma must be strictly positive, not {self.gamma}.")
    if self.max <= 0:
        raise ParameterScopeException(f"Parameter max must be strictly positive, not {self.max}.")