forked from hyperopt/hyperopt-sklearn
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdiscriminant_analysis.py
More file actions
108 lines (84 loc) · 3.78 KB
/
discriminant_analysis.py
File metadata and controls
108 lines (84 loc) · 3.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
from hpsklearn.components._base import validate
from hyperopt.pyll import scope, Apply
from hyperopt import hp
from sklearn import discriminant_analysis
import numpy.typing as npt
import numpy as np
import typing
@scope.define
def sklearn_LinearDiscriminantAnalysis(*args, **kwargs):
    """
    Pyll-scope constructor: build a
    sklearn.discriminant_analysis.LinearDiscriminantAnalysis estimator,
    forwarding all positional and keyword arguments unchanged.
    """
    estimator = discriminant_analysis.LinearDiscriminantAnalysis(*args, **kwargs)
    return estimator
@scope.define
def sklearn_QuadraticDiscriminantAnalysis(*args, **kwargs):
    """
    Pyll-scope constructor: build a
    sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis estimator,
    forwarding all positional and keyword arguments unchanged.
    """
    estimator = discriminant_analysis.QuadraticDiscriminantAnalysis(*args, **kwargs)
    return estimator
def _discriminant_analysis_tol(name: str):
    """
    Search space for the 'tol' parameter.

    Samples log-uniformly over [1e-5, 1e-2] so that each order of
    magnitude is explored with equal probability.

    Args:
        name: unique hyperopt label for this hyperparameter | str
    """
    low = np.log(1e-5)
    high = np.log(1e-2)
    return hp.loguniform(name, low, high)
def _discriminant_analysis_hp_space(
name_func,
priors: npt.ArrayLike = None,
store_covariance: bool = False,
tol: float = None,
**kwargs
):
"""
Common hyper parameter search space
linear discriminant analysis
quadratic discriminant analysis
"""
hp_space = dict(
priors=priors,
store_covariance=store_covariance,
tol=_discriminant_analysis_tol(name_func("tol")) if tol is None else tol,
**kwargs
)
return hp_space
@validate(params=["solver"],
          validation_test=lambda param: not isinstance(param, str) or param in ["svd", "lsqr", "eigen"],
          msg="Invalid parameter '%s' with value '%s'. Value must be in ['svd', 'lsqr', 'eigen'].")
@validate(params=["shrinkage"],
          validation_test=lambda param: not isinstance(param, str) or param == "auto",
          msg="Invalid parameter '%s' with value '%s'. Value must be 'auto' or float.")
def linear_discriminant_analysis(name: str,
                                 solver: typing.Union[str, Apply] = None,
                                 shrinkage: typing.Union[float, str, Apply] = None,
                                 n_components: int = None,
                                 covariance_estimator: callable = None,
                                 **kwargs):
    """
    Return a pyll graph with hyperparameters that will construct
    a sklearn.discriminant_analysis.LinearDiscriminantAnalysis model.

    Args:
        name: name | str
        solver: solver to use; searched over 'svd'/'lsqr'/'eigen' when None | str
        shrinkage: shrinkage parameter | str or float
        n_components: number of components | int
        covariance_estimator: covariance estimator to use | callable

    See help(hpsklearn.components.discriminant_analysis._discriminant_analysis_hp_space)
    for info on additional available discriminant analysis arguments.
    """
    def _name(msg):
        return f"{name}.linear_discriminant_analysis_{msg}"

    # Shared LDA/QDA parameters (priors, store_covariance, tol).
    hp_space = _discriminant_analysis_hp_space(_name, **kwargs)

    # Default: search over all three supported solvers.
    if solver is None:
        solver = hp.choice(_name("solver"), ["svd", "lsqr", "eigen"])

    hp_space.update(
        solver=solver,
        shrinkage=shrinkage,
        n_components=n_components,
        covariance_estimator=covariance_estimator,
    )
    return scope.sklearn_LinearDiscriminantAnalysis(**hp_space)
def quadratic_discriminant_analysis(name: str, reg_param: typing.Union[float, Apply] = None, **kwargs):
    """
    Return a pyll graph with hyperparameters that will construct
    a sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis model.

    Args:
        name: name | str
        reg_param: regularization parameter; searched over [0.0, 0.5] when None | float

    See help(hpsklearn.components.discriminant_analysis._discriminant_analysis_hp_space)
    for info on additional available discriminant analysis arguments.
    """
    def _name(msg):
        return f"{name}.quadratic_discriminant_analysis_{msg}"

    # Shared LDA/QDA parameters (priors, store_covariance, tol).
    hp_space = _discriminant_analysis_hp_space(_name, **kwargs)

    # Default: uniform search over [0.0, 0.5] for the regularizer.
    if reg_param is None:
        reg_param = hp.uniform(_name("reg_param"), 0.0, 0.5)
    hp_space["reg_param"] = reg_param

    return scope.sklearn_QuadraticDiscriminantAnalysis(**hp_space)