55
@dataclass
class MultiONetConfig:
    """Model config for MultiONet for the primordial dataset.

    Hyperparameters taken from tuning run ``primordial_final``, trial 196.
    """

    # Learning-rate schedule / optimizer selection (resolved by name elsewhere).
    scheduler: str = "schedulefree"
    optimizer: str = "AdamW"
    # SmoothL1 with tuned transition point `beta` (passed to the loss at build time
    # — presumably; verify against the training-loop wiring).
    loss_function: nn.Module = nn.SmoothL1Loss()
    beta: float = 0.845
    # Network topology.
    branch_hidden_layers: int = 3
    hidden_size: int = 160
    output_factor: int = 130
    trunk_hidden_layers: int = 2
    # Optimization hyperparameters.
    learning_rate: float = 9.6e-04
    regularization_factor: float = 0.283
    activation: nn.Module = nn.LeakyReLU()
2223
@dataclass
class LatentNeuralODEConfig:
    """Model config for LatentNeuralODE for the primordial dataset.

    Hyperparameters taken from tuning run ``primordial_final``, trial 243.
    """

    # Learning-rate schedule / optimizer selection (resolved by name elsewhere).
    scheduler: str = "schedulefree"
    optimizer: str = "SGD"
    loss_function: nn.Module = nn.MSELoss()
    # SGD momentum term.
    momentum: float = 0.085
    # Latent-space and encoder/decoder topology.
    latent_features: int = 9
    coder_layers: int = 1
    coder_width: int = 580
    # Tanh regularization on the ODE right-hand side.
    ode_tanh_reg: bool = True
    ode_layers: int = 6
    ode_width: int = 480
    # Optimization hyperparameters.
    learning_rate: float = 9.68e-5
    regularization_factor: float = 0.0544
    activation: nn.Module = nn.SiLU()
4143
@dataclass
class FullyConnectedConfig:
    """Model config for FullyConnected for the primordial dataset.

    Hyperparameters taken from tuning run ``primordial_final``, trial 174.

    NOTE(review): this config carries ``degree``/``latent_features``/
    ``coder_layers``/``coder_width`` (LatentPoly-style fields) while
    ``LatentPolyConfig`` carries ``hidden_size``/``num_hidden_layers``
    (FCNN-style fields) — the two bodies may have been swapped; confirm
    against the tuning results before relying on them.
    """

    # Learning-rate schedule / optimizer selection (resolved by name elsewhere).
    scheduler: str = "poly"
    optimizer: str = "adam"
    loss_function: nn.Module = nn.MSELoss()
    momentum: float = 0.0132
    # Topology.
    degree: int = 4
    latent_features: int = 8
    coder_layers: int = 2
    coder_width: int = 470
    # Optimization hyperparameters.
    learning_rate: float = 1.77e-04
    regularization_factor: float = 9.20e-03
    activation: nn.Module = nn.ReLU()
5761
@dataclass
class LatentPolyConfig:
    """Model config for LatentPoly for the primordial dataset.

    Hyperparameters taken from tuning run ``primordial_final``, trial 31.

    NOTE(review): ``hidden_size``/``num_hidden_layers`` look like
    FullyConnected-style fields, while ``FullyConnectedConfig`` carries
    LatentPoly-style fields (``degree``, ``coder_width``, ...) — the two
    bodies may have been swapped; confirm against the tuning results.
    """

    # Learning-rate schedule / optimizer selection (resolved by name elsewhere).
    scheduler: str = "schedulefree"
    optimizer: str = "SGD"
    # SmoothL1 with tuned transition point `beta` (passed to the loss at build time
    # — presumably; verify against the training-loop wiring).
    loss_function: nn.Module = nn.SmoothL1Loss()
    beta: float = 3.73
    # Topology.
    hidden_size: int = 470
    num_hidden_layers: int = 5
    # Optimization hyperparameters.
    learning_rate: float = 1.27e-03
    regularization_factor: float = 3.23e-05
    activation: nn.Module = nn.ELU()
76+
77+
78+ # @dataclass
79+ # class MultiONetConfig:
80+ # """Model config for MultiONet for the primordial dataset"""
81+
82+ # # Naive configuration
83+ # loss_function: nn.Module = nn.MSELoss()
84+ # optimizer: str = "adamw"
85+ # scheduler: str = "schedulefree"
86+ # branch_hidden_layers: int = 5
87+ # hidden_size: int = 256
88+ # output_factor: int = 200
89+ # trunk_hidden_layers: int = 5
90+ # learning_rate: float = 1e-04
91+ # regularization_factor: float = 1e-04
92+ # activation: nn.Module = nn.ReLU()
93+
94+
95+ # @dataclass
96+ # class LatentNeuralODEConfig:
97+ # """Model config for LatentNeuralODE for the primordial dataset"""
98+
99+ # # Naive configuration
100+ # loss_function: nn.Module = nn.MSELoss()
101+ # optimizer: str = "adamw"
102+ # scheduler: str = "schedulefree"
103+ # ode_tanh_reg: bool = False
104+ # latent_features: int = 10
105+ # coder_layers: int = 3
106+ # coder_width: int = 256
107+ # ode_layers: int = 3
108+ # ode_width: int = 256
109+ # learning_rate: float = 1e-04
110+ # regularization_factor: float = 1e-04
111+ # activation: nn.Module = nn.ReLU()
112+
113+
114+ # @dataclass
115+ # class FullyConnectedConfig:
116+ # """Model config for FullyConnected for the primordial dataset"""
117+
118+ # # Naive configuration
119+ # loss_function: nn.Module = nn.MSELoss()
120+ # optimizer: str = "adamw"
121+ # scheduler: str = "schedulefree"
122+ # # other params from primordial_tuning, trial 63
123+ # hidden_size: int = 256
124+ # num_hidden_layers: int = 5
125+ # learning_rate: float = 1e-04
126+ # regularization_factor: float = 1e-04
127+ # activation: nn.Module = nn.ReLU()
128+
129+
130+ # @dataclass
131+ # class LatentPolyConfig:
132+ # """Model config for LatentPoly for the primordial dataset"""
133+
134+ # # Naive configuration
135+ # loss_function: nn.Module = nn.MSELoss()
136+ # optimizer: str = "adamw"
137+ # scheduler: str = "schedulefree"
138+ # degree: int = 3
139+ # latent_features: int = 10
140+ # coder_layers: int = 3
141+ # coder_width: int = 256
142+ # learning_rate: float = 1e-04
143+ # regularization_factor: float = 1e-04
144+ # activation: nn.Module = nn.ReLU()
73145
74146
75147# @dataclass
76148# class MultiONetConfig:
77- # """Model config for MultiONet for the simple_ode dataset"""
149+ # """Model config for MultiONet for the primordial dataset"""
78150
79151# # primordial_tuning, trial 18
80152# scheduler: str = "poly"
@@ -93,7 +165,7 @@ class LatentPolyConfig:
93165
94166# @dataclass
95167# class LatentNeuralODEConfig:
96- # """Model config for LatentNeuralODE for the simple_ode dataset"""
168+ # """Model config for LatentNeuralODE for the primordial dataset"""
97169
98170# # primordial_tuning, trial 186
99171# scheduler: str = "poly"
@@ -114,7 +186,7 @@ class LatentPolyConfig:
114186
115187# @dataclass
116188# class FullyConnectedConfig:
117- # """Model config for FullyConnected for the simple_ode dataset"""
189+ # """Model config for FullyConnected for the primordial dataset"""
118190
119191# # primordial_tuning, trial 63
120192# scheduler: str = "poly"
@@ -131,7 +203,7 @@ class LatentPolyConfig:
131203
132204# @dataclass
133205# class LatentPolyConfig:
134- # """Model config for LatentPoly for the simple_ode dataset"""
206+ # """Model config for LatentPoly for the primordial dataset"""
135207
136208# # primordial_tuning, trial 176
137209# scheduler: str = "poly"
0 commit comments