# _mn_optimizers.py
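"""Benchmark sweep over multi-objective optimizers and surrogate models.

For each of three trials, a preflight ``interface.sopt_modeling`` experiment
provides a shared set of initial samples. Every combination of optimizer
("age", "smpso") and surrogate configuration (GPR, MEGP, and joint models
with ResNet or FT-Transformer backbones) is then launched as its own
experiment, seeded with those initial samples.
"""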
import os

from machinable import Interface, get


class _MnOptimizers(Interface):
    def launch(self):
        from models.ops import import_initial_samples

        # Repeat the full sweep over three independent trial scopes.
        for trial in range(3):
            with get("machinable.scope", {"trial": trial}):
                for optimizer in ["age", "smpso"]:
                    # Preflight experiment that holds the shared initial
                    # samples; n_epochs=0 skips surrogate training.
                    initial = get(
                        "interface.sopt_modeling",
                        [
                            "~from_protocol()",
                            {
                                "dopt_params.surrogate_method_name": "gpr",
                                "dopt_params.n_epochs": 0,
                            },
                        ],
                    )
                    assert initial.cached(), "initial samples must exist"
                    initial.save_attribute("preflight", True)
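                    # Group every optimizer run under the preflight experiment.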
with get("machinable.scope", {"parent": initial.hash}):
for version in [
{
"dopt_params.surrogate_method_name": "gpr",
},
{
"dopt_params.surrogate_method_name": "megp",
},
"~joint_model(mode='o', backbone='resnet')",
"~joint_model(mode='c+o', backbone='resnet')",
"~joint_model(mode='o', backbone='fttransformer')",
"~joint_model(mode='c+o', backbone='fttransformer')",
]:
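                            # One experiment per surrogate/backbone variant,
                            # paired with the current optimizer.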
                            e = get(
                                "interface.sopt_modeling",
                                [
                                    "~from_protocol()",
                                    version,
                                    {
                                        "dopt_params.optimizer_name": optimizer,
                                    },
                                ],
                            ).launch()
                            # Only commit results when LAUNCH is set and no
                            # output file exists yet.
                            if os.environ.get("LAUNCH") and not os.path.isfile(
                                e.output_filepath
                            ):
                                e.commit()
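                                # Seed the run with the preflight's samples.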
import_initial_samples(
file_path=e.output_filepath,
source=initial.output_filepath,
num=e.num_initial_samples,
opt_id=e.config.dopt_params.opt_id,
feature_dtypes=e.feature_dtypes,
param_names=e.parameter_names,
)
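

# Minimal usage sketch (the module path `_mn_optimizers` and the LAUNCH
# environment variable as a commit switch are assumptions inferred from the
# code above, not documented project conventions):
#
#     import os
#     from machinable import get
#
#     os.environ["LAUNCH"] = "1"  # enable commit + initial-sample import
#     get("_mn_optimizers").launch()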