Fix HPO evaluation bug (#645)
* fix eval_automl_metric bug causing a val_loss inconsistency
* update the starting point search space to continuous
* shorten the notebook
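The second bullet refers to letting hyperparameters such as warmup_ratio, weight_decay, and adam_epsilon, which the starting-point fixtures below previously pinned to fixed values, be tuned over continuous ranges. A minimal sketch of what a continuous search-space override could look like, assuming FLAML's `flaml.tune` sampling domains and the `custom_hp` fit argument; the ranges themselves are illustrative and not taken from this commit:

```python
from flaml import tune

# Illustrative continuous domains (assumed custom_hp format; ranges are made up).
custom_hp = {
    "transformer": {
        "warmup_ratio": {"domain": tune.uniform(0.0, 0.3), "init_value": 0.0},
        "weight_decay": {"domain": tune.uniform(0.0, 0.3), "init_value": 0.0},
        "adam_epsilon": {"domain": tune.loguniform(1e-8, 1e-6), "init_value": 1e-6},
    }
}
# Passed along with the other settings, e.g.:
# automl.fit(X_train=X_train, y_train=y_train, custom_hp=custom_hp, **automl_settings)
```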
@@ -24,7 +24,6 @@ def test_custom_hp_nlp():
    automl_settings["fit_kwargs_by_estimator"] = {
        "transformer": {
            "output_dir": "test/data/output/",
            "ckpt_per_epoch": 1,
            "fp16": False,
        }
    }
@@ -1,5 +1,5 @@
 {"class": "transformer_ms",
 "hyperparameters": {"learning_rate": 1e-5, "num_train_epochs": 1.0, "per_device_train_batch_size": 8,
-"warmup_ratio": 0.0, "weight_decay": 0.0, "adam_epsilon": 1e-6, "seed": 44, "global_max_steps": 101,
+"seed": 44, "global_max_steps": 101,
 "model_path": "google/electra-base-discriminator"}
 }
@@ -1,5 +1,5 @@
 {"class": "transformer_ms",
 "hyperparameters": {"learning_rate": 1e-5, "num_train_epochs": 1.0, "per_device_train_batch_size": 8,
-"warmup_ratio": 0.0, "weight_decay": 0.0, "adam_epsilon": 1e-6, "seed": 43, "global_max_steps": 100,
+"seed": 43, "global_max_steps": 100,
 "model_path": "google/electra-base-discriminator"}
 }
@@ -1,5 +1,5 @@
 {"class": "transformer_ms",
 "hyperparameters": {"learning_rate": 1e-5, "num_train_epochs": 1.0, "per_device_train_batch_size": 8,
-"warmup_ratio": 0.0, "weight_decay": 0.0, "adam_epsilon": 1e-6, "seed": 41, "global_max_steps": 102,
+"seed": 41, "global_max_steps": 102,
 "model_path": "google/electra-base-discriminator" }
 }
@@ -1,5 +1,5 @@
 {"class": "transformer_ms",
 "hyperparameters": {"learning_rate": 1e-5, "num_train_epochs": 1.0, "per_device_train_batch_size": 8,
-"warmup_ratio": 0.0, "weight_decay": 0.0, "adam_epsilon": 1e-6, "seed": 42, "global_max_steps": 103,
+"seed": 42, "global_max_steps": 103,
 "model_path": "google/electra-base-discriminator" }
 }
@@ -1,5 +1,5 @@
 {"class": "transformer_ms",
 "hyperparameters": {"learning_rate": 1e-5, "num_train_epochs": 1.0, "per_device_train_batch_size": 8,
-"warmup_ratio": 0.0, "weight_decay": 0.0, "adam_epsilon": 1e-6, "seed": 40, "global_max_steps": 105,
+"seed": 40, "global_max_steps": 105,
 "model_path": "google/electra-base-discriminator"}
 }
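Each of the five hunks above edits a one-record JSON fixture describing a previously logged configuration; the files differ only in seed and global_max_steps. A minimal sketch of reading such a record; the file path is hypothetical and the key names come from the hunks above:

```python
import json

# Hypothetical fixture path; the real paths are not shown in this diff.
with open("test/data/console_output_1.json") as fin:
    record = json.load(fin)

assert record["class"] == "transformer_ms"
hp = record["hyperparameters"]
# Fields visible in the hunks above.
print(hp["seed"], hp["global_max_steps"], hp["model_path"])
```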
@@ -27,6 +27,21 @@ def test_hf_data():
     except requests.exceptions.HTTPError:
         return
 
+    import json
+
+    with open("seqclass.log", "r") as fin:
+        for line in fin:
+            each_log = json.loads(line.strip("\n"))
+            if "validation_loss" in each_log:
+                val_loss = each_log["validation_loss"]
+                min_inter_result = min(
+                    each_dict.get("eval_automl_metric", sys.maxsize)
+                    for each_dict in each_log["logged_metric"]["intermediate_results"]
+                )
+
+                if min_inter_result != sys.maxsize:
+                    assert val_loss == min_inter_result
+
     automl = AutoML()
 
     automl_settings.pop("max_iter", None)
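The added block reads seqclass.log line by line and asserts that each trial's reported validation_loss equals the smallest eval_automl_metric among its intermediate results. A sketch of the log-line shape this implies; the numeric values are made up, only the key names come from the hunk above:

```python
import sys

# One line of seqclass.log as the added assertion appears to expect it.
example_log_line = {
    "validation_loss": 0.42,
    "logged_metric": {
        "intermediate_results": [
            {"eval_automl_metric": 0.55},
            {"eval_automl_metric": 0.42},  # the minimum should equal validation_loss
            {"eval_automl_metric": 0.47},
        ]
    },
}

best = min(
    d.get("eval_automl_metric", sys.maxsize)
    for d in example_log_line["logged_metric"]["intermediate_results"]
)
assert example_log_line["validation_loss"] == best
```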
@@ -6,7 +6,7 @@ from utils import get_toy_data_summarization, get_automl_settings
 
 @pytest.mark.skipif(
     sys.platform == "darwin" or sys.version < "3.7",
-    reason="do not run on mac os or py < 3.7",
+    reason="do not run on mac os or py3.6",
 )
 def test_summarization():
     # TODO: manual test for how effective postprocess_seq2seq_prediction_label is
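A side note unrelated to this commit: comparing `sys.version` as a string is fragile, because `"3.10" < "3.7"` is lexicographically true, so a Python 3.10 interpreter would also be skipped by this condition. A hedged alternative is to compare `sys.version_info` tuples instead:

```python
import sys

import pytest

@pytest.mark.skipif(
    # Tuple comparison avoids the lexicographic pitfall of comparing sys.version strings.
    sys.platform == "darwin" or sys.version_info < (3, 7),
    reason="do not run on mac os or py < 3.7",
)
def test_summarization_example():  # hypothetical test name, for illustration only
    ...
```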
@@ -1518,7 +1518,6 @@ def get_automl_settings(estimator_name="transformer"):
    automl_settings["fit_kwargs_by_estimator"] = {
        estimator_name: {
            "output_dir": "test/data/output/",
            "ckpt_per_epoch": 1,
            "fp16": False,
        }
    }
@@ -1527,7 +1526,6 @@ def get_automl_settings(estimator_name="transformer"):
        estimator_name: {
            "model_path": "google/electra-small-discriminator",
            "output_dir": "test/data/output/",
            "ckpt_per_epoch": 1,
            "fp16": False,
        }
    }
@@ -85,7 +85,6 @@ def _test_hf_data():
        "transformer": {
            "model_path": "facebook/muppet-roberta-base",
            "output_dir": "test/data/output/",
            "ckpt_per_epoch": 5,
            "fp16": True,
        }
    }
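For context, fit_kwargs_by_estimator blocks like the ones in this diff are forwarded to the selected estimator when AutoML.fit runs. A minimal sketch of how such settings are typically consumed, assuming the usual FLAML pattern of unpacking the settings dict into fit; the data variables are placeholders:

```python
from flaml import AutoML

automl = AutoML()
automl_settings = {
    "task": "seq-classification",
    "metric": "accuracy",
    "fit_kwargs_by_estimator": {
        "transformer": {
            "output_dir": "test/data/output/",
            "fp16": False,
        }
    },
}
# X_train / y_train are placeholders for real training data.
# automl.fit(X_train=X_train, y_train=y_train, **automl_settings)
```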