Allow FLAML_sample_size in starting_points (#619)

* FLAML_sample_size

* clean up

* starting_points as a list

* catch AssertionError

* per estimator sample size

* import

* per estimator min_sample_size

* Update flaml/automl.py

Co-authored-by: Chi Wang <wang.chi@microsoft.com>

* Update test/automl/test_warmstart.py

Co-authored-by: Chi Wang <wang.chi@microsoft.com>

* add warnings

* adding more tests

* fix a bug in validating starting points

* improve test

* revise test

* revise test

* documentation about custom_hp

* doc and efficiency

* update test

Co-authored-by: Chi Wang <wang.chi@microsoft.com>
Author: Qingyun Wu
Date: 2022-07-09 16:04:46 -04:00
Committed by: GitHub
Parent: 6cb6a2a19a
Commit: b7846048dc
4 changed files with 203 additions and 24 deletions
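
For context, a minimal sketch of what this change enables, assuming the public
flaml.AutoML API; the estimator, hyperparameter values, and sample sizes below
are illustrative, not taken from the commit:

    from flaml import AutoML
    from sklearn.datasets import make_classification

    # Synthetic data large enough for subsampling to matter (illustrative).
    X, y = make_classification(n_samples=40000, n_features=20, random_state=0)

    automl = AutoML()
    automl.fit(
        X,
        y,
        task="classification",
        time_budget=10,
        estimator_list=["lgbm"],
        sample=True,  # subsample training data, so FLAML_sample_size is part of a config
        # Per this change, min_sample_size may be given per estimator (illustrative value).
        min_sample_size={"lgbm": 5000},
        # starting_points now accepts a list of configs per estimator, and a
        # config may include FLAML_sample_size; partial configs for brevity.
        starting_points={
            "lgbm": [
                {"n_estimators": 4, "num_leaves": 4, "FLAML_sample_size": 10000},
                {"n_estimators": 32, "num_leaves": 32, "FLAML_sample_size": 20000},
            ]
        },
    )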


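The hunk below revises test_starting_point_not_in_search_space around the
custom_hp argument. As a hedged sketch of that argument's shape (assuming the
flaml.AutoML API; the estimator and domains here are illustrative):

    from flaml import AutoML, tune
    from sklearn.datasets import load_iris

    X, y = load_iris(return_X_y=True)

    automl = AutoML()
    automl.fit(
        X,
        y,
        task="classification",
        time_budget=5,
        estimator_list=["lgbm"],
        # custom_hp maps estimator name -> hyperparameter name -> {"domain": ...}.
        # A constant domain pins the value; a tune domain replaces the default
        # search range. A hyperparameter given without an initial value enlarges
        # the search space but not the init config, which the revised assert checks.
        custom_hp={
            "lgbm": {
                "log_max_bin": {"domain": 8},
                "learning_rate": {"domain": tune.loguniform(1e-3, 1e-1)},
            }
        },
    )
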
@@ -57,15 +57,25 @@ def test_starting_point_not_in_search_space():
"learning_rate": {
"domain": tune.choice([1e-4, 1e-5]),
},
"per_device_train_batch_size": {
"domain": 2,
},
}
}
automl_settings["starting_points"] = "data:test/nlp/default/"
del automl_settings["fit_kwargs_by_estimator"][this_estimator_name]["model_path"]
automl.fit(X_train, y_train, **automl_settings)
assert (
len(automl._search_states[this_estimator_name].init_config) == 0
) # check that init config is not updated, but search space is updated
assert len(automl._search_states[this_estimator_name].init_config) == len(
automl._search_states[this_estimator_name]._search_space_domain
) - len(automl_settings["custom_hp"][this_estimator_name]), (
"The search space is updated with the custom_hp on {} hyperparameters of "
"the specified estimator without an initial value. Thus a valid init config "
"should only contain the cardinality of the search space minus {}".format(
len(automl_settings["custom_hp"][this_estimator_name]),
len(automl_settings["custom_hp"][this_estimator_name]),
)
)
assert (
automl._search_states[this_estimator_name].search_space["model_path"]
== "albert-base-v2"