
Commit 34affd9

Author: Github Actions (committed)
Lukas Strack: Fixing hps remain active & meta hp configuration (#1536)
1 parent fad2ed1 commit 34affd9
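
The change repeated across the example files in this commit is a new signature for get_hyperparameter_search_space: every custom component now takes an optional feat_type argument annotated with FEAT_TYPE_TYPE from autosklearn.askl_typing, defaulting to None so existing call sites keep working. Below is a minimal sketch of the pattern, condensed from the NoPreprocessing example updated in this commit; the comment describing feat_type as a per-column feature-type mapping is an assumption, not something these diffs state.

from typing import Optional

from ConfigSpace.configuration_space import ConfigurationSpace

from autosklearn.askl_typing import FEAT_TYPE_TYPE
from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm


class NoPreprocessing(AutoSklearnPreprocessingAlgorithm):
    # fit/transform/get_properties omitted here; see the full example diffs below.

    @staticmethod
    def get_hyperparameter_search_space(
        feat_type: Optional[FEAT_TYPE_TYPE] = None,  # new optional argument
        dataset_properties=None,
    ):
        # feat_type is presumably a per-column feature-type mapping that a
        # component may consult when building its search space; this component
        # needs no hyperparameters, so it returns an empty configuration space.
        return ConfigurationSpace()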

File tree

98 files changed (+5568 / -5362 lines)


development/_downloads/23ae4950352edc8dd9ea5443ba77886b/example_extending_regression.py

Lines changed: 6 additions & 1 deletion
@@ -6,6 +6,7 @@
 The following example demonstrates how to create a new regression
 component for using in auto-sklearn.
 """
+from typing import Optional
 from pprint import pprint
 
 from ConfigSpace.configuration_space import ConfigurationSpace
@@ -17,6 +18,8 @@
 from ConfigSpace.conditions import EqualsCondition
 
 import sklearn.metrics
+
+from autosklearn.askl_typing import FEAT_TYPE_TYPE
 import autosklearn.regression
 import autosklearn.pipeline.components.regression
 from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
@@ -86,7 +89,9 @@ def get_properties(dataset_properties=None):
         }
 
     @staticmethod
-    def get_hyperparameter_search_space(dataset_properties=None):
+    def get_hyperparameter_search_space(
+        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
+    ):
         cs = ConfigurationSpace()
         alpha = UniformFloatHyperparameter(
             name="alpha", lower=10**-5, upper=1, log=True, default_value=1.0

development/_downloads/3188e6424499bf3242cdcdfc032bf614/example_restrict_number_of_hyperparameters.py

Lines changed: 5 additions & 1 deletion
@@ -7,6 +7,7 @@
 component with a new component, implementing the same classifier,
 but with different hyperparameters .
 """
+from typing import Optional
 
 from ConfigSpace.configuration_space import ConfigurationSpace
 from ConfigSpace.hyperparameters import (
@@ -17,6 +18,7 @@
 from sklearn.datasets import load_breast_cancer
 from sklearn.model_selection import train_test_split
 
+from autosklearn.askl_typing import FEAT_TYPE_TYPE
 import autosklearn.classification
 import autosklearn.pipeline.components.classification
 from autosklearn.pipeline.components.classification import (
@@ -84,7 +86,9 @@ def get_properties(dataset_properties=None):
         }
 
     @staticmethod
-    def get_hyperparameter_search_space(dataset_properties=None):
+    def get_hyperparameter_search_space(
+        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
+    ):
         cs = ConfigurationSpace()
 
         # The maximum number of features used in the forest is calculated as m^max_features, where

development/_downloads/4f9b78e1d6464520c85232e30bf19d2b/example_text_preprocessing.ipynb

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@
 },
 "outputs": [],
 "source": [
-"# create an autosklearn Classifier or Regressor depending on your task at hand.\nautoml = autosklearn.classification.AutoSklearnClassifier(\n time_left_for_this_task=60,\n per_run_time_limit=30,\n tmp_folder=\"/tmp/autosklearn_text_example_tmp\",\n)\n\nautoml.fit(X_train, y_train, dataset_name=\"20_Newsgroups\") # fit the automl model"
+"# create an autosklearn Classifier or Regressor depending on your task at hand.\nautoml = autosklearn.classification.AutoSklearnClassifier(\n time_left_for_this_task=60,\n per_run_time_limit=30,\n)\n\nautoml.fit(X_train, y_train, dataset_name=\"20_Newsgroups\") # fit the automl model"
 ]
 },
 {

development/_downloads/515ab036d01801cb08e4878be1aef556/example_extending_classification.py

Lines changed: 6 additions & 1 deletion
@@ -6,6 +6,7 @@
 The following example demonstrates how to create a new classification
 component for using in auto-sklearn.
 """
+from typing import Optional
 from pprint import pprint
 
 from ConfigSpace.configuration_space import ConfigurationSpace
@@ -16,6 +17,8 @@
 )
 
 import sklearn.metrics
+
+from autosklearn.askl_typing import FEAT_TYPE_TYPE
 import autosklearn.classification
 import autosklearn.pipeline.components.classification
 from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
@@ -100,7 +103,9 @@ def get_properties(dataset_properties=None):
         }
 
     @staticmethod
-    def get_hyperparameter_search_space(dataset_properties=None):
+    def get_hyperparameter_search_space(
+        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
+    ):
         cs = ConfigurationSpace()
         hidden_layer_depth = UniformIntegerHyperparameter(
             name="hidden_layer_depth", lower=1, upper=3, default_value=1

development/_downloads/60abb5e0c4b0861f5ecbe4ae9c2e51dd/example_extending_data_preprocessor.py

Lines changed: 6 additions & 1 deletion
@@ -5,12 +5,15 @@
 
 The following example demonstrates how to turn off data preprocessing step in auto-skearn.
 """
+from typing import Optional
 from pprint import pprint
 
 import autosklearn.classification
 import autosklearn.pipeline.components.data_preprocessing
 import sklearn.metrics
 from ConfigSpace.configuration_space import ConfigurationSpace
+
+from autosklearn.askl_typing import FEAT_TYPE_TYPE
 from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm
 from autosklearn.pipeline.constants import SPARSE, DENSE, UNSIGNED_DATA, INPUT
 from sklearn.datasets import load_breast_cancer
@@ -49,7 +52,9 @@ def get_properties(dataset_properties=None):
         }
 
     @staticmethod
-    def get_hyperparameter_search_space(dataset_properties=None):
+    def get_hyperparameter_search_space(
+        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
+    ):
         return ConfigurationSpace()  # Return an empty configuration as there is None
 
 

development/_downloads/6dea5849db1f35abbefe123cd0eb49fd/example_extending_data_preprocessor.ipynb

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"from pprint import pprint\n\nimport autosklearn.classification\nimport autosklearn.pipeline.components.data_preprocessing\nimport sklearn.metrics\nfrom ConfigSpace.configuration_space import ConfigurationSpace\nfrom autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm\nfrom autosklearn.pipeline.constants import SPARSE, DENSE, UNSIGNED_DATA, INPUT\nfrom sklearn.datasets import load_breast_cancer\nfrom sklearn.model_selection import train_test_split"
+"from typing import Optional\nfrom pprint import pprint\n\nimport autosklearn.classification\nimport autosklearn.pipeline.components.data_preprocessing\nimport sklearn.metrics\nfrom ConfigSpace.configuration_space import ConfigurationSpace\n\nfrom autosklearn.askl_typing import FEAT_TYPE_TYPE\nfrom autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm\nfrom autosklearn.pipeline.constants import SPARSE, DENSE, UNSIGNED_DATA, INPUT\nfrom sklearn.datasets import load_breast_cancer\nfrom sklearn.model_selection import train_test_split"
 ]
 },
 {
@@ -44,7 +44,7 @@
 },
 "outputs": [],
 "source": [
-"class NoPreprocessing(AutoSklearnPreprocessingAlgorithm):\n def __init__(self, **kwargs):\n \"\"\"This preprocessors does not change the data\"\"\"\n # Some internal checks makes sure parameters are set\n for key, val in kwargs.items():\n setattr(self, key, val)\n\n def fit(self, X, Y=None):\n return self\n\n def transform(self, X):\n return X\n\n @staticmethod\n def get_properties(dataset_properties=None):\n return {\n \"shortname\": \"NoPreprocessing\",\n \"name\": \"NoPreprocessing\",\n \"handles_regression\": True,\n \"handles_classification\": True,\n \"handles_multiclass\": True,\n \"handles_multilabel\": True,\n \"handles_multioutput\": True,\n \"is_deterministic\": True,\n \"input\": (SPARSE, DENSE, UNSIGNED_DATA),\n \"output\": (INPUT,),\n }\n\n @staticmethod\n def get_hyperparameter_search_space(dataset_properties=None):\n return ConfigurationSpace() # Return an empty configuration as there is None\n\n\n# Add NoPreprocessing component to auto-sklearn.\nautosklearn.pipeline.components.data_preprocessing.add_preprocessor(NoPreprocessing)"
+"class NoPreprocessing(AutoSklearnPreprocessingAlgorithm):\n def __init__(self, **kwargs):\n \"\"\"This preprocessors does not change the data\"\"\"\n # Some internal checks makes sure parameters are set\n for key, val in kwargs.items():\n setattr(self, key, val)\n\n def fit(self, X, Y=None):\n return self\n\n def transform(self, X):\n return X\n\n @staticmethod\n def get_properties(dataset_properties=None):\n return {\n \"shortname\": \"NoPreprocessing\",\n \"name\": \"NoPreprocessing\",\n \"handles_regression\": True,\n \"handles_classification\": True,\n \"handles_multiclass\": True,\n \"handles_multilabel\": True,\n \"handles_multioutput\": True,\n \"is_deterministic\": True,\n \"input\": (SPARSE, DENSE, UNSIGNED_DATA),\n \"output\": (INPUT,),\n }\n\n @staticmethod\n def get_hyperparameter_search_space(\n feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None\n ):\n return ConfigurationSpace() # Return an empty configuration as there is None\n\n\n# Add NoPreprocessing component to auto-sklearn.\nautosklearn.pipeline.components.data_preprocessing.add_preprocessor(NoPreprocessing)"
 ]
 },
 {

development/_downloads/89acefb6af0174645412e5af4eafade1/example_text_preprocessing.py

Lines changed: 0 additions & 1 deletion
@@ -59,7 +59,6 @@
 automl = autosklearn.classification.AutoSklearnClassifier(
     time_left_for_this_task=60,
     per_run_time_limit=30,
-    tmp_folder="/tmp/autosklearn_text_example_tmp",
 )
 
 automl.fit(X_train, y_train, dataset_name="20_Newsgroups")  # fit the automl model

development/_downloads/a23bc40b83b60b7e97a3eb188a82ad24/example_extending_preprocessor.py

Lines changed: 6 additions & 1 deletion
@@ -7,6 +7,7 @@
 discriminant analysis (LDA) algorithm from sklearn and use it as a preprocessor
 in auto-sklearn.
 """
+from typing import Optional
 from pprint import pprint
 
 from ConfigSpace.configuration_space import ConfigurationSpace
@@ -17,6 +18,8 @@
 from ConfigSpace.conditions import InCondition
 
 import sklearn.metrics
+
+from autosklearn.askl_typing import FEAT_TYPE_TYPE
 import autosklearn.classification
 import autosklearn.pipeline.components.feature_preprocessing
 from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm
@@ -76,7 +79,9 @@ def get_properties(dataset_properties=None):
         }
 
     @staticmethod
-    def get_hyperparameter_search_space(dataset_properties=None):
+    def get_hyperparameter_search_space(
+        feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None
+    ):
         cs = ConfigurationSpace()
         solver = CategoricalHyperparameter(
             name="solver", choices=["svd", "lsqr", "eigen"], default_value="svd"

development/_downloads/c4ec137bb92db1e6b2219eb13818b898/example_extending_classification.ipynb

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@
 },
 "outputs": [],
 "source": [
-"from pprint import pprint\n\nfrom ConfigSpace.configuration_space import ConfigurationSpace\nfrom ConfigSpace.hyperparameters import (\n CategoricalHyperparameter,\n UniformIntegerHyperparameter,\n UniformFloatHyperparameter,\n)\n\nimport sklearn.metrics\nimport autosklearn.classification\nimport autosklearn.pipeline.components.classification\nfrom autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm\nfrom autosklearn.pipeline.constants import (\n DENSE,\n SIGNED_DATA,\n UNSIGNED_DATA,\n PREDICTIONS,\n)\n\nfrom sklearn.datasets import load_breast_cancer\nfrom sklearn.model_selection import train_test_split"
+"from typing import Optional\nfrom pprint import pprint\n\nfrom ConfigSpace.configuration_space import ConfigurationSpace\nfrom ConfigSpace.hyperparameters import (\n CategoricalHyperparameter,\n UniformIntegerHyperparameter,\n UniformFloatHyperparameter,\n)\n\nimport sklearn.metrics\n\nfrom autosklearn.askl_typing import FEAT_TYPE_TYPE\nimport autosklearn.classification\nimport autosklearn.pipeline.components.classification\nfrom autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm\nfrom autosklearn.pipeline.constants import (\n DENSE,\n SIGNED_DATA,\n UNSIGNED_DATA,\n PREDICTIONS,\n)\n\nfrom sklearn.datasets import load_breast_cancer\nfrom sklearn.model_selection import train_test_split"
 ]
 },
 {
@@ -44,7 +44,7 @@
 },
 "outputs": [],
 "source": [
-"class MLPClassifier(AutoSklearnClassificationAlgorithm):\n def __init__(\n self,\n hidden_layer_depth,\n num_nodes_per_layer,\n activation,\n alpha,\n solver,\n random_state=None,\n ):\n self.hidden_layer_depth = hidden_layer_depth\n self.num_nodes_per_layer = num_nodes_per_layer\n self.activation = activation\n self.alpha = alpha\n self.solver = solver\n self.random_state = random_state\n\n def fit(self, X, y):\n self.num_nodes_per_layer = int(self.num_nodes_per_layer)\n self.hidden_layer_depth = int(self.hidden_layer_depth)\n self.alpha = float(self.alpha)\n\n from sklearn.neural_network import MLPClassifier\n\n hidden_layer_sizes = tuple(\n self.num_nodes_per_layer for i in range(self.hidden_layer_depth)\n )\n\n self.estimator = MLPClassifier(\n hidden_layer_sizes=hidden_layer_sizes,\n activation=self.activation,\n alpha=self.alpha,\n solver=self.solver,\n random_state=self.random_state,\n )\n self.estimator.fit(X, y)\n return self\n\n def predict(self, X):\n if self.estimator is None:\n raise NotImplementedError()\n return self.estimator.predict(X)\n\n def predict_proba(self, X):\n if self.estimator is None:\n raise NotImplementedError()\n return self.estimator.predict_proba(X)\n\n @staticmethod\n def get_properties(dataset_properties=None):\n return {\n \"shortname\": \"MLP Classifier\",\n \"name\": \"MLP CLassifier\",\n \"handles_regression\": False,\n \"handles_classification\": True,\n \"handles_multiclass\": True,\n \"handles_multilabel\": False,\n \"handles_multioutput\": False,\n \"is_deterministic\": False,\n # Both input and output must be tuple(iterable)\n \"input\": [DENSE, SIGNED_DATA, UNSIGNED_DATA],\n \"output\": [PREDICTIONS],\n }\n\n @staticmethod\n def get_hyperparameter_search_space(dataset_properties=None):\n cs = ConfigurationSpace()\n hidden_layer_depth = UniformIntegerHyperparameter(\n name=\"hidden_layer_depth\", lower=1, upper=3, default_value=1\n )\n num_nodes_per_layer = UniformIntegerHyperparameter(\n name=\"num_nodes_per_layer\", lower=16, upper=216, default_value=32\n )\n activation = CategoricalHyperparameter(\n name=\"activation\",\n choices=[\"identity\", \"logistic\", \"tanh\", \"relu\"],\n default_value=\"relu\",\n )\n alpha = UniformFloatHyperparameter(\n name=\"alpha\", lower=0.0001, upper=1.0, default_value=0.0001\n )\n solver = CategoricalHyperparameter(\n name=\"solver\", choices=[\"lbfgs\", \"sgd\", \"adam\"], default_value=\"adam\"\n )\n cs.add_hyperparameters(\n [\n hidden_layer_depth,\n num_nodes_per_layer,\n activation,\n alpha,\n solver,\n ]\n )\n return cs\n\n\n# Add MLP classifier component to auto-sklearn.\nautosklearn.pipeline.components.classification.add_classifier(MLPClassifier)\ncs = MLPClassifier.get_hyperparameter_search_space()\nprint(cs)"
+"class MLPClassifier(AutoSklearnClassificationAlgorithm):\n def __init__(\n self,\n hidden_layer_depth,\n num_nodes_per_layer,\n activation,\n alpha,\n solver,\n random_state=None,\n ):\n self.hidden_layer_depth = hidden_layer_depth\n self.num_nodes_per_layer = num_nodes_per_layer\n self.activation = activation\n self.alpha = alpha\n self.solver = solver\n self.random_state = random_state\n\n def fit(self, X, y):\n self.num_nodes_per_layer = int(self.num_nodes_per_layer)\n self.hidden_layer_depth = int(self.hidden_layer_depth)\n self.alpha = float(self.alpha)\n\n from sklearn.neural_network import MLPClassifier\n\n hidden_layer_sizes = tuple(\n self.num_nodes_per_layer for i in range(self.hidden_layer_depth)\n )\n\n self.estimator = MLPClassifier(\n hidden_layer_sizes=hidden_layer_sizes,\n activation=self.activation,\n alpha=self.alpha,\n solver=self.solver,\n random_state=self.random_state,\n )\n self.estimator.fit(X, y)\n return self\n\n def predict(self, X):\n if self.estimator is None:\n raise NotImplementedError()\n return self.estimator.predict(X)\n\n def predict_proba(self, X):\n if self.estimator is None:\n raise NotImplementedError()\n return self.estimator.predict_proba(X)\n\n @staticmethod\n def get_properties(dataset_properties=None):\n return {\n \"shortname\": \"MLP Classifier\",\n \"name\": \"MLP CLassifier\",\n \"handles_regression\": False,\n \"handles_classification\": True,\n \"handles_multiclass\": True,\n \"handles_multilabel\": False,\n \"handles_multioutput\": False,\n \"is_deterministic\": False,\n # Both input and output must be tuple(iterable)\n \"input\": [DENSE, SIGNED_DATA, UNSIGNED_DATA],\n \"output\": [PREDICTIONS],\n }\n\n @staticmethod\n def get_hyperparameter_search_space(\n feat_type: Optional[FEAT_TYPE_TYPE] = None, dataset_properties=None\n ):\n cs = ConfigurationSpace()\n hidden_layer_depth = UniformIntegerHyperparameter(\n name=\"hidden_layer_depth\", lower=1, upper=3, default_value=1\n )\n num_nodes_per_layer = UniformIntegerHyperparameter(\n name=\"num_nodes_per_layer\", lower=16, upper=216, default_value=32\n )\n activation = CategoricalHyperparameter(\n name=\"activation\",\n choices=[\"identity\", \"logistic\", \"tanh\", \"relu\"],\n default_value=\"relu\",\n )\n alpha = UniformFloatHyperparameter(\n name=\"alpha\", lower=0.0001, upper=1.0, default_value=0.0001\n )\n solver = CategoricalHyperparameter(\n name=\"solver\", choices=[\"lbfgs\", \"sgd\", \"adam\"], default_value=\"adam\"\n )\n cs.add_hyperparameters(\n [\n hidden_layer_depth,\n num_nodes_per_layer,\n activation,\n alpha,\n solver,\n ]\n )\n return cs\n\n\n# Add MLP classifier component to auto-sklearn.\nautosklearn.pipeline.components.classification.add_classifier(MLPClassifier)\ncs = MLPClassifier.get_hyperparameter_search_space()\nprint(cs)"
 ]
 },
 {
