Predictors

AbstractPredictor

Bases: ABC

Abstract base class for all predictors.

Methods

fit(X, Y, **kwargs)
    Fit the model to the data.
predict(X, **kwargs)
    Predict using the model.
save(file_path)
    Save the model to a file.
load(file_path)
    Load the model from a file.
get_configuration_space(cs)
    Get the configuration space for the predictor.
get_from_configuration(configuration)
    Get a predictor instance from a configuration.

Source code in asf/predictors/abstract_predictor.py
class AbstractPredictor(ABC):
    """
    Abstract base class for all predictors.

    Methods
    -------
    fit(X, Y, **kwargs)
        Fit the model to the data.
    predict(X, **kwargs)
        Predict using the model.
    save(file_path)
        Save the model to a file.
    load(file_path)
        Load the model from a file.
    get_configuration_space(cs)
        Get the configuration space for the predictor.
    get_from_configuration(configuration)
        Get a predictor instance from a configuration.
    """

    def __init__(self):
        """
        Initialize the predictor.
        """
        pass

    @abstractmethod
    def fit(self, X: Any, Y: Any, **kwargs: Any) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : Any
            Training data.
        Y : Any
            Target values.
        kwargs : Any
            Additional arguments for fitting the model.
        """
        pass

    @abstractmethod
    def predict(self, X: Any, **kwargs: Any) -> Any:
        """
        Predict using the model.

        Parameters
        ----------
        X : Any
            Data to predict on.
        kwargs : Any
            Additional arguments for prediction.

        Returns
        -------
        Any
            Predicted values.
        """
        pass

    @abstractmethod
    def save(self, file_path: str) -> None:
        """
        Save the model to a file.

        Parameters
        ----------
        file_path : str
            Path to the file where the model will be saved.
        """
        pass

    @abstractmethod
    def load(self, file_path: str) -> None:
        """
        Load the model from a file.

        Parameters
        ----------
        file_path : str
            Path to the file from which the model will be loaded.
        """
        pass
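
A minimal concrete subclass, as a sketch of how the interface is meant to be used. The mean-only model and pickle-based persistence below are illustrative, not part of asf:

import pickle

import numpy as np

from asf.predictors.abstract_predictor import AbstractPredictor


class MeanPredictor(AbstractPredictor):
    """Toy predictor that always predicts the mean of the training targets."""

    def fit(self, X, Y, **kwargs):
        self.mean_ = float(np.mean(Y))

    def predict(self, X, **kwargs):
        # One constant prediction per input row.
        return np.full(len(X), self.mean_)

    def save(self, file_path):
        with open(file_path, "wb") as f:
            pickle.dump(self, f)

    def load(self, file_path):
        with open(file_path, "rb") as f:
            self.__dict__.update(pickle.load(f).__dict__)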

__init__()

Initialize the predictor.

Source code in asf/predictors/abstract_predictor.py
def __init__(self):
    """
    Initialize the predictor.
    """
    pass

fit(X, Y, **kwargs) abstractmethod

Fit the model to the data.

Parameters

X : Any
    Training data.
Y : Any
    Target values.
kwargs : Any
    Additional arguments for fitting the model.

Source code in asf/predictors/abstract_predictor.py
@abstractmethod
def fit(self, X: Any, Y: Any, **kwargs: Any) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : Any
        Training data.
    Y : Any
        Target values.
    kwargs : Any
        Additional arguments for fitting the model.
    """
    pass

load(file_path) abstractmethod

Load the model from a file.

Parameters

file_path : str
    Path to the file from which the model will be loaded.

Source code in asf/predictors/abstract_predictor.py
@abstractmethod
def load(self, file_path: str) -> None:
    """
    Load the model from a file.

    Parameters
    ----------
    file_path : str
        Path to the file from which the model will be loaded.
    """
    pass

predict(X, **kwargs) abstractmethod

Predict using the model.

Parameters

X : Any
    Data to predict on.
kwargs : Any
    Additional arguments for prediction.

Returns

Any
    Predicted values.

Source code in asf/predictors/abstract_predictor.py
@abstractmethod
def predict(self, X: Any, **kwargs: Any) -> Any:
    """
    Predict using the model.

    Parameters
    ----------
    X : Any
        Data to predict on.
    kwargs : Any
        Additional arguments for prediction.

    Returns
    -------
    Any
        Predicted values.
    """
    pass

save(file_path) abstractmethod

Save the model to a file.

Parameters

file_path : str
    Path to the file where the model will be saved.

Source code in asf/predictors/abstract_predictor.py
@abstractmethod
def save(self, file_path: str) -> None:
    """
    Save the model to a file.

    Parameters
    ----------
    file_path : str
        Path to the file where the model will be saved.
    """
    pass

EPMRandomForest

Bases: ForestRegressor, AbstractPredictor

Implementation of random forest as described in the paper "Algorithm runtime prediction: Methods & evaluation" by Hutter, Xu, Hoos, and Leyton-Brown (2014).

This class extends ForestRegressor and AbstractPredictor to provide a random forest implementation with additional functionality for runtime prediction.

Parameters

n_estimators : int, optional
    The number of trees in the forest. Default is 100.
log : bool, optional
    Whether to apply a logarithmic transformation to the tree values. Default is False.
return_var : bool, optional
    Whether predict returns the variance across trees in addition to the mean. Default is False.
criterion : str, optional
    The function to measure the quality of a split. Default is "squared_error".
splitter : str, optional
    The strategy used to choose the split at each node. Default is "random".
max_depth : int, optional
    The maximum depth of the tree. Default is None.
min_samples_split : int, optional
    The minimum number of samples required to split an internal node. Default is 2.
min_samples_leaf : int, optional
    The minimum number of samples required to be at a leaf node. Default is 1.
min_weight_fraction_leaf : float, optional
    The minimum weighted fraction of the sum total of weights required to be at a leaf node. Default is 0.0.
max_features : float, optional
    The number of features to consider when looking for the best split. Default is 1.0.
max_leaf_nodes : int, optional
    Grow trees with max_leaf_nodes in best-first fashion. Default is None.
min_impurity_decrease : float, optional
    A node will be split if this split induces a decrease of the impurity greater than or equal to this value. Default is 0.0.
bootstrap : bool, optional
    Whether bootstrap samples are used when building trees. Default is False.
oob_score : bool, optional
    Whether to use out-of-bag samples to estimate the generalization score. Default is False.
n_jobs : int, optional
    The number of jobs to run in parallel. Default is None.
random_state : int, optional
    Controls the randomness of the estimator. Default is None.
verbose : int, optional
    Controls the verbosity when fitting and predicting. Default is 0.
warm_start : bool, optional
    When set to True, reuse the solution of the previous call to fit and add more estimators to the ensemble. Default is False.
ccp_alpha : float, optional
    Complexity parameter used for Minimal Cost-Complexity Pruning. Default is 0.0.
max_samples : int or float, optional
    If bootstrap is True, the number of samples to draw from X to train each base estimator. Default is None.
monotonic_cst : array-like, optional
    Constraints for monotonicity of features. Default is None.

Source code in asf/predictors/epm_random_forest.py
class EPMRandomForest(ForestRegressor, AbstractPredictor):
    """
    Implementation of random forest as described in the paper
    "Algorithm runtime prediction: Methods & evaluation" by Hutter, Xu, Hoos, and Leyton-Brown (2014).

    This class extends `ForestRegressor` and `AbstractPredictor` to provide
    a random forest implementation with additional functionality for runtime prediction.

    Parameters
    ----------
    n_estimators : int, optional
        The number of trees in the forest. Default is 100.
    log : bool, optional
        Whether to apply logarithmic transformation to the tree values. Default is False.
    return_var : bool, optional
        Whether `predict` returns the variance across trees in addition to the mean. Default is False.
    criterion : str, optional
        The function to measure the quality of a split. Default is "squared_error".
    splitter : str, optional
        The strategy used to choose the split at each node. Default is "random".
    max_depth : int, optional
        The maximum depth of the tree. Default is None.
    min_samples_split : int, optional
        The minimum number of samples required to split an internal node. Default is 2.
    min_samples_leaf : int, optional
        The minimum number of samples required to be at a leaf node. Default is 1.
    min_weight_fraction_leaf : float, optional
        The minimum weighted fraction of the sum total of weights required to be at a leaf node. Default is 0.0.
    max_features : float, optional
        The number of features to consider when looking for the best split. Default is 1.0.
    max_leaf_nodes : int, optional
        Grow trees with max_leaf_nodes in best-first fashion. Default is None.
    min_impurity_decrease : float, optional
        A node will be split if this split induces a decrease of the impurity greater than or equal to this value. Default is 0.0.
    bootstrap : bool, optional
        Whether bootstrap samples are used when building trees. Default is False.
    oob_score : bool, optional
        Whether to use out-of-bag samples to estimate the generalization score. Default is False.
    n_jobs : int, optional
        The number of jobs to run in parallel. Default is None.
    random_state : int, optional
        Controls the randomness of the estimator. Default is None.
    verbose : int, optional
        Controls the verbosity when fitting and predicting. Default is 0.
    warm_start : bool, optional
        When set to True, reuse the solution of the previous call to fit and add more estimators to the ensemble. Default is False.
    ccp_alpha : float, optional
        Complexity parameter used for Minimal Cost-Complexity Pruning. Default is 0.0.
    max_samples : int or float, optional
        If bootstrap is True, the number of samples to draw from X to train each base estimator. Default is None.
    monotonic_cst : array-like, optional
        Constraints for monotonicity of features. Default is None.
    """

    def __init__(
        self,
        n_estimators: int = 100,
        *,
        log: bool = False,
        return_var: bool = False,
        criterion: str = "squared_error",
        splitter: str = "random",
        max_depth: int = None,
        min_samples_split: int = 2,
        min_samples_leaf: int = 1,
        min_weight_fraction_leaf: float = 0.0,
        max_features: float = 1.0,
        max_leaf_nodes: int = None,
        min_impurity_decrease: float = 0.0,
        bootstrap: bool = False,
        oob_score: bool = False,
        n_jobs: int = None,
        random_state: int = None,
        verbose: int = 0,
        warm_start: bool = False,
        ccp_alpha: float = 0.0,
        max_samples: int | float = None,
        monotonic_cst: np.ndarray = None,
    ) -> None:
        super().__init__(
            DecisionTreeRegressor(),
            n_estimators,
            estimator_params=(
                "criterion",
                "splitter",
                "max_depth",
                "min_samples_split",
                "min_samples_leaf",
                "min_weight_fraction_leaf",
                "max_features",
                "max_leaf_nodes",
                "min_impurity_decrease",
                "random_state",
                "ccp_alpha",
                "monotonic_cst",
            ),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples,
        )
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.ccp_alpha = ccp_alpha
        self.monotonic_cst = monotonic_cst
        self.splitter = splitter
        self.log = log
        self.return_var = return_var

    def fit(
        self, X: np.ndarray, y: np.ndarray, sample_weight: np.ndarray = None
    ) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : np.ndarray
            Training data of shape (n_samples, n_features).
        y : np.ndarray
            Target values of shape (n_samples,).
        sample_weight : np.ndarray, optional
            Sample weights. Default is None.

        Raises
        ------
        AssertionError
            If sample weights are provided, as they are not supported.
        """
        assert sample_weight is None, "Sample weights are not supported"
        super().fit(X=X, y=y, sample_weight=sample_weight)

        self.trainX = X
        self.trainY = y
        if self.log:
            for tree, samples_idx in zip(self.estimators_, self.estimators_samples_):
                curX = X[samples_idx]
                curY = y[samples_idx]
                preds = tree.apply(curX)
                for k in np.unique(preds):
                    tree.tree_.value[k, 0, 0] = np.log(np.exp(curY[preds == k]).mean())

    def predict(self, X: np.ndarray) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
        """
        Predict using the model.

        Parameters
        ----------
        X : np.ndarray
            Data to predict on of shape (n_samples, n_features).

        Returns
        -------
        np.ndarray or tuple[np.ndarray, np.ndarray]
            Predicted means of shape (n_samples,). If `return_var` is True,
            a tuple (means, variances), each of shape (n_samples,).
        """
        preds = []
        for tree, samples_idx in zip(self.estimators_, self.estimators_samples_):
            preds.append(tree.predict(X))
        preds = np.array(preds).T

        means = preds.mean(axis=1)
        vars = preds.var(axis=1)

        if self.return_var:
            return means, vars
        else:
            return means

    def save(self, file_path: str) -> None:
        """
        Save the model to a file.

        Parameters
        ----------
        file_path : str
            Path to the file where the model will be saved.
        """
        import joblib

        joblib.dump(self, file_path)

    def load(self, file_path: str) -> "EPMRandomForest":
        """
        Load the model from a file.

        Parameters
        ----------
        file_path : str
            Path to the file from which the model will be loaded.

        Returns
        -------
        EPMRandomForest
            The loaded model.
        """
        import joblib

        return joblib.load(file_path)
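
A usage sketch under the interface above; the data is synthetic, and bootstrap=True is chosen so that per-tree sample indices (used by the log=True branch of fit) are available:

import numpy as np

from asf.predictors.epm_random_forest import EPMRandomForest

rng = np.random.default_rng(0)
X = rng.random((100, 5))  # 100 instances, 5 features
y = rng.random(100)       # e.g. (log-)runtimes

model = EPMRandomForest(
    n_estimators=10, bootstrap=True, return_var=True, random_state=0
)
model.fit(X, y)

# With return_var=True, predict returns per-instance means and variances
# computed across the individual trees.
means, variances = model.predict(rng.random((10, 5)))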

fit(X, y, sample_weight=None)

Fit the model to the data.

Parameters

X : np.ndarray
    Training data of shape (n_samples, n_features).
y : np.ndarray
    Target values of shape (n_samples,).
sample_weight : np.ndarray, optional
    Sample weights. Default is None.

Raises

AssertionError
    If sample weights are provided, as they are not supported.

Source code in asf/predictors/epm_random_forest.py
def fit(
    self, X: np.ndarray, y: np.ndarray, sample_weight: np.ndarray = None
) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : np.ndarray
        Training data of shape (n_samples, n_features).
    y : np.ndarray
        Target values of shape (n_samples,).
    sample_weight : np.ndarray, optional
        Sample weights. Default is None.

    Raises
    ------
    AssertionError
        If sample weights are provided, as they are not supported.
    """
    assert sample_weight is None, "Sample weights are not supported"
    super().fit(X=X, y=y, sample_weight=sample_weight)

    self.trainX = X
    self.trainY = y
    if self.log:
        for tree, samples_idx in zip(self.estimators_, self.estimators_samples_):
            curX = X[samples_idx]
            curY = y[samples_idx]
            preds = tree.apply(curX)
            for k in np.unique(preds):
                tree.tree_.value[k, 0, 0] = np.log(np.exp(curY[preds == k]).mean())
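
The effect of the log=True branch above, in a small numeric sketch (the values are illustrative): y is treated as log runtime, and each leaf value is replaced by the log of the arithmetic mean of the raw runtimes, which weights slow runs more heavily than the default mean of the logs:

import numpy as np

leaf_y = np.log(np.array([2.0, 8.0]))  # log runtimes falling into one leaf
print(leaf_y.mean())                   # mean of logs: log(4) ~= 1.386
print(np.log(np.exp(leaf_y).mean()))   # log of mean runtime: log(5) ~= 1.609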

load(file_path)

Load the model from a file.

Parameters

file_path : str
    Path to the file from which the model will be loaded.

Returns

EPMRandomForest
    The loaded model.

Source code in asf/predictors/epm_random_forest.py
def load(self, file_path: str) -> "EPMRandomForest":
    """
    Load the model from a file.

    Parameters
    ----------
    file_path : str
        Path to the file from which the model will be loaded.

    Returns
    -------
    EPMRandomForest
        The loaded model.
    """
    import joblib

    return joblib.load(file_path)

predict(X)

Predict using the model.

Parameters

X : np.ndarray
    Data to predict on of shape (n_samples, n_features).

Returns

np.ndarray or tuple[np.ndarray, np.ndarray]
    Predicted means of shape (n_samples,). If return_var is True, a tuple (means, variances), each of shape (n_samples,).

Source code in asf/predictors/epm_random_forest.py
def predict(self, X: np.ndarray) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
    """
    Predict using the model.

    Parameters
    ----------
    X : np.ndarray
        Data to predict on of shape (n_samples, n_features).

    Returns
    -------
    np.ndarray or tuple[np.ndarray, np.ndarray]
        Predicted means of shape (n_samples,). If `return_var` is True,
        a tuple (means, variances), each of shape (n_samples,).
    """
    preds = []
    for tree, samples_idx in zip(self.estimators_, self.estimators_samples_):
        preds.append(tree.predict(X))
    preds = np.array(preds).T

    means = preds.mean(axis=1)
    vars = preds.var(axis=1)

    if self.return_var:
        return means, vars
    else:
        return means

save(file_path)

Save the model to a file.

Parameters

file_path : str
    Path to the file where the model will be saved.

Source code in asf/predictors/epm_random_forest.py
def save(self, file_path: str) -> None:
    """
    Save the model to a file.

    Parameters
    ----------
    file_path : str
        Path to the file where the model will be saved.
    """
    import joblib

    joblib.dump(self, file_path)

LinearClassifierWrapper

Bases: SklearnWrapper

A wrapper for the SGDClassifier from scikit-learn, providing additional functionality for configuration space generation and parameter extraction.

Source code in asf/predictors/linear_model.py
class LinearClassifierWrapper(SklearnWrapper):
    """
    A wrapper for the SGDClassifier from scikit-learn, providing additional functionality
    for configuration space generation and parameter extraction.
    """

    PREFIX = "linear_classifier"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the LinearClassifierWrapper.

        Parameters
        ----------
        init_params : dict, optional
            A dictionary of initialization parameters for the SGDClassifier.
        """
        super().__init__(SGDClassifier, init_params or {})

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the Linear Classifier.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the Linear Classifier parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{LinearClassifierWrapper.PREFIX}"
            else:
                prefix = LinearClassifierWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="Linear Classifier")

            alpha = Float(
                f"{prefix}:alpha",
                (1e-5, 1),
                log=True,
                default=1e-3,
            )
            eta0 = Float(
                f"{prefix}:eta0",
                (1e-5, 1),
                log=True,
                default=1e-2,
            )

            params = [
                alpha,
                eta0,
            ]

            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create a partial function to initialize LinearClassifierWrapper with parameters from a configuration.

            Parameters
            ----------
            configuration : dict
                A dictionary containing the configuration parameters.
            kwargs : dict, optional
                Additional parameters to include in the initialization.

            Returns
            -------
            partial
                A partial function to initialize LinearClassifierWrapper.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{LinearClassifierWrapper.PREFIX}"
            else:
                prefix = LinearClassifierWrapper.PREFIX

            linear_classifier_params = {
                "alpha": configuration[f"{prefix}:alpha"],
                "eta0": configuration[f"{prefix}:eta0"],
                **kwargs,
            }

            return partial(
                LinearClassifierWrapper, init_params=linear_classifier_params
            )
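
A sketch of the round trip from configuration space to a fitted predictor, assuming ConfigSpace is installed and that SklearnWrapper delegates fit to the underlying SGDClassifier; the sampled configuration and random data are illustrative:

import numpy as np

from asf.predictors.linear_model import LinearClassifierWrapper

cs = LinearClassifierWrapper.get_configuration_space()
config = cs.sample_configuration()  # holds "linear_classifier:alpha" and "linear_classifier:eta0"

# get_from_configuration returns a partial; calling it builds the wrapper.
make_predictor = LinearClassifierWrapper.get_from_configuration(config)
predictor = make_predictor()

X = np.random.rand(20, 4)
y = np.random.randint(0, 2, size=20)
predictor.fit(X, y)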

__init__(init_params=None)

Initialize the LinearClassifierWrapper.

Parameters

init_params : dict, optional
    A dictionary of initialization parameters for the SGDClassifier.

Source code in asf/predictors/linear_model.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the LinearClassifierWrapper.

    Parameters
    ----------
    init_params : dict, optional
        A dictionary of initialization parameters for the SGDClassifier.
    """
    super().__init__(SGDClassifier, init_params or {})

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the Linear Classifier.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

Returns

ConfigurationSpace
    The configuration space with the Linear Classifier parameters.

Source code in asf/predictors/linear_model.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the Linear Classifier.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the Linear Classifier parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{LinearClassifierWrapper.PREFIX}"
    else:
        prefix = LinearClassifierWrapper.PREFIX

    if cs is None:
        cs = ConfigurationSpace(name="Linear Classifier")

    alpha = Float(
        f"{prefix}:alpha",
        (1e-5, 1),
        log=True,
        default=1e-3,
    )
    eta0 = Float(
        f"{prefix}:eta0",
        (1e-5, 1),
        log=True,
        default=1e-2,
    )

    params = [
        alpha,
        eta0,
    ]

    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create a partial function to initialize LinearClassifierWrapper with parameters from a configuration.

Parameters

configuration : dict
    A dictionary containing the configuration parameters.
kwargs : dict, optional
    Additional parameters to include in the initialization.

Returns

partial
    A partial function to initialize LinearClassifierWrapper.

Source code in asf/predictors/linear_model.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create a partial function to initialize LinearClassifierWrapper with parameters from a configuration.

    Parameters
    ----------
    configuration : dict
        A dictionary containing the configuration parameters.
    kwargs : dict, optional
        Additional parameters to include in the initialization.

    Returns
    -------
    partial
        A partial function to initialize LinearClassifierWrapper.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{LinearClassifierWrapper.PREFIX}"
    else:
        prefix = LinearClassifierWrapper.PREFIX

    linear_classifier_params = {
        "alpha": configuration[f"{prefix}:alpha"],
        "eta0": configuration[f"{prefix}:eta0"],
        **kwargs,
    }

    return partial(
        LinearClassifierWrapper, init_params=linear_classifier_params
    )

LinearRegressorWrapper

Bases: SklearnWrapper

A wrapper for the SGDRegressor from scikit-learn, providing additional functionality for configuration space generation and parameter extraction.

Source code in asf/predictors/linear_model.py
class LinearRegressorWrapper(SklearnWrapper):
    """
    A wrapper for the SGDRegressor from scikit-learn, providing additional functionality
    for configuration space generation and parameter extraction.
    """

    PREFIX = "linear_regressor"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the LinearRegressorWrapper.

        Parameters
        ----------
        init_params : dict, optional
            A dictionary of initialization parameters for the SGDRegressor.
        """
        super().__init__(SGDRegressor, init_params or {})

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the Linear Regressor.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the Linear Regressor parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{LinearRegressorWrapper.PREFIX}"
            else:
                prefix = LinearRegressorWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="Linear Regressor")

            alpha = Float(
                f"{prefix}:alpha",
                (1e-5, 1),
                log=True,
                default=1e-3,
            )
            eta0 = Float(
                f"{prefix}:eta0",
                (1e-5, 1),
                log=True,
                default=1e-2,
            )

            params = [alpha, eta0]

            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create a partial function to initialize LinearRegressorWrapper with parameters from a configuration.

            Parameters
            ----------
            configuration : dict
                A dictionary containing the configuration parameters.
            kwargs : dict, optional
                Additional parameters to include in the initialization.

            Returns
            -------
            partial
                A partial function to initialize LinearRegressorWrapper.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{LinearRegressorWrapper.PREFIX}"
            else:
                prefix = LinearRegressorWrapper.PREFIX

            linear_regressor_params = {
                "alpha": configuration[f"{prefix}:alpha"],
                "eta0": configuration[f"{prefix}:eta0"],
                **kwargs,
            }

            return partial(LinearRegressorWrapper, init_params=linear_regressor_params)
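
The cs argument also lets the regressor extend an existing space instead of creating a fresh one. A short sketch, assuming ConfigSpace is installed:

from ConfigSpace import ConfigurationSpace

from asf.predictors.linear_model import LinearRegressorWrapper

cs = ConfigurationSpace(name="my-space")
cs = LinearRegressorWrapper.get_configuration_space(cs=cs)
# cs now additionally holds "linear_regressor:alpha" and "linear_regressor:eta0"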

__init__(init_params=None)

Initialize the LinearRegressorWrapper.

Parameters

init_params : dict, optional
    A dictionary of initialization parameters for the SGDRegressor.

Source code in asf/predictors/linear_model.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the LinearRegressorWrapper.

    Parameters
    ----------
    init_params : dict, optional
        A dictionary of initialization parameters for the SGDRegressor.
    """
    super().__init__(SGDRegressor, init_params or {})

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the Linear Regressor.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

Returns

ConfigurationSpace
    The configuration space with the Linear Regressor parameters.

Source code in asf/predictors/linear_model.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the Linear Regressor.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the Linear Regressor parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{LinearRegressorWrapper.PREFIX}"
    else:
        prefix = LinearRegressorWrapper.PREFIX

    if cs is None:
        cs = ConfigurationSpace(name="Linear Regressor")

    alpha = Float(
        f"{prefix}:alpha",
        (1e-5, 1),
        log=True,
        default=1e-3,
    )
    eta0 = Float(
        f"{prefix}:eta0",
        (1e-5, 1),
        log=True,
        default=1e-2,
    )

    params = [alpha, eta0]

    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create a partial function to initialize LinearRegressorWrapper with parameters from a configuration.

Parameters

configuration : dict
    A dictionary containing the configuration parameters.
kwargs : dict, optional
    Additional parameters to include in the initialization.

Returns

partial
    A partial function to initialize LinearRegressorWrapper.

Source code in asf/predictors/linear_model.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create a partial function to initialize LinearRegressorWrapper with parameters from a configuration.

    Parameters
    ----------
    configuration : dict
        A dictionary containing the configuration parameters.
    kwargs : dict, optional
        Additional parameters to include in the initialization.

    Returns
    -------
    partial
        A partial function to initialize LinearRegressorWrapper.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{LinearRegressorWrapper.PREFIX}"
    else:
        prefix = LinearRegressorWrapper.PREFIX

    linear_regressor_params = {
        "alpha": configuration[f"{prefix}:alpha"],
        "eta0": configuration[f"{prefix}:eta0"],
        **kwargs,
    }

    return partial(LinearRegressorWrapper, init_params=linear_regressor_params)

MLPClassifierWrapper

Bases: SklearnWrapper

A wrapper for the MLPClassifier from scikit-learn, providing additional functionality for configuration space and parameter handling.

Source code in asf/predictors/mlp.py
class MLPClassifierWrapper(SklearnWrapper):
    """
    A wrapper for the MLPClassifier from scikit-learn, providing additional functionality
    for configuration space and parameter handling.
    """

    PREFIX = "mlp_classifier"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the MLPClassifierWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Initial parameters for the MLPClassifier.
        """
        super().__init__(MLPClassifier, init_params or {})

    def fit(
        self, X: Any, Y: Any, sample_weight: Optional[Any] = None, **kwargs: Any
    ) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : array-like
            Training data.
        Y : array-like
            Target values.
        sample_weight : array-like, optional
            Sample weights. Not supported for MLPClassifier.
        kwargs : dict
            Additional arguments for the fit method.
        """
        assert sample_weight is None, (
            "Sample weights are not supported for MLPClassifier"
        )
        self.model_class.fit(X, Y, **kwargs)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the MLP Classifier.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the MLP Classifier parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{MLPClassifierWrapper.PREFIX}"
            else:
                prefix = MLPClassifierWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="MLP Classifier")

            depth = Integer(f"{prefix}:depth", (1, 3), default=3, log=False)

            width = Integer(f"{prefix}:width", (16, 1024), default=64, log=True)

            batch_size = Integer(
                f"{prefix}:batch_size",
                (256, 1024),
                default=256,
                log=True,
            )  # MODIFIED from HPOBENCH

            alpha = Float(
                f"{prefix}:alpha",
                (10**-8, 1),
                default=10**-3,
                log=True,
            )

            learning_rate_init = Float(
                f"{prefix}:learning_rate_init",
                (10**-5, 1),
                default=10**-3,
                log=True,
            )

            params = [depth, width, batch_size, alpha, learning_rate_init]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: ConfigurationSpace, pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create an MLPClassifierWrapper instance from a configuration.

            Parameters
            ----------
            configuration : ConfigurationSpace
                The configuration containing the parameters.
            kwargs : dict, optional
                Additional parameters to override the default configuration.

            Returns
            -------
            partial
                A partial function to create an MLPClassifierWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{MLPClassifierWrapper.PREFIX}"
            else:
                prefix = MLPClassifierWrapper.PREFIX

            hidden_layers = [configuration[f"{prefix}:width"]] * configuration[
                f"{prefix}:depth"
            ]

            if "activation" not in kwargs:
                kwargs["activation"] = "relu"
            if "solver" not in kwargs:
                kwargs["solver"] = "adam"

            mlp_params = {
                "hidden_layer_sizes": hidden_layers,
                "batch_size": configuration[f"{prefix}:batch_size"],
                "alpha": configuration[f"{prefix}:alpha"],
                "learning_rate_init": configuration[f"{prefix}:learning_rate_init"],
                **kwargs,
            }

            return partial(MLPClassifierWrapper, init_params=mlp_params)
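
How get_from_configuration assembles the network: depth and width collapse into hidden_layer_sizes. A sketch with a hand-written configuration dict (the values are illustrative):

from asf.predictors.mlp import MLPClassifierWrapper

config = {
    "mlp_classifier:depth": 3,
    "mlp_classifier:width": 64,
    "mlp_classifier:batch_size": 256,
    "mlp_classifier:alpha": 1e-3,
    "mlp_classifier:learning_rate_init": 1e-3,
}

make_mlp = MLPClassifierWrapper.get_from_configuration(config)
mlp = make_mlp()  # wraps MLPClassifier(hidden_layer_sizes=[64, 64, 64], ...)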

__init__(init_params=None)

Initialize the MLPClassifierWrapper.

Parameters

init_params : dict, optional
    Initial parameters for the MLPClassifier.

Source code in asf/predictors/mlp.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the MLPClassifierWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Initial parameters for the MLPClassifier.
    """
    super().__init__(MLPClassifier, init_params or {})

fit(X, Y, sample_weight=None, **kwargs)

Fit the model to the data.

Parameters

X : array-like
    Training data.
Y : array-like
    Target values.
sample_weight : array-like, optional
    Sample weights. Not supported for MLPClassifier.
kwargs : dict
    Additional arguments for the fit method.

Source code in asf/predictors/mlp.py
def fit(
    self, X: Any, Y: Any, sample_weight: Optional[Any] = None, **kwargs: Any
) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : array-like
        Training data.
    Y : array-like
        Target values.
    sample_weight : array-like, optional
        Sample weights. Not supported for MLPClassifier.
    kwargs : dict
        Additional arguments for the fit method.
    """
    assert sample_weight is None, (
        "Sample weights are not supported for MLPClassifier"
    )
    self.model_class.fit(X, Y, **kwargs)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the MLP Classifier.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

Returns

ConfigurationSpace
    The configuration space with the MLP Classifier parameters.

Source code in asf/predictors/mlp.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the MLP Classifier.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the MLP Classifier parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{MLPClassifierWrapper.PREFIX}"
    else:
        prefix = MLPClassifierWrapper.PREFIX

    if cs is None:
        cs = ConfigurationSpace(name="MLP Classifier")

    depth = Integer(f"{prefix}:depth", (1, 3), default=3, log=False)

    width = Integer(f"{prefix}:width", (16, 1024), default=64, log=True)

    batch_size = Integer(
        f"{prefix}:batch_size",
        (256, 1024),
        default=256,
        log=True,
    )  # MODIFIED from HPOBENCH

    alpha = Float(
        f"{prefix}:alpha",
        (10**-8, 1),
        default=10**-3,
        log=True,
    )

    learning_rate_init = Float(
        f"{prefix}:learning_rate_init",
        (10**-5, 1),
        default=10**-3,
        log=True,
    )

    params = [depth, width, batch_size, alpha, learning_rate_init]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an MLPClassifierWrapper instance from a configuration.

Parameters

configuration : ConfigurationSpace
    The configuration containing the parameters.
kwargs : dict, optional
    Additional parameters to override the default configuration.

Returns

partial
    A partial function to create an MLPClassifierWrapper instance.

Source code in asf/predictors/mlp.py
@staticmethod
def get_from_configuration(
    configuration: ConfigurationSpace, pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create an MLPClassifierWrapper instance from a configuration.

    Parameters
    ----------
    configuration : ConfigurationSpace
        The configuration containing the parameters.
    kwargs : dict, optional
        Additional parameters to override the default configuration.

    Returns
    -------
    partial
        A partial function to create an MLPClassifierWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{MLPClassifierWrapper.PREFIX}"
    else:
        prefix = MLPClassifierWrapper.PREFIX

    hidden_layers = [configuration[f"{prefix}:width"]] * configuration[
        f"{prefix}:depth"
    ]

    if "activation" not in kwargs:
        kwargs["activation"] = "relu"
    if "solver" not in kwargs:
        kwargs["solver"] = "adam"

    mlp_params = {
        "hidden_layer_sizes": hidden_layers,
        "batch_size": configuration[f"{prefix}:batch_size"],
        "alpha": configuration[f"{prefix}:alpha"],
        "learning_rate_init": configuration[f"{prefix}:learning_rate_init"],
        **kwargs,
    }

    return partial(MLPClassifierWrapper, init_params=mlp_params)

MLPRegressorWrapper

Bases: SklearnWrapper

A wrapper for the MLPRegressor from scikit-learn, providing additional functionality for configuration space and parameter handling.

Source code in asf/predictors/mlp.py
class MLPRegressorWrapper(SklearnWrapper):
    """
    A wrapper for the MLPRegressor from scikit-learn, providing additional functionality
    for configuration space and parameter handling.
    """

    PREFIX = "mlp_regressor"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the MLPRegressorWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Initial parameters for the MLPRegressor.
        """
        super().__init__(MLPRegressor, init_params or {})

    def fit(
        self, X: Any, Y: Any, sample_weight: Optional[Any] = None, **kwargs: Any
    ) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : array-like
            Training data.
        Y : array-like
            Target values.
        sample_weight : array-like, optional
            Sample weights. Not supported for MLPRegressor.
        kwargs : dict
            Additional arguments for the fit method.
        """
        assert sample_weight is None, (
            "Sample weights are not supported for MLPRegressor"
        )
        self.model_class.fit(X, Y, **kwargs)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the MLP Regressor.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the MLP Regressor parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{MLPRegressorWrapper.PREFIX}"
            else:
                prefix = MLPRegressorWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="MLP Regressor")

            depth = Integer(f"{prefix}:depth", (1, 3), default=3, log=False)

            width = Integer(f"{prefix}:width", (16, 1024), default=64, log=True)

            batch_size = Integer(
                f"{prefix}:batch_size",
                (256, 1024),
                default=256,
                log=True,
            )

            alpha = Float(
                f"{prefix}:alpha",
                (10**-8, 1),
                default=10**-3,
                log=True,
            )

            learning_rate_init = Float(
                f"{prefix}:learning_rate_init",
                (10**-5, 1),
                default=10**-3,
                log=True,
            )

            params = [depth, width, batch_size, alpha, learning_rate_init]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: ConfigurationSpace, pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create an MLPRegressorWrapper instance from a configuration.

            Parameters
            ----------
            configuration : ConfigurationSpace
                The configuration containing the parameters.
            kwargs : dict, optional
                Additional parameters to override the default configuration.

            Returns
            -------
            partial
                A partial function to create an MLPRegressorWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{MLPRegressorWrapper.PREFIX}"
            else:
                prefix = MLPRegressorWrapper.PREFIX

            hidden_layers = [configuration[f"{prefix}:width"]] * configuration[
                f"{prefix}:depth"
            ]

            if "activation" not in kwargs:
                kwargs["activation"] = "relu"
            if "solver" not in kwargs:
                kwargs["solver"] = "adam"

            mlp_params = {
                "hidden_layer_sizes": hidden_layers,
                "batch_size": configuration[f"{prefix}:batch_size"],
                "alpha": configuration[f"{prefix}:alpha"],
                "learning_rate_init": configuration[f"{prefix}:learning_rate_init"],
                **kwargs,
            }

            return partial(MLPRegressorWrapper, init_params=mlp_params)
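
The **kwargs of get_from_configuration are merged into the estimator parameters, so additional MLPRegressor arguments can be threaded through; activation and solver default to "relu" and "adam" unless overridden. A sketch with illustrative keyword values, assuming ConfigSpace is installed:

from asf.predictors.mlp import MLPRegressorWrapper

cs = MLPRegressorWrapper.get_configuration_space()
config = cs.sample_configuration()

# max_iter and random_state are forwarded into mlp_params alongside the
# tuned hyperparameters.
make_mlp = MLPRegressorWrapper.get_from_configuration(
    config, max_iter=500, random_state=0
)
regressor = make_mlp()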

__init__(init_params=None)

Initialize the MLPRegressorWrapper.

Parameters

init_params : dict, optional
    Initial parameters for the MLPRegressor.

Source code in asf/predictors/mlp.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the MLPRegressorWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Initial parameters for the MLPRegressor.
    """
    super().__init__(MLPRegressor, init_params or {})

fit(X, Y, sample_weight=None, **kwargs)

Fit the model to the data.

Parameters

X : array-like
    Training data.
Y : array-like
    Target values.
sample_weight : array-like, optional
    Sample weights. Not supported for MLPRegressor.
kwargs : dict
    Additional arguments for the fit method.

Source code in asf/predictors/mlp.py
def fit(
    self, X: Any, Y: Any, sample_weight: Optional[Any] = None, **kwargs: Any
) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : array-like
        Training data.
    Y : array-like
        Target values.
    sample_weight : array-like, optional
        Sample weights. Not supported for MLPRegressor.
    kwargs : dict
        Additional arguments for the fit method.
    """
    assert sample_weight is None, (
        "Sample weights are not supported for MLPRegressor"
    )
    self.model_class.fit(X, Y, **kwargs)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the MLP Regressor.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

Returns

ConfigurationSpace
    The configuration space with the MLP Regressor parameters.

Source code in asf/predictors/mlp.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the MLP Regressor.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the MLP Regressor parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{MLPRegressorWrapper.PREFIX}"
    else:
        prefix = MLPRegressorWrapper.PREFIX

    if cs is None:
        cs = ConfigurationSpace(name="MLP Regressor")

    depth = Integer(f"{prefix}:depth", (1, 3), default=3, log=False)

    width = Integer(f"{prefix}:width", (16, 1024), default=64, log=True)

    batch_size = Integer(
        f"{prefix}:batch_size",
        (256, 1024),
        default=256,
        log=True,
    )

    alpha = Float(
        f"{prefix}:alpha",
        (10**-8, 1),
        default=10**-3,
        log=True,
    )

    learning_rate_init = Float(
        f"{prefix}:learning_rate_init",
        (10**-5, 1),
        default=10**-3,
        log=True,
    )

    params = [depth, width, batch_size, alpha, learning_rate_init]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an MLPRegressorWrapper instance from a configuration.

Parameters

configuration : ConfigurationSpace
    The configuration containing the parameters.
kwargs : dict, optional
    Additional parameters to override the default configuration.

Returns

partial
    A partial function to create an MLPRegressorWrapper instance.

Source code in asf/predictors/mlp.py
@staticmethod
def get_from_configuration(
    configuration: ConfigurationSpace, pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create an MLPRegressorWrapper instance from a configuration.

    Parameters
    ----------
    configuration : ConfigurationSpace
        The configuration containing the parameters.
    kwargs : dict, optional
        Additional parameters to override the default configuration.

    Returns
    -------
    partial
        A partial function to create an MLPRegressorWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{MLPRegressorWrapper.PREFIX}"
    else:
        prefix = MLPRegressorWrapper.PREFIX

    hidden_layers = [configuration[f"{prefix}:width"]] * configuration[
        f"{prefix}:depth"
    ]

    if "activation" not in kwargs:
        kwargs["activation"] = "relu"
    if "solver" not in kwargs:
        kwargs["solver"] = "adam"

    mlp_params = {
        "hidden_layer_sizes": hidden_layers,
        "batch_size": configuration[f"{prefix}:batch_size"],
        "alpha": configuration[f"{prefix}:alpha"],
        "learning_rate_init": configuration[f"{prefix}:learning_rate_init"],
        **kwargs,
    }

    return partial(MLPRegressorWrapper, init_params=mlp_params)

RandomForestClassifierWrapper

Bases: SklearnWrapper

A wrapper for the RandomForestClassifier from scikit-learn, providing additional functionality for configuration space management.

Source code in asf/predictors/random_forest.py
class RandomForestClassifierWrapper(SklearnWrapper):
    """
    A wrapper for the RandomForestClassifier from scikit-learn, providing
    additional functionality for configuration space management.
    """

    PREFIX = "rf_classifier"

    def __init__(self, init_params: Dict[str, Any] = {}):
        """
        Initialize the RandomForestClassifierWrapper.

        Parameters
        ----------
        init_params : dict, optional
            A dictionary of initialization parameters for the RandomForestClassifier.
        """
        super().__init__(RandomForestClassifier, init_params)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the Random Forest Classifier.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the Random Forest Classifier parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{RandomForestClassifierWrapper.PREFIX}:"
            else:
                prefix = RandomForestClassifierWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="RandomForest")

            n_estimators = Integer(
                f"{prefix}n_estimators",
                (16, 128),
                log=True,
                default=116,
            )
            min_samples_split = Integer(
                f"{prefix}min_samples_split",
                (2, 20),
                log=False,
                default=2,
            )
            min_samples_leaf = Integer(
                f"{prefix}min_samples_leaf",
                (1, 20),
                log=False,
                default=2,
            )
            max_features = Float(
                f"{prefix}max_features",
                (0.1, 1.0),
                log=False,
                default=0.17055852159745608,
            )
            bootstrap = Categorical(
                f"{prefix}bootstrap",
                items=[True, False],
                default=False,
            )

            params = [
                n_estimators,
                min_samples_split,
                min_samples_leaf,
                max_features,
                bootstrap,
            ]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create a RandomForestClassifierWrapper instance from a configuration.

            Parameters
            ----------
            configuration : dict
                A dictionary containing the configuration parameters.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            **kwargs
                Additional parameters to override or extend the configuration.

            Returns
            -------
            partial
                A partial function to create a RandomForestClassifierWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{RandomForestClassifierWrapper.PREFIX}:"
            else:
                # Keep the trailing colon so parameter names are always
                # "rf_classifier:<name>", matching the pre_prefix branch.
                prefix = f"{RandomForestClassifierWrapper.PREFIX}:"

            rf_params = {
                "n_estimators": configuration[f"{prefix}n_estimators"],
                "min_samples_split": configuration[f"{prefix}min_samples_split"],
                "min_samples_leaf": configuration[f"{prefix}min_samples_leaf"],
                "max_features": configuration[f"{prefix}max_features"],
                "bootstrap": configuration[f"{prefix}bootstrap"],
                **kwargs,
            }

            return partial(RandomForestClassifierWrapper, init_params=rf_params)

__init__(init_params={})

Initialize the RandomForestClassifierWrapper.

Parameters

init_params : dict, optional A dictionary of initialization parameters for the RandomForestClassifier.

Source code in asf/predictors/random_forest.py
def __init__(self, init_params: Dict[str, Any] = {}):
    """
    Initialize the RandomForestClassifierWrapper.

    Parameters
    ----------
    init_params : dict, optional
        A dictionary of initialization parameters for the RandomForestClassifier.
    """
    super().__init__(RandomForestClassifier, init_params)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the Random Forest Classifier.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
parent_param : Hyperparameter, optional
    If given, each parameter is made conditional on this parent via an EqualsCondition.
parent_value : str, optional
    The value of parent_param for which the parameters are active.

Returns

ConfigurationSpace The configuration space with the Random Forest Classifier parameters.

Source code in asf/predictors/random_forest.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the Random Forest Classifier.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    parent_param : Hyperparameter, optional
        If given, each parameter is made conditional on this parent via an EqualsCondition.
    parent_value : str, optional
        The value of parent_param for which the parameters are active.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the Random Forest Classifier parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{RandomForestClassifierWrapper.PREFIX}:"
    else:
        # Keep the trailing colon so parameter names are always
        # "rf_classifier:<name>", matching the pre_prefix branch.
        prefix = f"{RandomForestClassifierWrapper.PREFIX}:"

    if cs is None:
        cs = ConfigurationSpace(name="RandomForest")

    n_estimators = Integer(
        f"{prefix}n_estimators",
        (16, 128),
        log=True,
        default=116,
    )
    min_samples_split = Integer(
        f"{prefix}min_samples_split",
        (2, 20),
        log=False,
        default=2,
    )
    min_samples_leaf = Integer(
        f"{prefix}min_samples_leaf",
        (1, 20),
        log=False,
        default=2,
    )
    max_features = Float(
        f"{prefix}max_features",
        (0.1, 1.0),
        log=False,
        default=0.17055852159745608,
    )
    bootstrap = Categorical(
        f"{prefix}bootstrap",
        items=[True, False],
        default=False,
    )

    params = [
        n_estimators,
        min_samples_split,
        min_samples_leaf,
        max_features,
        bootstrap,
    ]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs
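
The parent_param/parent_value mechanism above makes every hyperparameter conditional on another hyperparameter, which is useful when choosing between model families in a single space. A sketch, assuming ConfigSpace is installed (the top-level "model" hyperparameter is hypothetical, not part of ASF):

from ConfigSpace import Categorical, ConfigurationSpace
from asf.predictors import RandomForestClassifierWrapper

cs = ConfigurationSpace()
model_choice = Categorical("model", ["rf", "svm"], default="rf")
cs.add(model_choice)

# The RF hyperparameters are only active while model == "rf".
cs = RandomForestClassifierWrapper.get_configuration_space(
    cs=cs, parent_param=model_choice, parent_value="rf"
)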

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create a RandomForestClassifierWrapper instance from a configuration.

Parameters

configuration : dict
    A dictionary containing the configuration parameters.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
**kwargs
    Additional parameters to override or extend the configuration.

Returns

partial A partial function to create a RandomForestClassifierWrapper instance.

Source code in asf/predictors/random_forest.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create a RandomForestClassifierWrapper instance from a configuration.

    Parameters
    ----------
    configuration : dict
        A dictionary containing the configuration parameters.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    **kwargs
        Additional parameters to override or extend the configuration.

    Returns
    -------
    partial
        A partial function to create a RandomForestClassifierWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{RandomForestClassifierWrapper.PREFIX}:"
    else:
        # Keep the trailing colon so parameter names are always
        # "rf_classifier:<name>", matching the pre_prefix branch.
        prefix = f"{RandomForestClassifierWrapper.PREFIX}:"

    rf_params = {
        "n_estimators": configuration[f"{prefix}n_estimators"],
        "min_samples_split": configuration[f"{prefix}min_samples_split"],
        "min_samples_leaf": configuration[f"{prefix}min_samples_leaf"],
        "max_features": configuration[f"{prefix}max_features"],
        "bootstrap": configuration[f"{prefix}bootstrap"],
        **kwargs,
    }

    return partial(RandomForestClassifierWrapper, init_params=rf_params)
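
End to end, the two static methods combine as follows (a sketch; the import path and the toy data are illustrative assumptions):

import numpy as np
from asf.predictors import RandomForestClassifierWrapper

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 5))      # toy instance features
y = rng.integers(0, 2, size=100)   # toy binary labels

cs = RandomForestClassifierWrapper.get_configuration_space()
rf = RandomForestClassifierWrapper.get_from_configuration(cs.sample_configuration())()
rf.fit(X, y)
predictions = rf.predict(X)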

RandomForestRegressorWrapper

Bases: SklearnWrapper

A wrapper for the RandomForestRegressor from scikit-learn, providing additional functionality for configuration space management.

Source code in asf/predictors/random_forest.py
class RandomForestRegressorWrapper(SklearnWrapper):
    """
    A wrapper for the RandomForestRegressor from scikit-learn, providing
    additional functionality for configuration space management.
    """

    PREFIX = "rf_regressor"

    def __init__(self, init_params: Dict[str, Any] = {}):
        """
        Initialize the RandomForestRegressorWrapper.

        Parameters
        ----------
        init_params : dict, optional
            A dictionary of initialization parameters for the RandomForestRegressor.
        """
        super().__init__(RandomForestRegressor, init_params)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the Random Forest Regressor.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            parent_param : Hyperparameter, optional
                If given, each parameter is made conditional on this parent via an EqualsCondition.
            parent_value : str, optional
                The value of parent_param for which the parameters are active.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the Random Forest Regressor parameters.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{RandomForestRegressorWrapper.PREFIX}:"
            else:
                # Keep the trailing colon so parameter names are always
                # "rf_regressor:<name>", matching the pre_prefix branch.
                prefix = f"{RandomForestRegressorWrapper.PREFIX}:"

            if cs is None:
                cs = ConfigurationSpace(name="RandomForestRegressor")

            n_estimators = Integer(
                f"{prefix}n_estimators",
                (16, 128),
                log=True,
                default=116,
            )
            min_samples_split = Integer(
                f"{prefix}min_samples_split",
                (2, 20),
                log=False,
                default=2,
            )
            min_samples_leaf = Integer(
                f"{prefix}min_samples_leaf",
                (1, 20),
                log=False,
                default=2,
            )
            max_features = Float(
                f"{prefix}max_features",
                (0.1, 1.0),
                log=False,
                default=0.17055852159745608,
            )
            bootstrap = Categorical(
                f"{prefix}bootstrap",
                items=[True, False],
                default=False,
            )
            params = [
                n_estimators,
                min_samples_split,
                min_samples_leaf,
                max_features,
                bootstrap,
            ]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create a RandomForestRegressorWrapper instance from a configuration.

            Parameters
            ----------
            configuration : dict
                A dictionary containing the configuration parameters.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            **kwargs
                Additional parameters to override or extend the configuration.

            Returns
            -------
            partial
                A partial function to create a RandomForestRegressorWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{RandomForestRegressorWrapper.PREFIX}:"
            else:
                # Keep the trailing colon so parameter names are always
                # "rf_regressor:<name>", matching the pre_prefix branch.
                prefix = f"{RandomForestRegressorWrapper.PREFIX}:"

            rf_params = {
                "n_estimators": configuration[f"{prefix}n_estimators"],
                "min_samples_split": configuration[f"{prefix}min_samples_split"],
                "min_samples_leaf": configuration[f"{prefix}min_samples_leaf"],
                "max_features": configuration[f"{prefix}max_features"],
                "bootstrap": configuration[f"{prefix}bootstrap"],
                **kwargs,
            }

            return partial(RandomForestRegressorWrapper, init_params=rf_params)

__init__(init_params={})

Initialize the RandomForestRegressorWrapper.

Parameters

init_params : dict, optional A dictionary of initialization parameters for the RandomForestRegressor.

Source code in asf/predictors/random_forest.py
def __init__(self, init_params: Dict[str, Any] = {}):
    """
    Initialize the RandomForestRegressorWrapper.

    Parameters
    ----------
    init_params : dict, optional
        A dictionary of initialization parameters for the RandomForestRegressor.
    """
    super().__init__(RandomForestRegressor, init_params)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the Random Forest Regressor.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
parent_param : Hyperparameter, optional
    If given, each parameter is made conditional on this parent via an EqualsCondition.
parent_value : str, optional
    The value of parent_param for which the parameters are active.

Returns

ConfigurationSpace The configuration space with the Random Forest Regressor parameters.

Source code in asf/predictors/random_forest.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the Random Forest Regressor.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    parent_param : Hyperparameter, optional
        If given, each parameter is made conditional on this parent via an EqualsCondition.
    parent_value : str, optional
        The value of parent_param for which the parameters are active.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the Random Forest Regressor parameters.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{RandomForestRegressorWrapper.PREFIX}:"
    else:
        # Keep the trailing colon so parameter names are always
        # "rf_regressor:<name>", matching the pre_prefix branch.
        prefix = f"{RandomForestRegressorWrapper.PREFIX}:"

    if cs is None:
        cs = ConfigurationSpace(name="RandomForestRegressor")

    n_estimators = Integer(
        f"{prefix}n_estimators",
        (16, 128),
        log=True,
        default=116,
    )
    min_samples_split = Integer(
        f"{prefix}min_samples_split",
        (2, 20),
        log=False,
        default=2,
    )
    min_samples_leaf = Integer(
        f"{prefix}min_samples_leaf",
        (1, 20),
        log=False,
        default=2,
    )
    max_features = Float(
        f"{prefix}max_features",
        (0.1, 1.0),
        log=False,
        default=0.17055852159745608,
    )
    bootstrap = Categorical(
        f"{prefix}bootstrap",
        items=[True, False],
        default=False,
    )
    params = [
        n_estimators,
        min_samples_split,
        min_samples_leaf,
        max_features,
        bootstrap,
    ]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create a RandomForestRegressorWrapper instance from a configuration.

Parameters

configuration : dict
    A dictionary containing the configuration parameters.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
**kwargs
    Additional parameters to override or extend the configuration.

Returns

partial A partial function to create a RandomForestRegressorWrapper instance.

Source code in asf/predictors/random_forest.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create a RandomForestRegressorWrapper instance from a configuration.

    Parameters
    ----------
    configuration : dict
        A dictionary containing the configuration parameters.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    **kwargs
        Additional parameters to override or extend the configuration.

    Returns
    -------
    partial
        A partial function to create a RandomForestRegressorWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{RandomForestRegressorWrapper.PREFIX}:"
    else:
        # Keep the trailing colon so parameter names are always
        # "rf_regressor:<name>", matching the pre_prefix branch.
        prefix = f"{RandomForestRegressorWrapper.PREFIX}:"

    rf_params = {
        "n_estimators": configuration[f"{prefix}n_estimators"],
        "min_samples_split": configuration[f"{prefix}min_samples_split"],
        "min_samples_leaf": configuration[f"{prefix}min_samples_leaf"],
        "max_features": configuration[f"{prefix}max_features"],
        "bootstrap": configuration[f"{prefix}bootstrap"],
        **kwargs,
    }

    return partial(RandomForestRegressorWrapper, init_params=rf_params)
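
Usage mirrors the classifier; only the targets are continuous (again a sketch with assumed import path and toy data):

import numpy as np
from asf.predictors import RandomForestRegressorWrapper

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 5))   # toy instance features
y = rng.normal(size=100)        # toy continuous targets

cs = RandomForestRegressorWrapper.get_configuration_space()
rf = RandomForestRegressorWrapper.get_from_configuration(cs.sample_configuration())()
rf.fit(X, y)
predictions = rf.predict(X)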

RankingMLP

Bases: AbstractPredictor

A ranking-based predictor using a Multi-Layer Perceptron (MLP).

This class implements a ranking model that uses an MLP to predict the performance of algorithms based on input features.

Source code in asf/predictors/ranking_mlp.py
class RankingMLP(AbstractPredictor):
    """
    A ranking-based predictor using a Multi-Layer Perceptron (MLP).

    This class implements a ranking model that uses an MLP to predict
    the performance of algorithms based on input features.
    """

    def __init__(
        self,
        model: Union[torch.nn.Module, None] = None,
        input_size: Union[int, None] = None,
        loss: Callable = bpr_loss,
        optimizer: Callable[..., Optimizer] = torch.optim.Adam,
        batch_size: int = 128,
        epochs: int = 500,
        seed: int = 42,
        device: str = "cpu",
        compile: bool = True,
        **kwargs,
    ):
        """
        Initializes the RankingMLP with the given parameters.

        Args:
            model (torch.nn.Module | None): The pre-defined PyTorch model to use. If None, a new MLP is created.
            input_size (int | None): The input size for the MLP. Required if `model` is None.
            loss (Callable): The loss function to use. Defaults to `bpr_loss`.
            optimizer (Callable[..., torch.optim.Optimizer]): The optimizer class to use. Defaults to `torch.optim.Adam`.
            batch_size (int): The batch size for training. Defaults to 128.
            epochs (int): The number of training epochs. Defaults to 500.
            seed (int): The random seed for reproducibility. Defaults to 42.
            device (str): The device to use for training (e.g., "cpu" or "cuda"). Defaults to "cpu".
            compile (bool): Whether to compile the model using `torch.compile`. Defaults to True.
            **kwargs: Additional arguments for the parent class.
        """
        super().__init__(**kwargs)
        assert TORCH_AVAILABLE, "PyTorch is not available. Please install it."

        assert model is not None or input_size is not None, (
            "Either model or input_size must be provided."
        )

        torch.manual_seed(seed)

        if model is None:
            self.model = get_mlp(input_size=input_size, output_size=1)
        else:
            self.model = model

        self.model.to(device)
        self.device = device

        self.loss = loss
        self.batch_size = batch_size
        self.optimizer = optimizer
        self.epochs = epochs

        if compile:
            self.model = torch.compile(self.model)

    def _get_dataloader(
        self,
        features: pd.DataFrame,
        performance: pd.DataFrame,
        algorithm_features: pd.DataFrame,
    ) -> DataLoader:
        """
        Creates a DataLoader for the given features and performance data.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.
            performance (pd.DataFrame): DataFrame containing the performance data.
            algorithm_features (pd.DataFrame): DataFrame containing algorithm-specific features.

        Returns:
            torch.utils.data.DataLoader: A DataLoader for the dataset.
        """
        dataset = RankingDataset(features, performance, algorithm_features)
        return torch.utils.data.DataLoader(
            dataset, batch_size=self.batch_size, shuffle=True, num_workers=4
        )

    def fit(
        self,
        features: pd.DataFrame,
        performance: pd.DataFrame,
        algorithm_features: pd.DataFrame,
    ) -> "RankingMLP":
        """
        Fits the model to the given feature and performance data.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.
            performance (pd.DataFrame): DataFrame containing the performance data.
            algorithm_features (pd.DataFrame): DataFrame containing algorithm-specific features.

        Returns:
            RankingMLP: The fitted model.
        """
        dataloader = self._get_dataloader(features, performance, algorithm_features)

        optimizer = self.optimizer(self.model.parameters())
        self.model.train()
        for epoch in range(self.epochs):
            total_loss = 0
            for i, ((Xc, Xs, Xl), (yc, ys, yl)) in enumerate(dataloader):
                Xc, Xs, Xl = Xc.to(self.device), Xs.to(self.device), Xl.to(self.device)
                yc, ys, yl = yc.to(self.device), ys.to(self.device), yl.to(self.device)

                yc = yc.float().unsqueeze(1)
                ys = ys.float().unsqueeze(1)
                yl = yl.float().unsqueeze(1)

                optimizer.zero_grad()

                y_pred = self.model(Xc)
                y_pred_s = self.model(Xs)
                y_pred_l = self.model(Xl)

                loss = self.loss(y_pred, y_pred_s, y_pred_l, yc, ys, yl)
                total_loss += loss.item()

                loss.backward()
                optimizer.step()

            logging.debug(f"Epoch {epoch}, Loss: {total_loss / len(dataloader)}")

        return self

    def predict(self, features: pd.DataFrame) -> np.ndarray:
        """
        Predicts the performance of algorithms for the given features.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.

        Returns:
            np.ndarray: Array containing the predicted performance values.
        """
        self.model.eval()

        features = torch.from_numpy(features.values).to(self.device).float()
        # Move the result to the CPU before converting to NumPy so that
        # prediction also works when device="cuda".
        predictions = self.model(features).detach().cpu().numpy()

        return predictions

    def save(self, file_path: str) -> None:
        """
        Saves the model to the specified file path.

        Args:
            file_path (str): The path to save the model.
        """
        torch.save(self.model, file_path)

    def load(self, file_path: str) -> None:
        """
        Loads the model from the specified file path.

        Args:
            file_path (str): The path to load the model from.
        """
        self.model = torch.load(file_path)
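
A hedged usage sketch: the data layout (instances x features, instances x algorithms, algorithms x features) and the assumption that the MLP input concatenates instance and algorithm features are inferred from the fit signature, not guaranteed by the API:

import numpy as np
import pandas as pd
from asf.predictors import RankingMLP

rng = np.random.default_rng(0)
instances = [f"i{k}" for k in range(20)]
algorithms = ["a1", "a2", "a3"]

features = pd.DataFrame(rng.normal(size=(20, 4)), index=instances)
performance = pd.DataFrame(rng.normal(size=(20, 3)), index=instances, columns=algorithms)
algorithm_features = pd.DataFrame(rng.normal(size=(3, 2)), index=algorithms)

# 4 instance features + 2 algorithm features = 6 inputs (assumption).
model = RankingMLP(input_size=6, epochs=10, compile=False)
model.fit(features, performance, algorithm_features)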

__init__(model=None, input_size=None, loss=bpr_loss, optimizer=torch.optim.Adam, batch_size=128, epochs=500, seed=42, device='cpu', compile=True, **kwargs)

Initializes the RankingMLP with the given parameters.

Parameters:

model (torch.nn.Module | None, default: None)
    The pre-defined PyTorch model to use. If None, a new MLP is created.
input_size (int | None, default: None)
    The input size for the MLP. Required if model is None.
loss (Callable, default: bpr_loss)
    The loss function to use.
optimizer (Callable[..., Optimizer], default: torch.optim.Adam)
    The optimizer class to use.
batch_size (int, default: 128)
    The batch size for training.
epochs (int, default: 500)
    The number of training epochs.
seed (int, default: 42)
    The random seed for reproducibility.
device (str, default: "cpu")
    The device to use for training (e.g., "cpu" or "cuda").
compile (bool, default: True)
    Whether to compile the model using torch.compile.
**kwargs
    Additional arguments for the parent class.
Source code in asf/predictors/ranking_mlp.py
def __init__(
    self,
    model: Union[torch.nn.Module, None] = None,
    input_size: Union[int, None] = None,
    loss: Callable = bpr_loss,
    optimizer: Callable[..., Optimizer] = torch.optim.Adam,
    batch_size: int = 128,
    epochs: int = 500,
    seed: int = 42,
    device: str = "cpu",
    compile: bool = True,
    **kwargs,
):
    """
    Initializes the RankingMLP with the given parameters.

    Args:
        model (torch.nn.Module | None): The pre-defined PyTorch model to use. If None, a new MLP is created.
        input_size (int | None): The input size for the MLP. Required if `model` is None.
        loss (Callable): The loss function to use. Defaults to `bpr_loss`.
        optimizer (Callable[..., torch.optim.Optimizer]): The optimizer class to use. Defaults to `torch.optim.Adam`.
        batch_size (int): The batch size for training. Defaults to 128.
        epochs (int): The number of training epochs. Defaults to 500.
        seed (int): The random seed for reproducibility. Defaults to 42.
        device (str): The device to use for training (e.g., "cpu" or "cuda"). Defaults to "cpu".
        compile (bool): Whether to compile the model using `torch.compile`. Defaults to True.
        **kwargs: Additional arguments for the parent class.
    """
    super().__init__(**kwargs)
    assert TORCH_AVAILABLE, "PyTorch is not available. Please install it."

    assert model is not None or input_size is not None, (
        "Either model or input_size must be provided."
    )

    torch.manual_seed(seed)

    if model is None:
        self.model = get_mlp(input_size=input_size, output_size=1)
    else:
        self.model = model

    self.model.to(device)
    self.device = device

    self.loss = loss
    self.batch_size = batch_size
    self.optimizer = optimizer
    self.epochs = epochs

    if compile:
        self.model = torch.compile(self.model)

fit(features, performance, algorithm_features)

Fits the model to the given feature and performance data.

Parameters:

features (pd.DataFrame)
    DataFrame containing the feature data. Required.
performance (pd.DataFrame)
    DataFrame containing the performance data. Required.
algorithm_features (pd.DataFrame)
    DataFrame containing algorithm-specific features. Required.

Returns:

RankingMLP
    The fitted model.

Source code in asf/predictors/ranking_mlp.py
def fit(
    self,
    features: pd.DataFrame,
    performance: pd.DataFrame,
    algorithm_features: pd.DataFrame,
) -> "RankingMLP":
    """
    Fits the model to the given feature and performance data.

    Args:
        features (pd.DataFrame): DataFrame containing the feature data.
        performance (pd.DataFrame): DataFrame containing the performance data.
        algorithm_features (pd.DataFrame): DataFrame containing algorithm-specific features.

    Returns:
        RankingMLP: The fitted model.
    """
    dataloader = self._get_dataloader(features, performance, algorithm_features)

    optimizer = self.optimizer(self.model.parameters())
    self.model.train()
    for epoch in range(self.epochs):
        total_loss = 0
        for i, ((Xc, Xs, Xl), (yc, ys, yl)) in enumerate(dataloader):
            Xc, Xs, Xl = Xc.to(self.device), Xs.to(self.device), Xl.to(self.device)
            yc, ys, yl = yc.to(self.device), ys.to(self.device), yl.to(self.device)

            yc = yc.float().unsqueeze(1)
            ys = ys.float().unsqueeze(1)
            yl = yl.float().unsqueeze(1)

            optimizer.zero_grad()

            y_pred = self.model(Xc)
            y_pred_s = self.model(Xs)
            y_pred_l = self.model(Xl)

            loss = self.loss(y_pred, y_pred_s, y_pred_l, yc, ys, yl)
            total_loss += loss.item()

            loss.backward()
            optimizer.step()

        logging.debug(f"Epoch {epoch}, Loss: {total_loss / len(dataloader)}")

    return self

load(file_path)

Loads the model from the specified file path.

Parameters:

file_path (str)
    The path to load the model from. Required.
Source code in asf/predictors/ranking_mlp.py
def load(self, file_path: str) -> None:
    """
    Loads the model from the specified file path.

    Args:
        file_path (str): The path to load the model from.
    """
    self.model = torch.load(file_path)

predict(features)

Predicts the performance of algorithms for the given features.

Parameters:

features (pd.DataFrame)
    DataFrame containing the feature data. Required.

Returns:

np.ndarray
    Array containing the predicted performance values.

Source code in asf/predictors/ranking_mlp.py
def predict(self, features: pd.DataFrame) -> np.ndarray:
    """
    Predicts the performance of algorithms for the given features.

    Args:
        features (pd.DataFrame): DataFrame containing the feature data.

    Returns:
        np.ndarray: Array containing the predicted performance values.
    """
    self.model.eval()

    features = torch.from_numpy(features.values).to(self.device).float()
    # Move the result to the CPU before converting to NumPy so that
    # prediction also works when device="cuda".
    predictions = self.model(features).detach().cpu().numpy()

    return predictions

save(file_path)

Saves the model to the specified file path.

Parameters:

file_path (str)
    The path to save the model. Required.
Source code in asf/predictors/ranking_mlp.py
def save(self, file_path: str) -> None:
    """
    Saves the model to the specified file path.

    Args:
        file_path (str): The path to save the model.
    """
    torch.save(self.model, file_path)

RegressionMLP

Bases: AbstractPredictor

Source code in asf/predictors/regression_mlp.py
class RegressionMLP(AbstractPredictor):
    def __init__(
        self,
        model: torch.nn.Module | None = None,
        loss: torch.nn.modules.loss._Loss | None = torch.nn.MSELoss(),
        optimizer: type[torch.optim.Optimizer] | None = torch.optim.Adam,
        batch_size: int = 128,
        epochs: int = 2000,
        seed: int = 42,
        device: str = "cpu",
        compile: bool = True,
        **kwargs,
    ):
        """
        Initializes the RegressionMLP with the given parameters.

        Args:
            model (torch.nn.Module | None): The PyTorch model to be used. If None, a new MLP model is created at fit time from the feature dimensionality.
            loss (torch.nn.modules.loss._Loss | None): The loss function to be used. Defaults to Mean Squared Error Loss.
            optimizer (type[torch.optim.Optimizer] | None): The optimizer class to be used. Defaults to Adam.
            batch_size (int): The batch size for training. Defaults to 128.
            epochs (int): The number of epochs for training. Defaults to 2000.
            seed (int): The random seed for reproducibility. Defaults to 42.
            device (str): The device to run the model on ('cpu' or 'cuda'). Defaults to 'cpu'.
            compile (bool): Whether to compile the model using `torch.compile`. Defaults to True.
            **kwargs: Additional keyword arguments passed to the parent class.
        """
        super().__init__(**kwargs)

        assert TORCH_AVAILABLE, "PyTorch is not available. Please install it."

        torch.manual_seed(seed)

        self.model = model
        self.device = device

        self.loss = loss
        self.batch_size = batch_size
        self.optimizer = optimizer
        self.epochs = epochs
        self.compile = compile

    def _get_dataloader(
        self, features: pd.DataFrame, performance: pd.DataFrame
    ) -> torch.utils.data.DataLoader:
        """
        Creates a DataLoader for the given features and performance data.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.
            performance (pd.DataFrame): DataFrame containing the performance data.

        Returns:
            torch.utils.data.DataLoader: DataLoader for the dataset.
        """
        dataset = RegressionDataset(features, performance)
        return torch.utils.data.DataLoader(
            dataset, batch_size=self.batch_size, shuffle=True
        )

    def fit(
        self, features: pd.DataFrame, performance: pd.DataFrame, sample_weight=None
    ) -> "RegressionMLP":
        """
        Fits the model to the given feature and performance data.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.
            performance (pd.DataFrame): DataFrame containing the performance data.
            sample_weight: Not supported; must be None.

        Returns:
            RegressionMLP: The fitted model instance.
        """
        assert sample_weight is None, "Sample weights are not supported."

        if self.model is None:
            self.model = get_mlp(input_size=features.shape[1], output_size=1)

        self.model.to(self.device)

        if self.compile:
            self.model = torch.compile(self.model)

        features = pd.DataFrame(
            SimpleImputer().fit_transform(features.values),
            index=features.index,
            columns=features.columns,
        )
        dataloader = self._get_dataloader(features, performance)

        optimizer = self.optimizer(self.model.parameters())
        self.model.train()
        for epoch in range(self.epochs):
            total_loss = 0
            for i, (X, y) in enumerate(dataloader):
                X, y = X.to(self.device), y.to(self.device)
                X = X.float()
                y = y.unsqueeze(-1)
                optimizer.zero_grad()
                y_pred = self.model(X)
                loss = self.loss(y_pred, y)
                total_loss += loss.item()
                loss.backward()
                optimizer.step()

        return self

    def predict(self, features: pd.DataFrame) -> np.ndarray:
        """
        Predicts the performance of algorithms for the given features.

        Args:
            features (pd.DataFrame): DataFrame containing the feature data.

        Returns:
            np.ndarray: Array containing the predicted performance values.
        """
        self.model.eval()

        features = torch.from_numpy(features.values).to(self.device).float()
        # Move the result to the CPU before converting to NumPy so that
        # prediction also works when device="cuda".
        predictions = self.model(features).detach().cpu().numpy().squeeze(1)

        return predictions

    def save(self, file_path: str) -> None:
        """
        Saves the model to the specified file path.

        Args:
            file_path (str): The path to save the model.
        """
        torch.save(self.model, file_path)

    def load(self, file_path: str) -> None:
        """
        Loads the model from the specified file path.

        Args:
            file_path (str): The path to load the model from.
        """
        self.model = torch.load(file_path)
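
A minimal sketch with toy data (import path assumed; compile=False keeps the example lightweight, and a single-column performance frame is an assumption about the expected layout):

import numpy as np
import pandas as pd
from asf.predictors import RegressionMLP

rng = np.random.default_rng(0)
features = pd.DataFrame(rng.normal(size=(64, 8)))
performance = pd.DataFrame(rng.normal(size=(64, 1)))

# The input size is inferred from features.shape[1] inside fit.
model = RegressionMLP(epochs=20, compile=False)
model.fit(features, performance)
predictions = model.predict(features)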

__init__(model=None, loss=torch.nn.MSELoss(), optimizer=torch.optim.Adam, batch_size=128, epochs=2000, seed=42, device='cpu', compile=True, **kwargs)

Initializes the RegressionMLP with the given parameters.

Parameters:

model (torch.nn.Module | None, default: None)
    The PyTorch model to be used. If None, a new MLP model is created at fit time from the feature dimensionality.
loss (torch.nn.modules.loss._Loss, default: torch.nn.MSELoss())
    The loss function to be used.
optimizer (type[torch.optim.Optimizer], default: torch.optim.Adam)
    The optimizer class to be used.
batch_size (int, default: 128)
    The batch size for training.
epochs (int, default: 2000)
    The number of epochs for training.
seed (int, default: 42)
    The random seed for reproducibility.
device (str, default: "cpu")
    The device to run the model on ("cpu" or "cuda").
compile (bool, default: True)
    Whether to compile the model using torch.compile.
**kwargs
    Additional keyword arguments passed to the parent class.
Source code in asf/predictors/regression_mlp.py
def __init__(
    self,
    model: torch.nn.Module | None = None,
    loss: torch.nn.modules.loss._Loss | None = torch.nn.MSELoss(),
    optimizer: type[torch.optim.Optimizer] | None = torch.optim.Adam,
    batch_size: int = 128,
    epochs: int = 2000,
    seed: int = 42,
    device: str = "cpu",
    compile: bool = True,
    **kwargs,
):
    """
    Initializes the RegressionMLP with the given parameters.

    Args:
        model (torch.nn.Module | None): The PyTorch model to be used. If None, a new MLP model is created at fit time from the feature dimensionality.
        loss (torch.nn.modules.loss._Loss | None): The loss function to be used. Defaults to Mean Squared Error Loss.
        optimizer (type[torch.optim.Optimizer] | None): The optimizer class to be used. Defaults to Adam.
        batch_size (int): The batch size for training. Defaults to 128.
        epochs (int): The number of epochs for training. Defaults to 2000.
        seed (int): The random seed for reproducibility. Defaults to 42.
        device (str): The device to run the model on ('cpu' or 'cuda'). Defaults to 'cpu'.
        compile (bool): Whether to compile the model using `torch.compile`. Defaults to True.
        **kwargs: Additional keyword arguments passed to the parent class.
    """
    super().__init__(**kwargs)

    assert TORCH_AVAILABLE, "PyTorch is not available. Please install it."

    torch.manual_seed(seed)

    self.model = model
    self.device = device

    self.loss = loss
    self.batch_size = batch_size
    self.optimizer = optimizer
    self.epochs = epochs
    self.compile = compile

fit(features, performance, sample_weight=None)

Fits the model to the given feature and performance data.

Parameters:

features (pd.DataFrame)
    DataFrame containing the feature data. Required.
performance (pd.DataFrame)
    DataFrame containing the performance data. Required.
sample_weight (default: None)
    Not supported; must be None.

Returns:

RegressionMLP
    The fitted model instance.

Source code in asf/predictors/regression_mlp.py
def fit(
    self, features: pd.DataFrame, performance: pd.DataFrame, sample_weight=None
) -> "RegressionMLP":
    """
    Fits the model to the given feature and performance data.

    Args:
        features (pd.DataFrame): DataFrame containing the feature data.
        performance (pd.DataFrame): DataFrame containing the performance data.
        sample_weight: Not supported; must be None.

    Returns:
        RegressionMLP: The fitted model instance.
    """
    assert sample_weight is None, "Sample weights are not supported."

    if self.model is None:
        self.model = get_mlp(input_size=features.shape[1], output_size=1)

    self.model.to(self.device)

    if self.compile:
        self.model = torch.compile(self.model)

    features = pd.DataFrame(
        SimpleImputer().fit_transform(features.values),
        index=features.index,
        columns=features.columns,
    )
    dataloader = self._get_dataloader(features, performance)

    optimizer = self.optimizer(self.model.parameters())
    self.model.train()
    for epoch in range(self.epochs):
        total_loss = 0
        for i, (X, y) in enumerate(dataloader):
            X, y = X.to(self.device), y.to(self.device)
            X = X.float()
            y = y.unsqueeze(-1)
            optimizer.zero_grad()
            y_pred = self.model(X)
            loss = self.loss(y_pred, y)
            total_loss += loss.item()
            loss.backward()
            optimizer.step()

    return self

load(file_path)

Loads the model from the specified file path.

Parameters:

file_path (str)
    The path to load the model from. Required.
Source code in asf/predictors/regression_mlp.py
def load(self, file_path: str) -> None:
    """
    Loads the model from the specified file path.

    Args:
        file_path (str): The path to load the model from.
    """
    self.model = torch.load(file_path)

predict(features)

Predicts the performance of algorithms for the given features.

Parameters:

features (pd.DataFrame)
    DataFrame containing the feature data. Required.

Returns:

np.ndarray
    Array containing the predicted performance values.

Source code in asf/predictors/regression_mlp.py
def predict(self, features: pd.DataFrame) -> np.ndarray:
    """
    Predicts the performance of algorithms for the given features.

    Args:
        features (pd.DataFrame): DataFrame containing the feature data.

    Returns:
        np.ndarray: Array containing the predicted performance values.
    """
    self.model.eval()

    features = torch.from_numpy(features.values).to(self.device).float()
    # Move the result to the CPU before converting to NumPy so that
    # prediction also works when device="cuda".
    predictions = self.model(features).detach().cpu().numpy().squeeze(1)

    return predictions

save(file_path)

Saves the model to the specified file path.

Parameters:

file_path (str)
    The path to save the model. Required.
Source code in asf/predictors/regression_mlp.py
def save(self, file_path: str) -> None:
    """
    Saves the model to the specified file path.

    Args:
        file_path (str): The path to save the model.
    """
    torch.save(self.model, file_path)

SVMClassifierWrapper

Bases: SklearnWrapper

A wrapper for the Scikit-learn SVC (Support Vector Classifier) model. Provides methods to define a configuration space and create an instance of the classifier from a configuration.

Attributes

PREFIX : str Prefix used for parameter names in the configuration space.

Source code in asf/predictors/svm.py
class SVMClassifierWrapper(SklearnWrapper):
    """
    A wrapper for the Scikit-learn SVC (Support Vector Classifier) model.
    Provides methods to define a configuration space and create an instance
    of the classifier from a configuration.

    Attributes
    ----------
    PREFIX : str
        Prefix used for parameter names in the configuration space.
    """

    PREFIX = "svm_classifier"

    def __init__(self, init_params: Dict[str, Any] = {}):
        """
        Initialize the SVMClassifierWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Dictionary of parameters to initialize the SVC model.
        """
        super().__init__(SVC, init_params)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Define the configuration space for the SVM classifier.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            parent_param : Hyperparameter, optional
                If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
            parent_value : str, optional
                The value of parent_param for which the hyperparameters are active.

            Returns
            -------
            ConfigurationSpace
                The configuration space containing hyperparameters for the SVM classifier.
            """
            if cs is None:
                cs = ConfigurationSpace(name="SVM")

            if pre_prefix != "":
                prefix = f"{pre_prefix}:{SVMClassifierWrapper.PREFIX}"
            else:
                prefix = SVMClassifierWrapper.PREFIX

            kernel = Categorical(
                f"{prefix}:kernel",
                items=["linear", "rbf", "poly", "sigmoid"],
                default="rbf",
            )
            degree = Integer(f"{prefix}:degree", (1, 128), log=True, default=1)
            coef0 = Float(
                f"{prefix}:coef0",
                (-0.5, 0.5),
                log=False,
                default=0.49070634552851977,
            )
            tol = Float(
                f"{prefix}:tol",
                (1e-4, 1e-2),
                log=True,
                default=0.0002154969698207585,
            )
            gamma = Categorical(
                f"{prefix}:gamma",
                items=["scale", "auto"],
                default="scale",
            )
            C = Float(
                f"{prefix}:C",
                (1.0, 20),
                log=True,
                default=3.2333262862494365,
            )
            shrinking = Categorical(
                f"{prefix}:shrinking",
                items=[True, False],
                default=True,
            )

            params = [kernel, degree, coef0, tol, gamma, C, shrinking]

            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create an SVMClassifierWrapper instance from a configuration.

            Parameters
            ----------
            configuration : dict
                Dictionary containing the configuration parameters.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            **kwargs
                Additional parameters to include in the model initialization.

            Returns
            -------
            partial
                A partial function to create an SVMClassifierWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{SVMClassifierWrapper.PREFIX}"
            else:
                prefix = SVMClassifierWrapper.PREFIX

            svm_params = {
                "kernel": configuration[f"{prefix}:kernel"],
                "degree": configuration[f"{prefix}:degree"],
                "coef0": configuration[f"{prefix}:coef0"],
                "tol": configuration[f"{prefix}:tol"],
                "gamma": configuration[f"{prefix}:gamma"],
                "C": configuration[f"{prefix}:C"],
                "shrinking": configuration[f"{prefix}:shrinking"],
                **kwargs,
            }

            return partial(SVMClassifierWrapper, init_params=svm_params)

__init__(init_params={})

Initialize the SVMClassifierWrapper.

Parameters

init_params : dict, optional Dictionary of parameters to initialize the SVC model.

Source code in asf/predictors/svm.py
def __init__(self, init_params: Dict[str, Any] = {}):
    """
    Initialize the SVMClassifierWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Dictionary of parameters to initialize the SVC model.
    """
    super().__init__(SVC, init_params)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Define the configuration space for the SVM classifier.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
parent_param : Hyperparameter, optional
    If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
parent_value : str, optional
    The value of parent_param for which the hyperparameters are active.

Returns

ConfigurationSpace The configuration space containing hyperparameters for the SVM classifier.

Source code in asf/predictors/svm.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Define the configuration space for the SVM classifier.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    parent_param : Hyperparameter, optional
        If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
    parent_value : str, optional
        The value of parent_param for which the hyperparameters are active.

    Returns
    -------
    ConfigurationSpace
        The configuration space containing hyperparameters for the SVM classifier.
    """
    if cs is None:
        cs = ConfigurationSpace(name="SVM")

    if pre_prefix != "":
        prefix = f"{pre_prefix}:{SVMClassifierWrapper.PREFIX}"
    else:
        prefix = SVMClassifierWrapper.PREFIX

    kernel = Categorical(
        f"{prefix}:kernel",
        items=["linear", "rbf", "poly", "sigmoid"],
        default="rbf",
    )
    degree = Integer(f"{prefix}:degree", (1, 128), log=True, default=1)
    coef0 = Float(
        f"{prefix}:coef0",
        (-0.5, 0.5),
        log=False,
        default=0.49070634552851977,
    )
    tol = Float(
        f"{prefix}:tol",
        (1e-4, 1e-2),
        log=True,
        default=0.0002154969698207585,
    )
    gamma = Categorical(
        f"{prefix}:gamma",
        items=["scale", "auto"],
        default="scale",
    )
    C = Float(
        f"{prefix}:C",
        (1.0, 20),
        log=True,
        default=3.2333262862494365,
    )
    shrinking = Categorical(
        f"{prefix}:shrinking",
        items=[True, False],
        default=True,
    )

    params = [kernel, degree, coef0, tol, gamma, C, shrinking]

    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an SVMClassifierWrapper instance from a configuration.

Parameters

configuration : dict
    Dictionary containing the configuration parameters.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
**kwargs
    Additional parameters to include in the model initialization.

Returns

partial A partial function to create an SVMClassifierWrapper instance.

Source code in asf/predictors/svm.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create an SVMClassifierWrapper instance from a configuration.

    Parameters
    ----------
    configuration : dict
        Dictionary containing the configuration parameters.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    **kwargs
        Additional parameters to include in the model initialization.

    Returns
    -------
    partial
        A partial function to create an SVMClassifierWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{SVMClassifierWrapper.PREFIX}"
    else:
        prefix = SVMClassifierWrapper.PREFIX

    svm_params = {
        "kernel": configuration[f"{prefix}:kernel"],
        "degree": configuration[f"{prefix}:degree"],
        "coef0": configuration[f"{prefix}:coef0"],
        "tol": configuration[f"{prefix}:tol"],
        "gamma": configuration[f"{prefix}:gamma"],
        "C": configuration[f"{prefix}:C"],
        "shrinking": configuration[f"{prefix}:shrinking"],
        **kwargs,
    }

    return partial(SVMClassifierWrapper, init_params=svm_params)
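
As with the other wrappers, the configuration space and the factory combine directly (a sketch; import path and toy data are assumptions):

import numpy as np
from asf.predictors import SVMClassifierWrapper

rng = np.random.default_rng(0)
X = rng.normal(size=(60, 4))
y = rng.integers(0, 2, size=60)

cs = SVMClassifierWrapper.get_configuration_space()
svm = SVMClassifierWrapper.get_from_configuration(cs.sample_configuration())()
svm.fit(X, y)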

SVMRegressorWrapper

Bases: SklearnWrapper

A wrapper for the Scikit-learn SVR (Support Vector Regressor) model. Provides methods to define a configuration space and create an instance of the regressor from a configuration.

Attributes

PREFIX : str Prefix used for parameter names in the configuration space.

Source code in asf/predictors/svm.py
class SVMRegressorWrapper(SklearnWrapper):
    """
    A wrapper for the Scikit-learn SVR (Support Vector Regressor) model.
    Provides methods to define a configuration space and create an instance
    of the regressor from a configuration.

    Attributes
    ----------
    PREFIX : str
        Prefix used for parameter names in the configuration space.
    """

    PREFIX = "svm_regressor"

    def __init__(self, init_params: Dict[str, Any] = {}):
        """
        Initialize the SVMRegressorWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Dictionary of parameters to initialize the SVR model.
        """
        super().__init__(SVR, init_params)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Define the configuration space for the SVM regressor.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new
                ConfigurationSpace will be created.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            parent_param : Hyperparameter, optional
                If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
            parent_value : str, optional
                The value of parent_param for which the hyperparameters are active.

            Returns
            -------
            ConfigurationSpace
                The configuration space containing hyperparameters for the SVM regressor.
            """

            if pre_prefix != "":
                prefix = f"{pre_prefix}:{SVMRegressorWrapper.PREFIX}"
            else:
                prefix = SVMRegressorWrapper.PREFIX

            if cs is None:
                cs = ConfigurationSpace(name="SVM Regressor")

            kernel = Categorical(
                f"{prefix}:kernel",
                items=["linear", "rbf", "poly", "sigmoid"],
                default="rbf",
            )
            degree = Integer(f"{prefix}:degree", (1, 128), log=True, default=1)
            coef0 = Float(
                f"{prefix}:coef0",
                (-0.5, 0.5),
                log=False,
                default=0.0,
            )
            tol = Float(
                f"{prefix}:tol",
                (1e-4, 1e-2),
                log=True,
                default=0.001,
            )
            gamma = Categorical(
                f"{prefix}:gamma",
                items=["scale", "auto"],
                default="scale",
            )
            C = Float(f"{prefix}:C", (1.0, 20), log=True, default=1.0)
            shrinking = Categorical(
                f"{prefix}:shrinking",
                items=[True, False],
                default=True,
            )
            params = [kernel, degree, coef0, tol, gamma, C, shrinking]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
        ) -> partial:
            """
            Create an SVMRegressorWrapper instance from a configuration.

            Parameters
            ----------
            configuration : dict
                Dictionary containing the configuration parameters.
            pre_prefix : str, optional
                Prefix prepended to parameter names, used for nested configuration spaces.
            **kwargs
                Additional parameters to include in the model initialization.

            Returns
            -------
            partial
                A partial function to create an SVMRegressorWrapper instance.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{SVMRegressorWrapper.PREFIX}"
            else:
                prefix = SVMRegressorWrapper.PREFIX

            svr_params = {
                "kernel": configuration[f"{prefix}:kernel"],
                "degree": configuration[f"{prefix}:degree"],
                "coef0": configuration[f"{prefix}:coef0"],
                "tol": configuration[f"{prefix}:tol"],
                "gamma": configuration[f"{prefix}:gamma"],
                "C": configuration[f"{prefix}:C"],
                "shrinking": configuration[f"{prefix}:shrinking"],
                **kwargs,
            }

            return partial(SVMRegressorWrapper, init_params=svr_params)

__init__(init_params={})

Initialize the SVMRegressorWrapper.

Parameters

init_params : dict, optional Dictionary of parameters to initialize the SVR model.

Source code in asf/predictors/svm.py
def __init__(self, init_params: Dict[str, Any] = {}):
    """
    Initialize the SVMRegressorWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Dictionary of parameters to initialize the SVR model.
    """
    super().__init__(SVR, init_params)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Define the configuration space for the SVM regressor.

Parameters

cs : ConfigurationSpace, optional
    The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
pre_prefix : str, optional
    Prefix prepended to parameter names, used for nested configuration spaces.
parent_param : Hyperparameter, optional
    If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
parent_value : str, optional
    The value of parent_param for which the hyperparameters are active.

Returns

ConfigurationSpace The configuration space containing hyperparameters for the SVM regressor.

Source code in asf/predictors/svm.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Define the configuration space for the SVM regressor.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new
        ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to parameter names, used for nested configuration spaces.
    parent_param : Hyperparameter, optional
        If given, each hyperparameter is made conditional on this parent via an EqualsCondition.
    parent_value : str, optional
        The value of parent_param for which the hyperparameters are active.

    Returns
    -------
    ConfigurationSpace
        The configuration space containing hyperparameters for the SVM regressor.
    """

    if pre_prefix != "":
        prefix = f"{pre_prefix}:{SVMRegressorWrapper.PREFIX}"
    else:
        prefix = SVMRegressorWrapper.PREFIX

    if cs is None:
        cs = ConfigurationSpace(name="SVM Regressor")

    kernel = Categorical(
        f"{prefix}:kernel",
        items=["linear", "rbf", "poly", "sigmoid"],
        default="rbf",
    )
    degree = Integer(f"{prefix}:degree", (1, 128), log=True, default=1)
    coef0 = Float(
        f"{prefix}:coef0",
        (-0.5, 0.5),
        log=False,
        default=0.0,
    )
    tol = Float(
        f"{prefix}:tol",
        (1e-4, 1e-2),
        log=True,
        default=0.001,
    )
    gamma = Categorical(
        f"{prefix}:gamma",
        items=["scale", "auto"],
        default="scale",
    )
    C = Float(f"{prefix}:C", (1.0, 20), log=True, default=1.0)
    shrinking = Categorical(
        f"{prefix}:shrinking",
        items=[True, False],
        default=True,
    )
    params = [kernel, degree, coef0, tol, gamma, C, shrinking]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs
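
The pre_prefix, parent_param, and parent_value arguments exist so that several predictor spaces can be nested inside one larger ConfigurationSpace without name clashes. A minimal naming sketch (the concrete PREFIX value is the class constant, not spelled out here):

# Standalone space: parameter names follow f"{SVMRegressorWrapper.PREFIX}:kernel", ":C", ...
cs = SVMRegressorWrapper.get_configuration_space()

# Nested space: names become f"selector:{SVMRegressorWrapper.PREFIX}:C" and so on,
# keeping this predictor's parameters disjoint from those of other predictors.
nested = SVMRegressorWrapper.get_configuration_space(pre_prefix="selector")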

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an SVMRegressorWrapper instance from a configuration.

Parameters

configuration : dict Dictionary containing the configuration parameters. pre_prefix : str, optional Prefix prepended to the parameter names read from the configuration. **kwargs : Any Additional parameters to include in the model initialization.

Returns

partial A partial function to create an SVMRegressorWrapper instance.

Source code in asf/predictors/svm.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any], pre_prefix: str = "", **kwargs
) -> partial:
    """
    Create an SVMRegressorWrapper instance from a configuration.

    Parameters
    ----------
    configuration : dict
        Dictionary containing the configuration parameters.
    pre_prefix : str, optional
        Prefix prepended to the parameter names read from the configuration.
    **kwargs : Any
        Additional parameters to include in the model initialization.

    Returns
    -------
    partial
        A partial function to create an SVMRegressorWrapper instance.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{SVMRegressorWrapper.PREFIX}"
    else:
        prefix = SVMRegressorWrapper.PREFIX

    svr_params = {
        "kernel": configuration[f"{prefix}:kernel"],
        "degree": configuration[f"{prefix}:degree"],
        "coef0": configuration[f"{prefix}:coef0"],
        "tol": configuration[f"{prefix}:tol"],
        "gamma": configuration[f"{prefix}:gamma"],
        "C": configuration[f"{prefix}:C"],
        "shrinking": configuration[f"{prefix}:shrinking"],
        **kwargs,
    }

    return partial(SVMRegressorWrapper, init_params=svr_params)
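
Together, the two static methods support a sample-then-build loop. A minimal sketch with synthetic data (illustrative only; assumes scikit-learn and ConfigSpace are installed):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 4))
y = rng.normal(size=50)

cs = SVMRegressorWrapper.get_configuration_space()
config = cs.sample_configuration()

# get_from_configuration returns a partial, not an instance; call it to build.
make_svr = SVMRegressorWrapper.get_from_configuration(config)
svr = make_svr()
svr.fit(X, y)
preds = svr.predict(X)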

SklearnWrapper

Bases: AbstractPredictor

A generic wrapper for scikit-learn models.

This class allows scikit-learn models to be used with the ASF framework.

Methods

fit(X, Y, sample_weight=None, kwargs) Fit the model to the data. predict(X, kwargs) Predict using the model. save(file_path) Save the model to a file. load(file_path) Load the model from a file.

Source code in asf/predictors/sklearn_wrapper.py
class SklearnWrapper(AbstractPredictor):
    """
    A generic wrapper for scikit-learn models.

    This class allows scikit-learn models to be used with the ASF framework.

    Methods
    -------
    fit(X, Y, sample_weight=None, **kwargs)
        Fit the model to the data.
    predict(X, **kwargs)
        Predict using the model.
    save(file_path)
        Save the model to a file.
    load(file_path)
        Load the model from a file.
    """

    def __init__(self, model_class: type, init_params: Dict[str, Any] = {}):
        """
        Initialize the wrapper with a scikit-learn model.

        Parameters
        ----------
        model_class : type
            A scikit-learn estimator class (an instance is created with `init_params`).
        init_params : dict, optional
            Initialization parameters for the scikit-learn model (default is an empty dictionary).
        """
        # Despite the name, this attribute holds the estimator instance.
        self.model_class = model_class(**init_params)

    def fit(
        self,
        X: np.ndarray,
        Y: np.ndarray,
        sample_weight: np.ndarray = None,
        **kwargs: Any,
    ) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : np.ndarray
            Training data of shape (n_samples, n_features).
        Y : np.ndarray
            Target values of shape (n_samples,).
        sample_weight : np.ndarray, optional
            Sample weights of shape (n_samples,) (default is None).
        **kwargs : Any
            Additional keyword arguments for the scikit-learn model's `fit` method.
        """
        self.model_class.fit(X, Y, sample_weight=sample_weight, **kwargs)

    def predict(self, X: np.ndarray, **kwargs: Any) -> np.ndarray:
        """
        Predict using the model.

        Parameters
        ----------
        X : np.ndarray
            Data to predict on of shape (n_samples, n_features).
        **kwargs : Any
            Additional keyword arguments for the scikit-learn model's `predict` method.

        Returns
        -------
        np.ndarray
            Predicted values of shape (n_samples,).
        """
        return self.model_class.predict(X, **kwargs)

    def save(self, file_path: str) -> None:
        """
        Save the model to a file.

        Parameters
        ----------
        file_path : str
            Path to the file where the model will be saved.
        """
        import joblib

        joblib.dump(self, file_path)

    def load(self, file_path: str) -> "SklearnWrapper":
        """
        Load the model from a file.

        Parameters
        ----------
        file_path : str
            Path to the file from which the model will be loaded.

        Returns
        -------
        SklearnWrapper
            The loaded model.
        """
        import joblib

        return joblib.load(file_path)
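
As a usage sketch, any scikit-learn estimator class can be wrapped by passing the class itself (not an instance) together with its initialization parameters. The import path below is assumed from the source path shown above:

import numpy as np
from sklearn.ensemble import RandomForestRegressor

from asf.predictors.sklearn_wrapper import SklearnWrapper  # assumed import path

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 5))
y = rng.normal(size=100)

model = SklearnWrapper(RandomForestRegressor, init_params={"n_estimators": 50})
model.fit(X, y)
preds = model.predict(X)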

__init__(model_class, init_params={})

Initialize the wrapper with a scikit-learn model.

Parameters

model_class : type A scikit-learn estimator class (an instance is created with init_params). init_params : dict, optional Initialization parameters for the scikit-learn model (default is an empty dictionary).

Source code in asf/predictors/sklearn_wrapper.py
def __init__(self, model_class: type, init_params: Dict[str, Any] = {}):
    """
    Initialize the wrapper with a scikit-learn model.

    Parameters
    ----------
    model_class : type
        A scikit-learn estimator class (an instance is created with `init_params`).
    init_params : dict, optional
        Initialization parameters for the scikit-learn model (default is an empty dictionary).
    """
    # Despite the name, this attribute holds the estimator instance.
    self.model_class = model_class(**init_params)

fit(X, Y, sample_weight=None, **kwargs)

Fit the model to the data.

Parameters

X : np.ndarray Training data of shape (n_samples, n_features). Y : np.ndarray Target values of shape (n_samples,). sample_weight : np.ndarray, optional Sample weights of shape (n_samples,) (default is None). **kwargs : Any Additional keyword arguments for the scikit-learn model's fit method.

Source code in asf/predictors/sklearn_wrapper.py
def fit(
    self,
    X: np.ndarray,
    Y: np.ndarray,
    sample_weight: np.ndarray = None,
    **kwargs: Any,
) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : np.ndarray
        Training data of shape (n_samples, n_features).
    Y : np.ndarray
        Target values of shape (n_samples,).
    sample_weight : np.ndarray, optional
        Sample weights of shape (n_samples,) (default is None).
    **kwargs : Any
        Additional keyword arguments for the scikit-learn model's `fit` method.
    """
    self.model_class.fit(X, Y, sample_weight=sample_weight, **kwargs)

load(file_path)

Load the model from a file.

Parameters

file_path : str Path to the file from which the model will be loaded.

Returns

SklearnWrapper The loaded model.

Source code in asf/predictors/sklearn_wrapper.py
def load(self, file_path: str) -> "SklearnWrapper":
    """
    Load the model from a file.

    Parameters
    ----------
    file_path : str
        Path to the file from which the model will be loaded.

    Returns
    -------
    SklearnWrapper
        The loaded model.
    """
    import joblib

    return joblib.load(file_path)

predict(X, **kwargs)

Predict using the model.

Parameters

X : np.ndarray Data to predict on of shape (n_samples, n_features). **kwargs : Any Additional keyword arguments for the scikit-learn model's predict method.

Returns

np.ndarray Predicted values of shape (n_samples,).

Source code in asf/predictors/sklearn_wrapper.py
def predict(self, X: np.ndarray, **kwargs: Any) -> np.ndarray:
    """
    Predict using the model.

    Parameters
    ----------
    X : np.ndarray
        Data to predict on of shape (n_samples, n_features).
    **kwargs : Any
        Additional keyword arguments for the scikit-learn model's `predict` method.

    Returns
    -------
    np.ndarray
        Predicted values of shape (n_samples,).
    """
    return self.model_class.predict(X, **kwargs)

save(file_path)

Save the model to a file.

Parameters

file_path : str Path to the file where the model will be saved.

Source code in asf/predictors/sklearn_wrapper.py
def save(self, file_path: str) -> None:
    """
    Save the model to a file.

    Parameters
    ----------
    file_path : str
        Path to the file where the model will be saved.
    """
    import joblib

    joblib.dump(self, file_path)
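
Note that save serializes the entire wrapper with joblib, while load returns the deserialized object instead of mutating self, so its return value must be captured. A round-trip sketch (import path assumed as above):

import numpy as np
from sklearn.linear_model import Ridge

from asf.predictors.sklearn_wrapper import SklearnWrapper  # assumed import path

X = np.random.default_rng(0).normal(size=(50, 3))
y = X.sum(axis=1)

model = SklearnWrapper(Ridge)
model.fit(X, y)
model.save("ridge.joblib")

# load() returns a new wrapper; it does not update the instance it is called on.
restored = SklearnWrapper(Ridge).load("ridge.joblib")
assert np.allclose(restored.predict(X), model.predict(X))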

XGBoostClassifierWrapper

Bases: SklearnWrapper

Wrapper for the XGBoost classifier to integrate with the ASF framework.

Source code in asf/predictors/xgboost.py
class XGBoostClassifierWrapper(SklearnWrapper):
    """
    Wrapper for the XGBoost classifier to integrate with the ASF framework.
    """

    PREFIX: str = "xgb_classifier"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the XGBoostClassifierWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Initialization parameters for the XGBoost classifier.
        """
        if not XGB_AVAILABLE:
            raise ImportError(
                "XGBoost is not installed. Please install it using pip install asf-lib[xgb]."
            )
        super().__init__(XGBClassifier, init_params or {})

    def fit(
        self,
        X: np.ndarray,
        Y: np.ndarray,
        sample_weight: np.ndarray = None,
        **kwargs: Any,
    ) -> None:
        """
        Fit the model to the data.

        Parameters
        ----------
        X : np.ndarray
            Training data of shape (n_samples, n_features).
        Y : np.ndarray
            Target values of shape (n_samples,).
        sample_weight : np.ndarray, optional
            Sample weights of shape (n_samples,) (default is None).
        **kwargs : Any
            Additional keyword arguments for the scikit-learn model's `fit` method.
        """
        # Remember whether the labels were boolean so predict() can cast back.
        self.bool_labels = Y.dtype == bool

        self.model_class.fit(X, Y, sample_weight=sample_weight, **kwargs)

    def predict(self, X: np.ndarray, **kwargs: Any) -> np.ndarray:
        """
        Predict using the model.

        Parameters
        ----------
        X : np.ndarray
            Data to predict on of shape (n_samples, n_features).
        **kwargs : Any
            Additional keyword arguments for the scikit-learn model's `predict` method.

        Returns
        -------
        np.ndarray
            Predicted values of shape (n_samples,).
        """
        if self.bool_labels:
            return self.model_class.predict(X, **kwargs).astype(bool)
        return self.model_class.predict(X, **kwargs)

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the XGBoost classifier.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
            pre_prefix : str, optional
                Prefix prepended to all parameter names.
            parent_param : Hyperparameter, optional
                Parent hyperparameter; if given, every parameter is made conditional on it.
            parent_value : str, optional
                Value of the parent hyperparameter that activates these parameters.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the XGBoost parameters.
            """
            if cs is None:
                cs = ConfigurationSpace(name="XGBoost")

            if pre_prefix != "":
                prefix = f"{pre_prefix}:{XGBoostClassifierWrapper.PREFIX}"
            else:
                prefix = XGBoostClassifierWrapper.PREFIX

            booster = Constant(f"{prefix}:booster", "gbtree")
            max_depth = Integer(
                f"{prefix}:max_depth",
                (1, 20),
                log=False,
                default=13,
            )
            min_child_weight = Integer(
                f"{prefix}:min_child_weight",
                (1, 100),
                log=True,
                default=39,
            )
            colsample_bytree = Float(
                f"{prefix}:colsample_bytree",
                (0.0, 1.0),
                log=False,
                default=0.2545374925231651,
            )
            colsample_bylevel = Float(
                f"{prefix}:colsample_bylevel",
                (0.0, 1.0),
                log=False,
                default=0.6909224923784677,
            )
            lambda_param = Float(
                f"{prefix}:lambda",
                (0.001, 1000),
                log=True,
                default=31.393252465064943,
            )
            alpha = Float(
                f"{prefix}:alpha",
                (0.001, 1000),
                log=True,
                default=0.24167936088332426,
            )
            learning_rate = Float(
                f"{prefix}:learning_rate",
                (0.001, 0.1),
                log=True,
                default=0.008237525103357958,
            )

            params = [
                booster,
                max_depth,
                min_child_weight,
                colsample_bytree,
                colsample_bylevel,
                lambda_param,
                alpha,
                learning_rate,
            ]

            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any],
            pre_prefix: str = "",
            **kwargs: Any,
        ) -> Callable[..., "XGBoostClassifierWrapper"]:
            """
            Create an XGBoostClassifierWrapper from a configuration.

            Parameters
            ----------
            configuration : dict
                The configuration dictionary.
            pre_prefix : str, optional
                Prefix prepended to the parameter names read from the configuration.
            **kwargs : Any
                Additional parameters to include in the model initialization.

            Returns
            -------
            Callable[..., XGBoostClassifierWrapper]
                A callable that initializes the wrapper with the given configuration.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{XGBoostClassifierWrapper.PREFIX}"
            else:
                prefix = XGBoostClassifierWrapper.PREFIX

            xgb_params = {
                "booster": configuration[f"{prefix}:booster"],
                "max_depth": configuration[f"{prefix}:max_depth"],
                "min_child_weight": configuration[f"{prefix}:min_child_weight"],
                "colsample_bytree": configuration[f"{prefix}:colsample_bytree"],
                "colsample_bylevel": configuration[f"{prefix}:colsample_bylevel"],
                "lambda": configuration[f"{prefix}:lambda"],
                "alpha": configuration[f"{prefix}:alpha"],
                "learning_rate": configuration[f"{prefix}:learning_rate"],
                **kwargs,
            }

            return partial(XGBoostClassifierWrapper, init_params=xgb_params)

__init__(init_params=None)

Initialize the XGBoostClassifierWrapper.

Parameters

init_params : dict, optional Initialization parameters for the XGBoost classifier.

Source code in asf/predictors/xgboost.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the XGBoostClassifierWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Initialization parameters for the XGBoost classifier.
    """
    if not XGB_AVAILABLE:
        raise ImportError(
            "XGBoost is not installed. Please install it using pip install asf-lib[xgb]."
        )
    super().__init__(XGBClassifier, init_params or {})

fit(X, Y, sample_weight=None, **kwargs)

Fit the model to the data.

Parameters

X : np.ndarray Training data of shape (n_samples, n_features). Y : np.ndarray Target values of shape (n_samples,). sample_weight : np.ndarray, optional Sample weights of shape (n_samples,) (default is None). **kwargs : Any Additional keyword arguments for the scikit-learn model's fit method.

Source code in asf/predictors/xgboost.py
def fit(
    self,
    X: np.ndarray,
    Y: np.ndarray,
    sample_weight: np.ndarray = None,
    **kwargs: Any,
) -> None:
    """
    Fit the model to the data.

    Parameters
    ----------
    X : np.ndarray
        Training data of shape (n_samples, n_features).
    Y : np.ndarray
        Target values of shape (n_samples,).
    sample_weight : np.ndarray, optional
        Sample weights of shape (n_samples,) (default is None).
    **kwargs : Any
        Additional keyword arguments for the scikit-learn model's `fit` method.
    """
    # Remember whether the labels were boolean so predict() can cast back.
    self.bool_labels = Y.dtype == bool

    self.model_class.fit(X, Y, sample_weight=sample_weight, **kwargs)

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the XGBoost classifier.

Parameters

cs : ConfigurationSpace, optional The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created. pre_prefix : str, optional Prefix prepended to all parameter names. parent_param : Hyperparameter, optional Parent hyperparameter; if given, every parameter is made conditional on it. parent_value : str, optional Value of the parent hyperparameter that activates these parameters.

Returns

ConfigurationSpace The configuration space with the XGBoost parameters.

Source code in asf/predictors/xgboost.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the XGBoost classifier.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to all parameter names.
    parent_param : Hyperparameter, optional
        Parent hyperparameter; if given, every parameter is made conditional on it.
    parent_value : str, optional
        Value of the parent hyperparameter that activates these parameters.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the XGBoost parameters.
    """
    if cs is None:
        cs = ConfigurationSpace(name="XGBoost")

    if pre_prefix != "":
        prefix = f"{pre_prefix}:{XGBoostClassifierWrapper.PREFIX}"
    else:
        prefix = XGBoostClassifierWrapper.PREFIX

    booster = Constant(f"{prefix}:booster", "gbtree")
    max_depth = Integer(
        f"{prefix}:max_depth",
        (1, 20),
        log=False,
        default=13,
    )
    min_child_weight = Integer(
        f"{prefix}:min_child_weight",
        (1, 100),
        log=True,
        default=39,
    )
    colsample_bytree = Float(
        f"{prefix}:colsample_bytree",
        (0.0, 1.0),
        log=False,
        default=0.2545374925231651,
    )
    colsample_bylevel = Float(
        f"{prefix}:colsample_bylevel",
        (0.0, 1.0),
        log=False,
        default=0.6909224923784677,
    )
    lambda_param = Float(
        f"{prefix}:lambda",
        (0.001, 1000),
        log=True,
        default=31.393252465064943,
    )
    alpha = Float(
        f"{prefix}:alpha",
        (0.001, 1000),
        log=True,
        default=0.24167936088332426,
    )
    learning_rate = Float(
        f"{prefix}:learning_rate",
        (0.001, 0.1),
        log=True,
        default=0.008237525103357958,
    )

    params = [
        booster,
        max_depth,
        min_child_weight,
        colsample_bytree,
        colsample_bylevel,
        lambda_param,
        alpha,
        learning_rate,
    ]

    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs
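
A sketch of the parent_param mechanism: hanging the XGBoost parameters off a model-selection hyperparameter so that they are only active when that model is chosen. The names "model" and "xgb" are illustrative, and the ConfigSpace API used matches the cs.add style of the source above:

from ConfigSpace import Categorical, ConfigurationSpace

cs = ConfigurationSpace(name="selector")
model = Categorical("model", ["xgb", "other"], default="xgb")
cs.add(model)

# Every XGBoost hyperparameter becomes conditional on model == "xgb".
XGBoostClassifierWrapper.get_configuration_space(
    cs=cs, parent_param=model, parent_value="xgb"
)

config = cs.sample_configuration()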

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an XGBoostClassifierWrapper from a configuration.

Parameters

configuration : dict The configuration dictionary. pre_prefix : str, optional Prefix prepended to the parameter names read from the configuration. **kwargs : Any Additional parameters to include in the model initialization.

Returns

Callable[..., XGBoostClassifierWrapper] A callable that initializes the wrapper with the given configuration.

Source code in asf/predictors/xgboost.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any],
    pre_prefix: str = "",
    **kwargs: Any,
) -> Callable[..., "XGBoostClassifierWrapper"]:
    """
    Create an XGBoostClassifierWrapper from a configuration.

    Parameters
    ----------
    configuration : dict
        The configuration dictionary.
    pre_prefix : str, optional
        Prefix prepended to the parameter names read from the configuration.
    **kwargs : Any
        Additional parameters to include in the model initialization.

    Returns
    -------
    Callable[..., XGBoostClassifierWrapper]
        A callable that initializes the wrapper with the given configuration.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{XGBoostClassifierWrapper.PREFIX}"
    else:
        prefix = XGBoostClassifierWrapper.PREFIX

    xgb_params = {
        "booster": configuration[f"{prefix}:booster"],
        "max_depth": configuration[f"{prefix}:max_depth"],
        "min_child_weight": configuration[f"{prefix}:min_child_weight"],
        "colsample_bytree": configuration[f"{prefix}:colsample_bytree"],
        "colsample_bylevel": configuration[f"{prefix}:colsample_bylevel"],
        "lambda": configuration[f"{prefix}:lambda"],
        "alpha": configuration[f"{prefix}:alpha"],
        "learning_rate": configuration[f"{prefix}:learning_rate"],
        **kwargs,
    }

    return partial(XGBoostClassifierWrapper, init_params=xgb_params)

predict(X, **kwargs)

Predict using the model.

Parameters

X : np.ndarray Data to predict on of shape (n_samples, n_features). **kwargs : Any Additional keyword arguments for the scikit-learn model's predict method.

Returns

np.ndarray Predicted values of shape (n_samples,).

Source code in asf/predictors/xgboost.py
def predict(self, X: np.ndarray, **kwargs: Any) -> np.ndarray:
    """
    Predict using the model.

    Parameters
    ----------
    X : np.ndarray
        Data to predict on of shape (n_samples, n_features).
    **kwargs : Any
        Additional keyword arguments for the scikit-learn model's `predict` method.

    Returns
    -------
    np.ndarray
        Predicted values of shape (n_samples,).
    """
    if self.bool_labels:
        return self.model_class.predict(X, **kwargs).astype(bool)
    return self.model_class.predict(X, **kwargs)
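
The bool_labels bookkeeping matters because XGBoost's predict returns numeric class labels; the wrapper casts them back so boolean targets round-trip. A small sketch with synthetic data (assumes xgboost is installed):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 4))
y = X[:, 0] > 0.0  # boolean labels

clf = XGBoostClassifierWrapper()
clf.fit(X, y)

preds = clf.predict(X)
assert preds.dtype == bool  # dtype restored to match the training labels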

XGBoostRegressorWrapper

Bases: SklearnWrapper

Wrapper for the XGBoost regressor to integrate with the ASF framework.

Source code in asf/predictors/xgboost.py
class XGBoostRegressorWrapper(SklearnWrapper):
    """
    Wrapper for the XGBoost regressor to integrate with the ASF framework.
    """

    PREFIX: str = "xgb_regressor"

    def __init__(self, init_params: Optional[Dict[str, Any]] = None):
        """
        Initialize the XGBoostRegressorWrapper.

        Parameters
        ----------
        init_params : dict, optional
            Initialization parameters for the XGBoost regressor.
        """
        super().__init__(XGBRegressor, init_params or {})

    if CONFIGSPACE_AVAILABLE:

        @staticmethod
        def get_configuration_space(
            cs: Optional[ConfigurationSpace] = None,
            pre_prefix: str = "",
            parent_param: Optional[Hyperparameter] = None,
            parent_value: Optional[str] = None,
        ) -> ConfigurationSpace:
            """
            Get the configuration space for the XGBoost regressor.

            Parameters
            ----------
            cs : ConfigurationSpace, optional
                The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
            pre_prefix : str, optional
                Prefix prepended to all parameter names.
            parent_param : Hyperparameter, optional
                Parent hyperparameter; if given, every parameter is made conditional on it.
            parent_value : str, optional
                Value of the parent hyperparameter that activates these parameters.

            Returns
            -------
            ConfigurationSpace
                The configuration space with the XGBoost parameters.
            """
            if cs is None:
                cs = ConfigurationSpace(name="XGBoostRegressor")

            if pre_prefix != "":
                prefix = f"{pre_prefix}:{XGBoostRegressorWrapper.PREFIX}"
            else:
                prefix = XGBoostRegressorWrapper.PREFIX

            booster = Constant(f"{prefix}:booster", "gbtree")
            max_depth = Integer(
                f"{prefix}:max_depth",
                (1, 20),
                log=False,
                default=13,
            )
            min_child_weight = Integer(
                f"{prefix}:min_child_weight",
                (1, 100),
                log=True,
                default=39,
            )
            colsample_bytree = Float(
                f"{prefix}:colsample_bytree",
                (0.0, 1.0),
                log=False,
                default=0.2545374925231651,
            )
            colsample_bylevel = Float(
                f"{prefix}:colsample_bylevel",
                (0.0, 1.0),
                log=False,
                default=0.6909224923784677,
            )
            lambda_param = Float(
                f"{prefix}:lambda",
                (0.001, 1000),
                log=True,
                default=31.393252465064943,
            )
            alpha = Float(
                f"{prefix}:alpha",
                (0.001, 1000),
                log=True,
                default=0.24167936088332426,
            )
            learning_rate = Float(
                f"{prefix}:learning_rate",
                (0.001, 0.1),
                log=True,
                default=0.008237525103357958,
            )

            params = [
                booster,
                max_depth,
                min_child_weight,
                colsample_bytree,
                colsample_bylevel,
                lambda_param,
                alpha,
                learning_rate,
            ]
            if parent_param is not None:
                conditions = [
                    EqualsCondition(
                        child=param,
                        parent=parent_param,
                        value=parent_value,
                    )
                    for param in params
                ]
            else:
                conditions = []

            cs.add(params + conditions)

            return cs

        @staticmethod
        def get_from_configuration(
            configuration: Dict[str, Any],
            pre_prefix: str = "",
            **kwargs,
        ) -> Callable[..., "XGBoostRegressorWrapper"]:
            """
            Create an XGBoostRegressorWrapper from a configuration.

            Parameters
            ----------
            configuration : dict
                The configuration dictionary.
            pre_prefix : str, optional
                Prefix prepended to the parameter names read from the configuration.
            **kwargs : Any
                Additional parameters to include in the model initialization.

            Returns
            -------
            Callable[..., XGBoostRegressorWrapper]
                A callable that initializes the wrapper with the given configuration.
            """
            if pre_prefix != "":
                prefix = f"{pre_prefix}:{XGBoostRegressorWrapper.PREFIX}"
            else:
                prefix = XGBoostRegressorWrapper.PREFIX

            xgb_params = {
                "booster": configuration[f"{prefix}:booster"],
                "max_depth": configuration[f"{prefix}:max_depth"],
                "min_child_weight": configuration[f"{prefix}:min_child_weight"],
                "colsample_bytree": configuration[f"{prefix}:colsample_bytree"],
                "colsample_bylevel": configuration[f"{prefix}:colsample_bylevel"],
                "lambda": configuration[f"{prefix}:lambda"],
                "alpha": configuration[f"{prefix}:alpha"],
                "learning_rate": configuration[f"{prefix}:learning_rate"],
                **kwargs,
            }

            return partial(XGBoostRegressorWrapper, init_params=xgb_params)

__init__(init_params=None)

Initialize the XGBoostRegressorWrapper.

Parameters

init_params : dict, optional Initialization parameters for the XGBoost regressor.

Source code in asf/predictors/xgboost.py
def __init__(self, init_params: Optional[Dict[str, Any]] = None):
    """
    Initialize the XGBoostRegressorWrapper.

    Parameters
    ----------
    init_params : dict, optional
        Initialization parameters for the XGBoost regressor.
    """
    super().__init__(XGBRegressor, init_params or {})

get_configuration_space(cs=None, pre_prefix='', parent_param=None, parent_value=None) staticmethod

Get the configuration space for the XGBoost regressor.

Parameters

cs : ConfigurationSpace, optional The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created. pre_prefix : str, optional Prefix prepended to all parameter names. parent_param : Hyperparameter, optional Parent hyperparameter; if given, every parameter is made conditional on it. parent_value : str, optional Value of the parent hyperparameter that activates these parameters.

Returns

ConfigurationSpace The configuration space with the XGBoost parameters.

Source code in asf/predictors/xgboost.py
@staticmethod
def get_configuration_space(
    cs: Optional[ConfigurationSpace] = None,
    pre_prefix: str = "",
    parent_param: Optional[Hyperparameter] = None,
    parent_value: Optional[str] = None,
) -> ConfigurationSpace:
    """
    Get the configuration space for the XGBoost regressor.

    Parameters
    ----------
    cs : ConfigurationSpace, optional
        The configuration space to add the parameters to. If None, a new ConfigurationSpace will be created.
    pre_prefix : str, optional
        Prefix prepended to all parameter names.
    parent_param : Hyperparameter, optional
        Parent hyperparameter; if given, every parameter is made conditional on it.
    parent_value : str, optional
        Value of the parent hyperparameter that activates these parameters.

    Returns
    -------
    ConfigurationSpace
        The configuration space with the XGBoost parameters.
    """
    if cs is None:
        cs = ConfigurationSpace(name="XGBoostRegressor")

    if pre_prefix != "":
        prefix = f"{pre_prefix}:{XGBoostRegressorWrapper.PREFIX}"
    else:
        prefix = XGBoostRegressorWrapper.PREFIX

    booster = Constant(f"{prefix}:booster", "gbtree")
    max_depth = Integer(
        f"{prefix}:max_depth",
        (1, 20),
        log=False,
        default=13,
    )
    min_child_weight = Integer(
        f"{prefix}:min_child_weight",
        (1, 100),
        log=True,
        default=39,
    )
    colsample_bytree = Float(
        f"{prefix}:colsample_bytree",
        (0.0, 1.0),
        log=False,
        default=0.2545374925231651,
    )
    colsample_bylevel = Float(
        f"{prefix}:colsample_bylevel",
        (0.0, 1.0),
        log=False,
        default=0.6909224923784677,
    )
    lambda_param = Float(
        f"{prefix}:lambda",
        (0.001, 1000),
        log=True,
        default=31.393252465064943,
    )
    alpha = Float(
        f"{prefix}:alpha",
        (0.001, 1000),
        log=True,
        default=0.24167936088332426,
    )
    learning_rate = Float(
        f"{prefix}:learning_rate",
        (0.001, 0.1),
        log=True,
        default=0.008237525103357958,
    )

    params = [
        booster,
        max_depth,
        min_child_weight,
        colsample_bytree,
        colsample_bylevel,
        lambda_param,
        alpha,
        learning_rate,
    ]
    if parent_param is not None:
        conditions = [
            EqualsCondition(
                child=param,
                parent=parent_param,
                value=parent_value,
            )
            for param in params
        ]
    else:
        conditions = []

    cs.add(params + conditions)

    return cs

get_from_configuration(configuration, pre_prefix='', **kwargs) staticmethod

Create an XGBoostRegressorWrapper from a configuration.

Parameters

configuration : dict The configuration dictionary. pre_prefix : str, optional Prefix prepended to the parameter names read from the configuration. **kwargs : Any Additional parameters to include in the model initialization.

Returns

Callable[..., XGBoostRegressorWrapper] A callable that initializes the wrapper with the given configuration.

Source code in asf/predictors/xgboost.py
@staticmethod
def get_from_configuration(
    configuration: Dict[str, Any],
    pre_prefix: str = "",
    **kwargs,
) -> Callable[..., "XGBoostRegressorWrapper"]:
    """
    Create an XGBoostRegressorWrapper from a configuration.

    Parameters
    ----------
    configuration : dict
        The configuration dictionary.
    pre_prefix : str, optional
        Prefix prepended to the parameter names read from the configuration.
    **kwargs : Any
        Additional parameters to include in the model initialization.

    Returns
    -------
    Callable[..., XGBoostRegressorWrapper]
        A callable that initializes the wrapper with the given configuration.
    """
    if pre_prefix != "":
        prefix = f"{pre_prefix}:{XGBoostRegressorWrapper.PREFIX}"
    else:
        prefix = XGBoostRegressorWrapper.PREFIX

    xgb_params = {
        "booster": configuration[f"{prefix}:booster"],
        "max_depth": configuration[f"{prefix}:max_depth"],
        "min_child_weight": configuration[f"{prefix}:min_child_weight"],
        "colsample_bytree": configuration[f"{prefix}:colsample_bytree"],
        "colsample_bylevel": configuration[f"{prefix}:colsample_bylevel"],
        "lambda": configuration[f"{prefix}:lambda"],
        "alpha": configuration[f"{prefix}:alpha"],
        "learning_rate": configuration[f"{prefix}:learning_rate"],
        **kwargs,
    }

    return partial(XGBoostRegressorWrapper, init_params=xgb_params)
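
Putting the pieces together for the regressor, a minimal sample-build-fit loop over the default space (synthetic data, assumes xgboost and ConfigSpace are installed):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(300, 8))
y = X @ rng.normal(size=8)

cs = XGBoostRegressorWrapper.get_configuration_space()
config = cs.sample_configuration()

# The returned partial builds the wrapper with the sampled hyperparameters.
model = XGBoostRegressorWrapper.get_from_configuration(config)()
model.fit(X, y)
preds = model.predict(X[:10])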