Expand source code
from copy import deepcopy
from functools import partial

import numpy as np
import sklearn
import sklearn.datasets  # used by the __main__ demo below; `import sklearn` alone does not import the submodule
from sklearn.ensemble import AdaBoostClassifier, AdaBoostRegressor
from sklearn.base import BaseEstimator, ClassifierMixin, MetaEstimatorMixin
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import normalize
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.utils.multiclass import check_classification_targets, unique_labels
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted

from imodels.rule_set.rule_set import RuleSet
from imodels.rule_set.slipper_util import SlipperBaseEstimator
from imodels.util.arguments import check_fit_arguments
from imodels.util.convert import tree_to_code, tree_to_rules, dict_to_rule
from imodels.util.rule import Rule, get_feature_dict, replace_feature_name


class BoostedRulesClassifier(AdaBoostClassifier):
    '''An easily interpretable classifier that optimizes simple logical rules.

    Params
    ------
    estimator: object with fit and predict methods
        Defaults to DecisionTreeClassifier with AdaBoost.
        For SLIPPER, should pass estimator=imodels.SlipperBaseEstimator
    '''

    def __init__(
        self,
        estimator=DecisionTreeClassifier(max_depth=1),
        *,
        n_estimators=15,
        learning_rate=1.0,
        random_state=None,
    ):
        try: # sklearn version >= 1.2
            super().__init__(
                estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
        except TypeError:  # sklearn version < 1.2 (expects base_estimator)
            super().__init__(
                base_estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
            self.estimator = estimator


    def fit(self, X, y, feature_names=None, **kwargs):
        X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
        super().fit(X, y, **kwargs)
        self.complexity_ = len(self.estimators_)
        return self

class BoostedRulesRegressor(AdaBoostRegressor):
    '''An easily interpretable regressor that optimizes simple logical rules.

    Params
    ------
    estimator: object with fit and predict methods
        Defaults to DecisionTreeRegressor with AdaBoost.
    '''

    def __init__(
        self,
        estimator=DecisionTreeRegressor(max_depth=1),
        *,
        n_estimators=15,
        learning_rate=1.0,
        random_state=13,
    ):
        try: # sklearn version >= 1.2
            super().__init__(
                estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
        except TypeError:  # sklearn version < 1.2 (expects base_estimator)
            super().__init__(
                base_estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
            self.estimator = estimator

    def fit(self, X, y, feature_names=None, **kwargs):
        X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
        super().fit(X, y, **kwargs)
        self.complexity_ = len(self.estimators_)
        return self


if __name__ == '__main__':
    np.random.seed(13)
    X, Y = sklearn.datasets.load_breast_cancer(as_frame=True, return_X_y=True)
    model = BoostedRulesClassifier(estimator=DecisionTreeClassifier(max_depth=1))  # pass an estimator instance, not the class
    X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.3)
    model.fit(X_train, y_train, feature_names=X_train.columns)
    y_pred = model.predict(X_test)
    acc = model.score(X_test, y_test)
    print('acc', acc, 'complexity', model.complexity_)
    print(model)

Classes

class BoostedRulesClassifier (estimator=DecisionTreeClassifier(max_depth=1), *, n_estimators=15, learning_rate=1.0, random_state=None)

An easily interpretable classifier that optimizes simple logical rules.

Params

estimator : object with fit and predict methods
Defaults to DecisionTreeClassifier with AdaBoost. For SLIPPER, should pass estimator=imodels.SlipperBaseEstimator.
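
A minimal usage sketch (the breast-cancer data and 0.3 split mirror the module's __main__ demo; the package-root import and the SLIPPER instance are assumptions, and an estimator instance is passed because AdaBoost clones its estimator):

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

from imodels import BoostedRulesClassifier  # assumed re-export at the package root
from imodels.rule_set.slipper_util import SlipperBaseEstimator

X, y = load_breast_cancer(as_frame=True, return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=13)

# Default: boost depth-1 decision stumps.
stumps = BoostedRulesClassifier(estimator=DecisionTreeClassifier(max_depth=1), n_estimators=15)
stumps.fit(X_train, y_train, feature_names=X_train.columns)
print(stumps.score(X_test, y_test), stumps.complexity_)

# SLIPPER variant: boost single-rule learners instead of stumps.
slipper = BoostedRulesClassifier(estimator=SlipperBaseEstimator())
slipper.fit(X_train, y_train, feature_names=X_train.columns)
print(slipper.score(X_test, y_test), slipper.complexity_)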

Expand source code
class BoostedRulesClassifier(AdaBoostClassifier):
    '''An easily interpretable classifier that optimizes simple logical rules.

    Params
    ------
    estimator: object with fit and predict methods
        Defaults to DecisionTreeClassifier with AdaBoost.
        For SLIPPER, should pass estimator=imodels.SlipperBaseEstimator
    '''

    def __init__(
        self,
        estimator=DecisionTreeClassifier(max_depth=1),
        *,
        n_estimators=15,
        learning_rate=1.0,
        random_state=None,
    ):
        try: # sklearn version >= 1.2
            super().__init__(
                estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
        except TypeError:  # sklearn version < 1.2 (expects base_estimator)
            super().__init__(
                base_estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
            self.estimator = estimator


    def fit(self, X, y, feature_names=None, **kwargs):
        X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
        super().fit(X, y, **kwargs)
        self.complexity_ = len(self.estimators_)
        return self

Ancestors

  • sklearn.ensemble._weight_boosting.AdaBoostClassifier
  • sklearn.utils._metadata_requests._RoutingNotSupportedMixin
  • sklearn.base.ClassifierMixin
  • sklearn.ensemble._weight_boosting.BaseWeightBoosting
  • sklearn.ensemble._base.BaseEnsemble
  • sklearn.base.MetaEstimatorMixin
  • sklearn.base.BaseEstimator
  • sklearn.utils._estimator_html_repr._HTMLDocumentationLinkMixin
  • sklearn.utils._metadata_requests._MetadataRequester

Methods

def fit(self, X, y, feature_names=None, **kwargs)

Build a boosted classifier/regressor from the training set (X, y).

Parameters

X : {array-like, sparse matrix} of shape (n_samples, n_features)
The training input samples. Sparse matrix can be CSC, CSR, COO, DOK, or LIL. COO, DOK, and LIL are converted to CSR.
y : array-like of shape (n_samples,)
The target values.
feature_names : list of str, default=None
Optional feature names; validated together with X and y by check_fit_arguments before boosting.
sample_weight : array-like of shape (n_samples,), default=None
Sample weights, forwarded to the underlying AdaBoost fit through **kwargs. If None, the sample weights are initialized to 1 / n_samples.

Returns

self : object
Fitted estimator.
Expand source code
def fit(self, X, y, feature_names=None, **kwargs):
    X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
    super().fit(X, y, **kwargs)
    self.complexity_ = len(self.estimators_)
    return self
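
Because extra keyword arguments are forwarded to AdaBoostClassifier.fit, sample weights can be supplied alongside feature_names. A short sketch (X_train and y_train as in the example above; the two-to-one upweighting is purely illustrative):

import numpy as np

model = BoostedRulesClassifier(n_estimators=10)

# sample_weight travels through **kwargs to AdaBoostClassifier.fit.
weights = np.where(y_train == 1, 2.0, 1.0)
model.fit(X_train, y_train, feature_names=X_train.columns, sample_weight=weights)

print(model.complexity_)             # number of fitted weak learners
print(model.estimator_weights_[:3])  # AdaBoost weights of the first boosted rules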
def set_fit_request(self: BoostedRulesClassifier, *, feature_names: Union[bool, None, str] = '$UNCHANGED$') -> BoostedRulesClassifier

Request metadata passed to the fit method.

Note that this method is only relevant if enable_metadata_routing=True (see sklearn.set_config). Please see the User Guide on metadata routing for how the routing mechanism works.

The options for each parameter are:

  • True: metadata is requested, and passed to fit if provided. The request is ignored if metadata is not provided.

  • False: metadata is not requested and the meta-estimator will not pass it to fit.

  • None: metadata is not requested, and the meta-estimator will raise an error if the user provides it.

  • str: metadata should be passed to the meta-estimator with this given alias instead of the original name.

The default (sklearn.utils.metadata_routing.UNCHANGED) retains the existing request. This allows you to change the request for some parameters and not others.

Added in version: 1.3

Note

This method is only relevant if this estimator is used as a sub-estimator of a meta-estimator, e.g. used inside a sklearn.pipeline.Pipeline. Otherwise it has no effect.

Parameters

feature_names : str, True, False, or None, default=sklearn.utils.metadata_routing.UNCHANGED
Metadata routing for feature_names parameter in fit.

Returns

self : object
The updated object.
Expand source code
def func(*args, **kw):
    """Updates the request for provided parameters

    This docstring is overwritten below.
    See REQUESTER_DOC for expected functionality
    """
    if not _routing_enabled():
        raise RuntimeError(
            "This method is only available when metadata routing is enabled."
            " You can enable it using"
            " sklearn.set_config(enable_metadata_routing=True)."
        )

    if self.validate_keys and (set(kw) - set(self.keys)):
        raise TypeError(
            f"Unexpected args: {set(kw) - set(self.keys)} in {self.name}. "
            f"Accepted arguments are: {set(self.keys)}"
        )

    # This makes it possible to use the decorated method as an unbound method,
    # for instance when monkeypatching.
    # https://github.com/scikit-learn/scikit-learn/issues/28632
    if instance is None:
        _instance = args[0]
        args = args[1:]
    else:
        _instance = instance

    # Replicating python's behavior when positional args are given other than
    # `self`, and `self` is only allowed if this method is unbound.
    if args:
        raise TypeError(
            f"set_{self.name}_request() takes 0 positional argument but"
            f" {len(args)} were given"
        )

    requests = _instance._get_metadata_request()
    method_metadata_request = getattr(requests, self.name)

    for prop, alias in kw.items():
        if alias is not UNCHANGED:
            method_metadata_request.add_request(param=prop, alias=alias)
    _instance._metadata_request = requests

    return _instance
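
A minimal sketch of the request mechanism (routing is opt-in and must be enabled globally; whether a surrounding meta-estimator can actually route metadata to this class depends on the installed scikit-learn version, so treat this as illustrative):

import sklearn

sklearn.set_config(enable_metadata_routing=True)  # without this, set_fit_request raises RuntimeError

clf = (
    BoostedRulesClassifier()
    .set_fit_request(feature_names=True)          # routers should forward `feature_names` to fit
    .set_score_request(sample_weight="weights")   # alias: routers pass `weights` as score's sample_weight
)

The request only takes effect when the estimator sits inside a router such as a Pipeline or a cross-validation helper; on its own, fit and score behave exactly as before.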
def set_score_request(self: BoostedRulesClassifier, *, sample_weight: Union[bool, None, str] = '$UNCHANGED$') -> BoostedRulesClassifier

Request metadata passed to the score method.

Note that this method is only relevant if enable_metadata_routing=True (see sklearn.set_config). Please see the User Guide on metadata routing for how the routing mechanism works.

The options for each parameter are:

  • True: metadata is requested, and passed to score if provided. The request is ignored if metadata is not provided.

  • False: metadata is not requested and the meta-estimator will not pass it to score.

  • None: metadata is not requested, and the meta-estimator will raise an error if the user provides it.

  • str: metadata should be passed to the meta-estimator with this given alias instead of the original name.

The default (sklearn.utils.metadata_routing.UNCHANGED) retains the existing request. This allows you to change the request for some parameters and not others.

Added in version: 1.3

Note

This method is only relevant if this estimator is used as a sub-estimator of a meta-estimator, e.g. used inside a sklearn.pipeline.Pipeline. Otherwise it has no effect.

Parameters

sample_weight : str, True, False, or None, default=sklearn.utils.metadata_routing.UNCHANGED
Metadata routing for sample_weight parameter in score.

Returns

self : object
The updated object.
class BoostedRulesRegressor (estimator=DecisionTreeRegressor(max_depth=1), *, n_estimators=15, learning_rate=1.0, random_state=13)

An easily interpretable regressor that optimizes simple logical rules.

Params

estimator : object with fit and predict methods
Defaults to DecisionTreeRegressor with AdaBoost.
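
A minimal usage sketch for the regressor (the diabetes dataset and the 0.3 split are illustrative choices; the package-root import is an assumption):

from sklearn.datasets import load_diabetes
from sklearn.model_selection import train_test_split

from imodels import BoostedRulesRegressor  # assumed re-export at the package root

X, y = load_diabetes(as_frame=True, return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=13)

reg = BoostedRulesRegressor(n_estimators=15)  # boosts depth-1 regression stumps by default
reg.fit(X_train, y_train, feature_names=X_train.columns)

print(reg.score(X_test, y_test))  # R^2 on the held-out split
print(reg.complexity_)            # number of boosted stumps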

Expand source code
class BoostedRulesRegressor(AdaBoostRegressor):
    '''An easily interpretable regressor that optimizes simple logical rules.

    Params
    ------
    estimator: object with fit and predict methods
        Defaults to DecisionTreeRegressor with AdaBoost.
    '''

    def __init__(
        self,
        estimator=DecisionTreeRegressor(max_depth=1),
        *,
        n_estimators=15,
        learning_rate=1.0,
        random_state=13,
    ):
        try: # sklearn version >= 1.2
            super().__init__(
                estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
        except TypeError:  # sklearn version < 1.2 (expects base_estimator)
            super().__init__(
                base_estimator=estimator,
                n_estimators=n_estimators,
                learning_rate=learning_rate,
                random_state=random_state,
            )
            self.estimator = estimator

    def fit(self, X, y, feature_names=None, **kwargs):
        X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
        super().fit(X, y, **kwargs)
        self.complexity_ = len(self.estimators_)
        return self

Ancestors

  • sklearn.ensemble._weight_boosting.AdaBoostRegressor
  • sklearn.utils._metadata_requests._RoutingNotSupportedMixin
  • sklearn.base.RegressorMixin
  • sklearn.ensemble._weight_boosting.BaseWeightBoosting
  • sklearn.ensemble._base.BaseEnsemble
  • sklearn.base.MetaEstimatorMixin
  • sklearn.base.BaseEstimator
  • sklearn.utils._estimator_html_repr._HTMLDocumentationLinkMixin
  • sklearn.utils._metadata_requests._MetadataRequester

Methods

def fit(self, X, y, feature_names=None, **kwargs)

Build a boosted classifier/regressor from the training set (X, y).

Parameters

X : {array-like, sparse matrix} of shape (n_samples, n_features)
The training input samples. Sparse matrix can be CSC, CSR, COO, DOK, or LIL. COO, DOK, and LIL are converted to CSR.
y : array-like of shape (n_samples,)
The target values.
feature_names : list of str, default=None
Optional feature names; validated together with X and y by check_fit_arguments before boosting.
sample_weight : array-like of shape (n_samples,), default=None
Sample weights, forwarded to the underlying AdaBoost fit through **kwargs. If None, the sample weights are initialized to 1 / n_samples.

Returns

self : object
Fitted estimator.
Expand source code
def fit(self, X, y, feature_names=None, **kwargs):
    X, y, feature_names = check_fit_arguments(self, X, y, feature_names)
    super().fit(X, y, **kwargs)
    self.complexity_ = len(self.estimators_)
    return self
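
Since the fitted ensemble is a plain AdaBoost ensemble, the learned rules can be read off the weak learners after fit. A sketch for the default depth-1 stumps (reg and X_train as in the regressor example above; the printout format is ad hoc, and stumps that did not split are skipped):

feature_names = list(X_train.columns)

for stump, weight in zip(reg.estimators_, reg.estimator_weights_):
    tree = stump.tree_
    if tree.feature[0] < 0:  # the stump made no split
        continue
    name, thresh = feature_names[tree.feature[0]], tree.threshold[0]
    left, right = tree.value[1].ravel()[0], tree.value[2].ravel()[0]
    print(f"weight={weight:.2f}: if {name} <= {thresh:.3f} then {left:.2f} else {right:.2f}")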
def set_fit_request(self: BoostedRulesRegressor, *, feature_names: Union[bool, None, str] = '$UNCHANGED$') -> BoostedRulesRegressor

Request metadata passed to the fit method.

Note that this method is only relevant if enable_metadata_routing=True (see sklearn.set_config). Please see the User Guide on metadata routing for how the routing mechanism works.

The options for each parameter are:

  • True: metadata is requested, and passed to fit if provided. The request is ignored if metadata is not provided.

  • False: metadata is not requested and the meta-estimator will not pass it to fit.

  • None: metadata is not requested, and the meta-estimator will raise an error if the user provides it.

  • str: metadata should be passed to the meta-estimator with this given alias instead of the original name.

The default (sklearn.utils.metadata_routing.UNCHANGED) retains the existing request. This allows you to change the request for some parameters and not others.

Added in version: 1.3

Note

This method is only relevant if this estimator is used as a sub-estimator of a meta-estimator, e.g. used inside a sklearn.pipeline.Pipeline. Otherwise it has no effect.

Parameters

feature_names : str, True, False, or None, default=sklearn.utils.metadata_routing.UNCHANGED
Metadata routing for feature_names parameter in fit.

Returns

self : object
The updated object.
def set_score_request(self: BoostedRulesRegressor, *, sample_weight: Union[bool, None, str] = '$UNCHANGED$') -> BoostedRulesRegressor

Request metadata passed to the score method.

Note that this method is only relevant if enable_metadata_routing=True (see sklearn.set_config). Please see the User Guide on metadata routing for how the routing mechanism works.

The options for each parameter are:

  • True: metadata is requested, and passed to score if provided. The request is ignored if metadata is not provided.

  • False: metadata is not requested and the meta-estimator will not pass it to score.

  • None: metadata is not requested, and the meta-estimator will raise an error if the user provides it.

  • str: metadata should be passed to the meta-estimator with this given alias instead of the original name.

The default (sklearn.utils.metadata_routing.UNCHANGED) retains the existing request. This allows you to change the request for some parameters and not others.

Added in version: 1.3

Note

This method is only relevant if this estimator is used as a sub-estimator of a meta-estimator, e.g. used inside a sklearn.pipeline.Pipeline. Otherwise it has no effect.

Parameters

sample_weight : str, True, False, or None, default=sklearn.utils.metadata_routing.UNCHANGED
Metadata routing for sample_weight parameter in score.

Returns

self : object
The updated object.