Implement hyperparameter optimization via Gaussian search from scikit-optimize #1292

Open · wants to merge 4 commits into master · Changes from 3 commits
131 changes: 131 additions & 0 deletions catboost/python-package/catboost/core.py
@@ -3451,6 +3451,137 @@ def randomized_search(self, param_distributions, X, y=None, cv=3, n_iter=10, par
    def _convert_to_asymmetric_representation(self):
        self._object._convert_oblivious_to_asymmetric()

    def gaussian_search(self, param_distributions, X, y=None, cv=3, n_random_starts=10,
                        random_state=None, n_calls=100, search_by_train_test_split=True,
                        partition_random_seed=None, n_jobs=1, const_params={}, to_minimize_objective=True,
                        refit=True, train_size=0.8, verbose=True, plot=False):

Contributor: The interface should be the same as in random_search and grid_search, except for the parameter distribution.

Contributor: Docs are needed for this function, so that users will understand the meaning of all the parameters.

Contributor: Since the method requires skopt params, let's name it skopt_parameter_search.
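In the spirit of the second comment, a docstring sketch; the parameter descriptions are inferred from how the code below uses them, so treat them as assumptions rather than the author's wording:

        """
        Tune hyperparameters with Bayesian optimization (skopt.gp_minimize).

        param_distributions : list of named skopt.space.Dimension
            Search dimensions (Real, Integer, Categorical) to optimize over.
        cv : int, default 3
            Fold count; used only when search_by_train_test_split is False.
        n_random_starts : int, default 10
            Random evaluations before the Gaussian process model takes over.
        n_calls : int, default 100
            Total number of objective evaluations.
        search_by_train_test_split : bool, default True
            Score each point on a holdout split instead of cross-validation.
        refit : bool, default True
            Refit the model on the full data with the best parameters found.
        """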
        import skopt
Contributor: Please import only the things that are used inside the method.

        if n_calls <= 0:
            raise CatBoostError("n_calls should be a positive number")

Contributor: Please follow the codestyle everywhere.

        if not isinstance(param_distributions, Mapping):
            raise CatBoostError("param_distributions should be a dictionary")
        for param in param_distributions:
            if not isinstance(param, skopt.space.space.Dimension):
                raise TypeError('Parameter grid value is not from skopt.space.space.Dimension')

        if X is None:
            raise CatBoostError("X must not be None")

        if y is None and not isinstance(X, STRING_TYPES + (Pool,)):
            raise CatBoostError("y may be None only when X is an instance of catboost.Pool or string")

        return self._gaussian_search(param_distributions=param_distributions, X=X, y=y, cv=cv, n_random_starts=n_random_starts,
                                     random_state=random_state, n_calls=n_calls, search_by_train_test_split=search_by_train_test_split,
                                     partition_random_seed=partition_random_seed, n_jobs=n_jobs, const_params=const_params,
                                     to_minimize_objective=to_minimize_objective, refit=refit, train_size=train_size, verbose=verbose, plot=plot)
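For orientation, a usage sketch of the proposed API. X_train and y_train are placeholders, and the sketch assumes the input is a list of named skopt dimensions, which is how the list is actually consumed below, even though the Mapping check above demands a dictionary:

    from skopt.space import Integer, Real

    from catboost import CatBoostRegressor

    model = CatBoostRegressor(iterations=100)
    search_space = [
        Real(1e-3, 0.3, name='learning_rate', prior='log-uniform'),
        Integer(4, 10, name='depth'),
    ]
    # Runs gp_minimize over both dimensions, then refits with the best values.
    best_params = model.gaussian_search(search_space, X_train, y_train, n_calls=30)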

    def _gaussian_search(self, param_distributions, X, y=None, cv=3, n_random_starts=10,
                         random_state=None, n_calls=100, search_by_train_test_split=True,
                         partition_random_seed=None, n_jobs=1, const_params={}, to_minimize_objective=True,
                         refit=True, train_size=0.8, verbose=True, plot=False):

        train_params = self._prepare_train_params(
            X, y, None, None, None, None, None, None, None, None, None, None, None, None,
            None, None, None, None, None, True, None, None, None, None, None
        )
        params = train_params["params"]
        loss_function = params.get('loss_function', None)

        self.set_params(**const_params)

        from skopt.utils import use_named_args
        from skopt import gp_minimize
        from skopt.space import Real, Categorical, Integer
Contributor: Please remove all unused imports.


        optimized_params_names = [distribution.name for distribution in param_distributions]
        init_params = self._init_params.copy()

        objective = SkoptObjective(X, y, random_state, train_size, cv,
                                   partition_random_seed, optimized_params_names,
                                   self, loss_function, train_params["train_pool"],
                                   search_by_train_test_split, to_minimize_objective,
                                   const_params, init_params)

        results = gp_minimize(objective,
                              param_distributions,
                              n_calls=n_calls,
                              n_random_starts=n_random_starts,
                              random_state=random_state)
        best_params = {optimized_params_names[i]: results.x[i] for i in range(len(optimized_params_names))}
        self.set_params(**best_params)
        if refit:
            self.fit(X, y, silent=True)
        return best_params


class SkoptObjective(object):
    def __init__(self, X, y, random_state, train_size, cv,
                 partition_random_seed, optimized_params_names,
                 model, loss_function, train_pool, search_by_train_test_split,
                 to_minimize_objective, const_params, init_params):
        self.X = X
        self.y = y
        self.random_state = random_state
        self.train_size = train_size
        self.cv = cv
        self.partition_random_seed = partition_random_seed
        self.optimized_params_names = optimized_params_names
        self.model = model
        self.loss_function = loss_function
        self.train_pool = train_pool
        self.to_minimize_objective = to_minimize_objective
        self.search_by_train_test_split = search_by_train_test_split
        self.const_params = const_params
        self.init_params = init_params

    def __call__(self, params):
        from sklearn.model_selection import train_test_split

        params_dict = dict(zip(self.optimized_params_names, params))
        if isinstance(self.model, CatBoostClassifier):
            self.model = CatBoostClassifier(**self.init_params)
        elif isinstance(self.model, CatBoostRegressor):
            self.model = CatBoostRegressor(**self.init_params)
        elif isinstance(self.model, CatBoost):
            self.model = CatBoost(**self.init_params)

        self.model.set_params(**self.const_params)
        self.model.set_params(**params_dict)
        if self.search_by_train_test_split:
Contributor: Train-test split must be done once for the whole parameter tuning process. Quantization must be done once, or, if quantization parameters are among the optimized ones, it must be redone every time they change.
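One way to address this, sketched under the assumption that the objective can own a fixed split made in its constructor. The names mirror the surrounding class, CatBoost is in scope inside core.py, and the 'validation' key follows the get_best_score usage already in this diff:

    from sklearn.model_selection import train_test_split

    class FixedSplitObjective(object):
        """Scores one parameter point against a train/validation split made once."""

        def __init__(self, X, y, train_size, partition_random_seed,
                     optimized_params_names, loss_function, init_params):
            # Split exactly once, so every gp_minimize evaluation is scored
            # against the same validation partition.
            self.X_train, self.X_val, self.y_train, self.y_val = train_test_split(
                X, y, train_size=train_size, random_state=partition_random_seed)
            self.optimized_params_names = optimized_params_names
            self.loss_function = loss_function
            self.init_params = init_params

        def __call__(self, params):
            model = CatBoost(self.init_params)
            model.set_params(**dict(zip(self.optimized_params_names, params)))
            model.fit(self.X_train, self.y_train, silent=True,
                      eval_set=(self.X_val, self.y_val))
            return model.get_best_score()['validation'][self.loss_function]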

            if isinstance(self.X, Pool):
                self.y = self.X.get_label()
                self.X = self.X.get_features()
Contributor: This will not work if you have a quantized pool, if there are categorical features, or if there are texts.
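A sketch of a split that keeps the Pool intact instead of extracting raw features. Hedged: Pool.slice takes row indices and should preserve categorical and text feature metadata, but whether slicing an already-quantized pool is supported needs checking; pool and partition_random_seed are placeholders:

    import numpy as np
    from sklearn.model_selection import train_test_split

    # Split row indices rather than raw feature arrays, then slice the Pool
    # so categorical/text feature information survives the split.
    row_indices = np.arange(pool.num_row())
    train_idx, val_idx = train_test_split(
        row_indices, train_size=0.8, random_state=partition_random_seed)
    train_pool = pool.slice(train_idx)
    val_pool = pool.slice(val_idx)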

            X_train, X_val, y_train, y_val = train_test_split(self.X,
                                                              self.y,
                                                              random_state=self.partition_random_seed,
                                                              train_size=self.train_size)
            self.model.fit(X_train, y_train, silent=True, eval_set=(X_val, y_val))
            result = self.model.get_best_score()['validation'][self.loss_function]
        else:
            result = cv(self.train_pool,
                        params=params_dict,
                        fold_count=self.cv,
                        partition_random_seed=self.partition_random_seed)
            result = list(result["test-" + self.loss_function + "-mean"])[-1]
Contributor: This will not work for all loss functions; also, loss_function might be None at this point in the code.
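A defensive lookup could fall back to eval_metric and fail loudly when neither is set. A sketch: params and cv_results are placeholders, and the column naming assumes cv's "test-<metric>-mean" convention used above:

    # Prefer an explicit eval_metric; otherwise fall back to the loss function.
    metric = params.get('eval_metric') or params.get('loss_function')
    if metric is None:
        raise CatBoostError("cannot score parameters: neither eval_metric "
                            "nor loss_function is specified")
    result = cv_results["test-" + metric + "-mean"].iloc[-1]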

        if not self.to_minimize_objective:
Contributor: This value should not be specified by the user; it is always fully determined by the evaluation metric.
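One way to derive the sign automatically. A sketch: the set of maximized metrics below is illustrative, not CatBoost's authoritative list; if the library exposes an is-max-optimal check internally, that would be the better source:

    # Metrics where larger is better; gp_minimize minimizes, so negate these.
    MAXIMIZED_METRICS = {'AUC', 'Accuracy', 'F1', 'Precision', 'Recall', 'R2', 'NDCG'}

    def sign_for_gp_minimize(metric_name):
        base_name = metric_name.split(':')[0]  # strip options like 'F1:use_weights=true'
        return -1.0 if base_name in MAXIMIZED_METRICS else 1.0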

            result = -result
        return result


class CatBoostClassifier(CatBoost):

    _estimator_type = 'classifier'