[python-package] Expose ObjectiveFunction class #6586

Open
wants to merge 18 commits into base: master
55 changes: 55 additions & 0 deletions include/LightGBM/c_api.h
@@ -31,6 +31,7 @@ typedef void* DatasetHandle; /*!< \brief Handle of dataset. */
typedef void* BoosterHandle; /*!< \brief Handle of booster. */
typedef void* FastConfigHandle; /*!< \brief Handle of FastConfig. */
typedef void* ByteBufferHandle; /*!< \brief Handle of ByteBuffer. */
typedef void* ObjectiveFunctionHandle; /*!< \brief Handle of ObjectiveFunction. */

#define C_API_DTYPE_FLOAT32 (0) /*!< \brief float32 (single precision float). */
#define C_API_DTYPE_FLOAT64 (1) /*!< \brief float64 (double precision float). */
@@ -1563,6 +1564,60 @@ LIGHTGBM_C_EXPORT int LGBM_BoosterGetUpperBoundValue(BoosterHandle handle,
LIGHTGBM_C_EXPORT int LGBM_BoosterGetLowerBoundValue(BoosterHandle handle,
double* out_results);

/*!
* \brief Create an objective function.
* \param typ Type of the objective function
* \param parameter Parameters for the objective function
* \param[out] out Handle pointing to the created objective function
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionCreate(const char *typ,
const char *parameter,
ObjectiveFunctionHandle *out);

/*!
* \brief Initialize an objective function with the dataset.
* \param handle Handle of the objective function
* \param dataset Handle of the dataset used for initialization
* \param[out] num_data Number of data points in the dataset, set by this function
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionInit(ObjectiveFunctionHandle handle,
DatasetHandle dataset,
int *num_data);

/*!
* \brief Evaluate the objective function given model scores.
* \param handle Handle of the objective function
* \param score Array of scores predicted by the model
* \param[out] grad Gradient result array
* \param[out] hess Hessian result array
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionGetGradients(ObjectiveFunctionHandle handle,
const double* score,
float* grad,
float* hess);

/*!
* \brief Free the memory allocated for an objective function.
* \param handle Handle of the objective function
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionFree(ObjectiveFunctionHandle handle);

/*!
* \brief Convert raw scores to outputs.
* \param handle Handle of the objective function
* \param num_data Length of the inputs array (number of data points times number of classes)
* \param inputs Array of raw scores
* \param[out] outputs Array of converted outputs
* \return 0 when succeed, -1 when failure happens
*/
LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionConvertOutputs(ObjectiveFunctionHandle handle,
const int num_data,
const double* inputs,
double* outputs);

/*!
* \brief Initialize the network.
* \param machines List of machines in format 'ip1:port1,ip2:port2'
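Taken together, the five new entry points are intended to be called in a fixed order: create the objective, initialize it against a constructed Dataset, call LGBM_ObjectiveFunctionGetGradients / LGBM_ObjectiveFunctionConvertOutputs as needed, then free the handle. Below is a minimal sketch of that sequence driven through the Python package's internal ctypes helpers (`_LIB`, `_c_str`, `_safe_call` from `lightgbm.basic`); it assumes a library build that already includes this PR, and the synthetic data and empty parameter string are illustrative only.

```python
import ctypes
import numpy as np
import lightgbm as lgb
from lightgbm.basic import _LIB, _c_str, _safe_call

# A constructed Dataset provides the labels the objective initializes from.
X, y = np.random.rand(50, 4), np.random.rand(50)
ds = lgb.Dataset(X, label=y).construct()

handle = ctypes.c_void_p()
_safe_call(_LIB.LGBM_ObjectiveFunctionCreate(_c_str("regression"), _c_str(""), ctypes.byref(handle)))

num_data = ctypes.c_int(0)
_safe_call(_LIB.LGBM_ObjectiveFunctionInit(handle, ds._handle, ctypes.byref(num_data)))

scores = np.zeros(num_data.value, dtype=np.float64)
grad = np.zeros(num_data.value, dtype=np.float32)
hess = np.zeros(num_data.value, dtype=np.float32)
_safe_call(_LIB.LGBM_ObjectiveFunctionGetGradients(
    handle,
    scores.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
    grad.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
    hess.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
))

_safe_call(_LIB.LGBM_ObjectiveFunctionFree(handle))
```

The higher-level `ObjectiveFunction` wrapper added in `python-package/lightgbm/basic.py` below packages exactly this sequence.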
6 changes: 6 additions & 0 deletions include/LightGBM/objective_function.h
@@ -67,6 +67,12 @@ class ObjectiveFunction {
/*! \brief Return the number of positive samples. Return 0 if no binary classification tasks.*/
virtual data_size_t NumPositiveData() const { return 0; }

virtual void ConvertOutputs(const int num_data, const double* inputs, double* outputs) const {
for (int i = 0; i < num_data; ++i) {
ConvertOutput(inputs + i, outputs + i);
}
}

virtual void ConvertOutput(const double* input, double* output) const {
output[0] = input[0];
}
3 changes: 2 additions & 1 deletion python-package/lightgbm/__init__.py
@@ -6,7 +6,7 @@

from pathlib import Path

from .basic import Booster, Dataset, Sequence, register_logger
from .basic import Booster, Dataset, ObjectiveFunction, Sequence, register_logger
from .callback import EarlyStopException, early_stopping, log_evaluation, record_evaluation, reset_parameter
from .engine import CVBooster, cv, train

@@ -31,6 +31,7 @@
__all__ = [
"Dataset",
"Booster",
"ObjectiveFunction",
"CVBooster",
"Sequence",
"register_logger",
159 changes: 159 additions & 0 deletions python-package/lightgbm/basic.py
@@ -5281,3 +5281,162 @@ def __get_eval_info(self) -> None:
self.__higher_better_inner_eval = [
name.startswith(("auc", "ndcg@", "map@", "average_precision")) for name in self.__name_inner_eval
]


class ObjectiveFunction:
"""
ObjectiveFunction in LightGBM.

This class exposes the built-in objective functions so that gradients and Hessians
can be evaluated on external datasets. LightGBM itself does not use this wrapper
during training; it calls the underlying C++ class directly.
"""

def __init__(self, name: str, params: Dict[str, Any]):
"""
Initialize the ObjectiveFunction.

Parameters
----------
name : str
The name of the objective function.
params : dict
Dictionary of parameters for the objective function.
These are the parameters that would normally be passed to ``lightgbm.train()``.
The ``name`` should be consistent with the ``params["objective"]`` field.
"""
self.name = name
self.params = params
self.num_data = None
self.num_class = params.get("num_class", 1)

if "objective" in params and params["objective"] != self.name:
raise ValueError('The name should be consistent with the params["objective"] field.')

self.__create()

def init(self, dataset: Dataset) -> "ObjectiveFunction":
"""
Initialize the objective function using the provided dataset.

Parameters
----------
dataset : Dataset
The dataset object used for initialization.

Returns
-------
self : ObjectiveFunction
Initialized objective function object.
"""
return self.__init_from_dataset(dataset)

def convert_outputs(self, scores: np.ndarray) -> np.ndarray:
"""
Convert the raw scores to the final predictions.

Parameters
----------
scores : numpy.ndarray
Raw scores from the model.

Returns
-------
result : numpy.ndarray
Converted predictions, with the same shape as ``scores``.
"""
if self._handle is None:
raise ValueError("Objective function seems uninitialized")

if self.num_class == 1:
scores = _list_to_1d_numpy(scores, dtype=np.float64, name="scores")
else:
scores = _data_to_2d_numpy(scores, dtype=np.float64, name="scores")

num_data = scores.size
out_preds = np.zeros_like(scores, dtype=np.float64)

_safe_call(
_LIB.LGBM_ObjectiveFunctionConvertOutputs(
self._handle,
ctypes.c_int(num_data),
scores.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
out_preds.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
)
)

return out_preds

def get_gradients(self, y_pred: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""
Evaluate the objective function given model predictions.

Parameters
----------
y_pred : numpy.ndarray
Predicted scores from the model.

Returns
-------
(grad, hess) : Tuple[np.ndarray, np.ndarray]
A tuple containing gradients and Hessians.
"""
if self._handle is None:
raise ValueError("Objective function seems uninitialized")

if self.num_data is None or self.num_class is None:
raise ValueError("ObjectiveFunction was not created properly")

data_shape = self.num_data * self.num_class
grad = np.zeros(dtype=np.float32, shape=data_shape)
hess = np.zeros(dtype=np.float32, shape=data_shape)

_safe_call(
_LIB.LGBM_ObjectiveFunctionGetGradients(
self._handle,
y_pred.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
grad.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
hess.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
)
)

return (grad, hess)

def __create(self):
self._handle = ctypes.c_void_p()
_safe_call(
_LIB.LGBM_ObjectiveFunctionCreate(
_c_str(self.name),
_c_str(_param_dict_to_str(self.params)),
ctypes.byref(self._handle),
)
)

def __init_from_dataset(self, dataset: Dataset) -> "ObjectiveFunction":
if dataset._handle is None:
raise ValueError("Cannot create ObjectiveFunction from uninitialised Dataset")

if self._handle is None:
raise ValueError("Dealocated ObjectiveFunction cannot be initialized")

tmp_num_data = ctypes.c_int(0)
_safe_call(
_LIB.LGBM_ObjectiveFunctionInit(
self._handle,
dataset._handle,
ctypes.byref(tmp_num_data),
)
)
self.num_data = tmp_num_data.value
return self

def __del__(self) -> None:
try:
self._free_handle()
except AttributeError:
pass

def _free_handle(self) -> "ObjectiveFunction":
if self._handle is not None:
_safe_call(_LIB.LGBM_ObjectiveFunctionFree(self._handle))
self._handle = None
return self
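For reference, a minimal end-to-end sketch of the wrapper added above. It assumes a lightgbm build that includes this PR; the synthetic data and the choice of the "regression" objective are illustrative only.

```python
import numpy as np
import lightgbm as lgb

# The Dataset must be constructed so the objective can read labels and weights from it.
X, y = np.random.rand(100, 5), np.random.rand(100)
ds = lgb.Dataset(X, label=y).construct()

# `name` must agree with params["objective"], as enforced in __init__ above.
obj = lgb.ObjectiveFunction("regression", {"objective": "regression"}).init(ds)

raw_scores = np.zeros(obj.num_data, dtype=np.float64)
grad, hess = obj.get_gradients(raw_scores)  # float32 arrays of length num_data * num_class
preds = obj.convert_outputs(raw_scores)     # identity for "regression", sigmoid for "binary", etc.
print(grad.shape, hess.shape, preds.shape)
```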
69 changes: 69 additions & 0 deletions src/c_api.cpp
@@ -907,6 +907,7 @@ using LightGBM::kZeroThreshold;
using LightGBM::LGBM_APIHandleException;
using LightGBM::Log;
using LightGBM::Network;
using LightGBM::ObjectiveFunction;
using LightGBM::Random;
using LightGBM::ReduceScatterFunction;
using LightGBM::SingleRowPredictor;
@@ -2747,6 +2748,74 @@ int LGBM_BoosterGetLowerBoundValue(BoosterHandle handle,
API_END();
}

LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionCreate(const char *typ,
const char *parameter,
ObjectiveFunctionHandle *out) {
API_BEGIN();
auto param = Config::Str2Map(parameter);
Config config(param);
if (config.device_type != std::string("cpu")) {
Log::Fatal("Currently the ObjectiveFunction class is only exposed for CPU devices.");
} else {
*out = ObjectiveFunction::CreateObjectiveFunction(std::string(typ), config);
}
API_END();
}

LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionInit(ObjectiveFunctionHandle handle,
DatasetHandle dataset,
int *num_data) {
API_BEGIN();
ObjectiveFunction* ref_fobj = reinterpret_cast<ObjectiveFunction*>(handle);
Dataset* ref_dataset = reinterpret_cast<Dataset*>(dataset);
ref_fobj->Init(ref_dataset->metadata(), ref_dataset->num_data());
*num_data = ref_dataset->num_data();
API_END();
}

LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionGetGradients(ObjectiveFunctionHandle handle,
const double* score,
float* grad,
float* hess) {
API_BEGIN();
#ifdef SCORE_T_USE_DOUBLE
(void) handle; // UNUSED VARIABLE
(void) score; // UNUSED VARIABLE
(void) grad; // UNUSED VARIABLE
(void) hess; // UNUSED VARIABLE
Log::Fatal("Don't support evaluating objective function when SCORE_T_USE_DOUBLE is enabled");
Collaborator: Why is that?

Contributor Author (neNasko1): This would require a huge amount of work on the Python side, so I am leaving it as is. There is already precedent for that in the file.
#else
ObjectiveFunction* ref_fobj = reinterpret_cast<ObjectiveFunction*>(handle);
ref_fobj->GetGradients(score, grad, hess);
#endif
API_END();
}

LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionConvertOutputs(ObjectiveFunctionHandle handle,
const int num_data,
const double* inputs,
double* outputs) {
API_BEGIN();
#ifdef SCORE_T_USE_DOUBLE
(void) handle; // UNUSED VARIABLE
(void) num_data; // UNUSED VARIABLE
(void) inputs; // UNUSED VARIABLE
(void) outputs; // UNUSED VARIABLE
Log::Fatal("Don't support evaluating objective function when SCORE_T_USE_DOUBLE is enabled");
#else
ObjectiveFunction* ref_fobj = reinterpret_cast<ObjectiveFunction*>(handle);
ref_fobj->ConvertOutputs(num_data, inputs, outputs);
#endif
API_END();
}

LIGHTGBM_C_EXPORT int LGBM_ObjectiveFunctionFree(ObjectiveFunctionHandle handle) {
API_BEGIN();
delete reinterpret_cast<ObjectiveFunction*>(handle);
API_END();
}

int LGBM_NetworkInit(const char* machines,
int local_listen_port,
int listen_time_out,
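As a quick illustration of the `device_type` guard in `LGBM_ObjectiveFunctionCreate` above: requesting a non-CPU device from the Python wrapper should surface the `Log::Fatal` message as a `LightGBMError`. This is a sketch assuming a build with this PR; the exact error text can vary by build.

```python
import lightgbm as lgb

try:
    lgb.ObjectiveFunction("regression", {"objective": "regression", "device_type": "gpu"})
except lgb.basic.LightGBMError as err:
    # Expected: "Currently the ObjectiveFunction class is only exposed for CPU devices."
    print(err)
```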
12 changes: 12 additions & 0 deletions src/objective/multiclass_objective.hpp
@@ -129,6 +129,12 @@ class MulticlassSoftmax: public ObjectiveFunction {
}
}

void ConvertOutputs(const int num_data, const double* inputs, double* outputs) const override {
for (int i = 0; i < num_data; i += num_class_) {
ConvertOutput(inputs + i, outputs + i);
}
}

void ConvertOutput(const double* input, double* output) const override {
Common::Softmax(input, output, num_class_);
}
@@ -236,6 +242,12 @@ class MulticlassOVA: public ObjectiveFunction {
return "multiclassova";
}

void ConvertOutputs(const int num_data, const double* inputs, double* outputs) const override {
for (int i = 0; i < num_data; i += num_class_) {
ConvertOutput(inputs + i, outputs + i);
}
}

void ConvertOutput(const double* input, double* output) const override {
for (int i = 0; i < num_class_; ++i) {
output[i] = 1.0f / (1.0f + std::exp(-sigmoid_ * input[i]));
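Both multiclass overrides above walk the flat score buffer in blocks of `num_class_` consecutive raw scores per data point, converting one block per `ConvertOutput` call. A small sketch of how that surfaces through the new Python wrapper (synthetic data; assumes a build that includes this PR):

```python
import numpy as np
import lightgbm as lgb

n_samples, n_classes = 90, 3
X = np.random.rand(n_samples, 4)
y = np.random.randint(0, n_classes, size=n_samples)
ds = lgb.Dataset(X, label=y).construct()

obj = lgb.ObjectiveFunction(
    "multiclass", {"objective": "multiclass", "num_class": n_classes}
).init(ds)

# One row of raw scores per sample; each row is softmax-normalized independently.
raw = np.random.rand(n_samples, n_classes)
probs = obj.convert_outputs(raw)  # same (n_samples, n_classes) shape as `raw`
print(probs.sum(axis=1))          # each row sums to ~1 after the softmax
```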