First, a look at the iteration results, plotted alongside sklearn's LinearRegression for comparison:

[Figure: custom xgboost regression losses (part 2) — convergence comparison of the different objective functions]

import numpy as np


def huber_approx_obj(real, predict):
    """Pseudo-Huber loss: L = h**2 * (np.sqrt(1 + (d/h)**2) - 1), a smooth Huber approximation."""
    d = predict - real
    h = 1  # h plays the role of delta in the Huber loss
    scale = 1 + (d / h) ** 2
    scale_sqrt = np.sqrt(scale)
    grad = d / scale_sqrt          # first derivative w.r.t. predict
    hess = 1 / scale / scale_sqrt  # second derivative w.r.t. predict
    return grad, hess

def fair_obj(real, predict):
    """Fair loss: L = c * np.abs(x) - c**2 * np.log(np.abs(x) / c + 1)"""
    x = predict - real
    c = 1
    den = np.abs(x) + c
    grad = c * x / den       # first derivative
    hess = c * c / den ** 2  # second derivative
    return grad, hess

def log_cosh_obj(real, predict):
    """Log-cosh loss: L = np.log(np.cosh(x))"""
    x = predict - real
    grad = np.tanh(x)
    # hess = 1 / np.cosh(x)**2 is the direct form, but np.cosh overflows for
    # large |x| and the division then collapses the hessian; this identity is stable
    hess = 1.0 - np.tanh(x) ** 2
    return grad, hess

def m4e(real, predict):
    """Fourth-power error: L = (predict - real)**4"""
    d = predict - real
    grad = 4.0 * d ** 3   # first derivative
    hess = 12.0 * d ** 2  # second derivative
    return grad, hess
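
For completeness, a minimal sketch of how these objectives can be handed to xgboost and set against sklearn's LinearRegression. It assumes the sklearn wrapper of a recent xgboost, whose objective parameter accepts a callable with signature (y_true, y_pred) returning (grad, hess), which matches the functions above; the synthetic dataset and hyperparameters are placeholders, not the ones behind the plot.

from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
from xgboost import XGBRegressor

# toy data so the sketch is self-contained
X, y = make_regression(n_samples=500, n_features=8, noise=15.0, random_state=0)

# baseline: ordinary least squares
lr = LinearRegression().fit(X, y)
print('LinearRegression RMSE:', mean_squared_error(y, lr.predict(X)) ** 0.5)

# each custom objective plugs in through the wrapper's `objective` argument
objectives = [('pseudo-huber', huber_approx_obj), ('fair', fair_obj),
              ('log-cosh', log_cosh_obj), ('m4e', m4e)]
for name, obj in objectives:
    model = XGBRegressor(objective=obj, n_estimators=200, learning_rate=0.1)
    model.fit(X, y)
    print(name, 'RMSE:', mean_squared_error(y, model.predict(X)) ** 0.5)

In-sample RMSE here is only a smoke test; a per-iteration curve like the one in the figure can be obtained by passing an eval_set to fit and reading evals_result() afterwards.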