Code implementation (based on the logistic regression algorithm):

# -*- coding: utf-8 -*-
"""
Created on Sat Sep 1 11:54:48 2018
@author: zhen
Cross-validation
"""
import numpy as np
from sklearn import datasets
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV
import matplotlib.pyplot as plt

# Load the iris data set; use only petal width (the last feature) as input
iris = datasets.load_iris()
x = iris['data'][:, 3:]
y = iris['target']

def report(results, n_top=3):
    """Print the top-ranked parameter combinations from cv_results_."""
    for i in range(1, n_top + 1):
        candidates = np.flatnonzero(results['rank_test_score'] == i)
        for candidate in candidates:
            print("Model with rank: {0}".format(i))
            print("Mean validation score: {0:.3f} (std: {1:.3f})".format(
                results['mean_test_score'][candidate],
                results['std_test_score'][candidate]))
            print("Parameters: {0}".format(results['params'][candidate]))
            print("")

# Parameter grid to search over
param_grid = {"tol": [1e-4, 1e-3, 1e-2], "C": [0.4, 0.6, 0.8]}

log_reg = LogisticRegression(multi_class='ovr', solver='sag')
# Use 3-fold cross-validation
grid_search = GridSearchCV(log_reg, param_grid=param_grid, cv=3)
grid_search.fit(x, y)

report(grid_search.cv_results_)

# Predict class probabilities over the petal-width range [0, 3]
x_new = np.linspace(0, 3, 1000).reshape(-1, 1)
y_proba = grid_search.predict_proba(x_new)
y_hat = grid_search.predict(x_new)

plt.plot(x_new, y_proba[:, 2], 'g-', label='Iris-Virginica')
plt.plot(x_new, y_proba[:, 1], 'r-', label='Iris-Versicolour')
plt.plot(x_new, y_proba[:, 0], 'b-', label='Iris-Setosa')
plt.legend()
plt.show()

print(grid_search.predict([[1.7], [1.5]]))

Results:

[Figure: GridSearchCV cross-validation — console output of the report() score summary]

[Figure: GridSearchCV cross-validation — predicted class probability curves for the three iris classes]

Summary: with cross-validation, the code automatically trains a separate model for every parameter combination in the specified grid, scores each one by cross-validation, and keeps the model trained with the best-scoring parameters for prediction, so as to obtain the best predictive performance.
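Since GridSearchCV refits the winning parameter combination on the full training data by default (refit=True), the selected model can be inspected and reused directly. The following is a minimal sketch that assumes the grid_search object fitted in the code above:

# Minimal sketch, built on the grid_search object from the listing above
print(grid_search.best_params_)            # the chosen 'C' and 'tol' values
print(grid_search.best_score_)             # mean cross-validation score of that combination
best_model = grid_search.best_estimator_   # LogisticRegression refit with the best parameters
print(best_model.predict([[1.7], [1.5]]))  # equivalent to calling predict on grid_search itself

Calling predict or predict_proba on grid_search, as the code above does, delegates to this same best_estimator_.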
