数据建模及数据分析浅析
监督学习:
回归(线性回归)和分类(KNN最近邻分类)
非监督学习:
聚类(PCA主成分分析,k-means聚类)
随机算法:
蒙特卡洛模拟--蒙特卡洛算法
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
# Simple (univariate) linear regression demo.
from sklearn.linear_model import LinearRegression

# Fixed seed: np.random.RandomState(seed) always yields the same stream,
# so the generated training data is reproducible across runs.
rng = np.random.RandomState(2)
xtrain = 10 * rng.rand(30)               # 30 feature values in [0, 10)
ytrain = 8 + 4 * xtrain + rng.rand(30)   # y = 4x + 8 plus uniform noise

# scikit-learn expects the feature matrix as (n_samples, n_features);
# reshape(-1, 1) turns the 1-D vector into a single-feature column.
model = LinearRegression()
model.fit(xtrain.reshape(-1, 1), ytrain)
[output]:
LinearRegression(copy_X=True, fit_intercept=True, n_jobs=1, normalize=False)
# Inspect the fitted parameters: coef_ is the slope, intercept_ the bias.
slope, bias = model.coef_, model.intercept_
print(slope, bias)
[output]:
[4.04484138] 7.999245734574714
# Visualize the fit: the fitted line over a test grid, plus the
# residual (error) segment for each training point.
xtest = np.linspace(0, 10, 30)
ytest = model.predict(xtest[:, np.newaxis])

fig = plt.figure(figsize=(12, 3))
# Training data as black dots.
plt.scatter(xtrain, ytrain, marker='.', color='k')
# Predictions on the test grid as faint dots; the bogus linestyle kwarg
# was dropped (scatter draws markers — linestyle had no effect).
plt.scatter(xtest, ytest, marker='.', alpha=0.5)
# Fitted regression line (dashed red).
plt.plot(xtest, ytest, color='r', linestyle='--')

# In-sample predictions (green crosses) at the training x positions.
ytest2 = model.predict(xtrain[:, np.newaxis])
plt.scatter(xtrain, ytest2, marker='x', color='g')
# Gray vertical segments connect each observation to its prediction,
# i.e. they draw the residuals.
plt.plot([xtrain, xtrain], [ytrain, ytest2], color='gray')
# Multiple (four-feature) linear regression: build a synthetic dataset.
# Fixed seed so the sample is reproducible.
rng = np.random.RandomState(5)
# 150 samples, 4 features, each drawn uniformly from [0, 10).
xtrain = 10 * rng.rand(150, 4)
# Noise-free target: y = 20 + 1.5*b1 + 2*b2 - 4*b3 + 3*b4
true_coeffs = np.array([1.5, 2, -4, 3])
ytrain = 20 + xtrain @ true_coeffs
# Collect features and target into one DataFrame for inspection/fitting.
df = pd.DataFrame(xtrain, columns=['b1', 'b2', 'b3', 'b4'])
df['y'] = ytrain
b1 b2 b3 b4 y
0 2.219932 8.707323 2.067192 9.186109 60.034105
1 4.884112 6.117439 7.659079 5.184180 24.477270
2 2.968005 1.877212 0.807413 7.384403 47.129990
3 4.413092 1.583099 8.799370 2.740865 2.810948
4 4.142350 2.960799 6.287879 5.798378 24.378742
# Pairwise scatter matrix of the four features (KDE on the diagonal)
# to eyeball each feature's distribution and feature-feature correlation.
# FIX: pd.scatter_matrix was deprecated in pandas 0.20 and removed in 1.0;
# the supported call is pd.plotting.scatter_matrix.
pd.plotting.scatter_matrix(df[['b1', 'b2', 'b3', 'b4']], figsize=(10, 6),
                           diagonal='kde', range_padding=0.1, alpha=0.5)

# Fit the multiple linear regression on all four features.
model = LinearRegression()
model.fit(df[['b1', 'b2', 'b3', 'b4']], df['y'])
print(model.coef_)
print(model.intercept_)
print('线性回归函数为:\n y= %.1fx1 + %.1fx2 + %.1fx3 + %.1fx4 + %.1f'
%(model.coef_[0], model.coef_[1], model.coef_[2], model.coef_[3], model.intercept_))
[output]:
[ 1.5 2. -4. 3. ]
19.999999999999964
线性回归函数为:
y= 1.5x1 + 2.0x2 + -4.0x3 + 3.0x4 + 20.0
# Coefficient of determination (R²), in [0, 1]: the closer to 1, the more
# of y's variance the model explains, i.e. the better the fit.
# FIX: xtrain is already 2-D (150, 4) at this point; the original
# xtrain[:, np.newaxis] produced a 3-D array and made score() raise.
# (The previously shown 0.9946 was a stale output from the univariate run.)
r22 = model.score(xtrain, ytrain)
0.9946452159694995