import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
# sample data
x = [1, 3, 4, 6, 8, 9, 11, 14]
y = [1, 2, 4, 4, 5, 7, 8, 9]
plt.scatter(x, y)
plt.xlabel("x values")
plt.ylabel("y values")
plt.show()
# reshape x into a column vector, since sklearn expects a 2-D feature array
x = np.array([1, 3, 4, 6, 8, 9, 11, 14]).reshape(-1, 1)
y = np.array([1, 2, 4, 4, 5, 7, 8, 9])
regr = LinearRegression()
regr.fit(x, y)          # fit() returns the estimator itself, no need to store it
j = regr.coef_
print("Regression Coefficient : ", j)
k = regr.intercept_
print("Intercept : ", k)
y_line = j * x + k      # points on the fitted line (avoids overwriting y)
plt.plot(x, y_line)
plt.xlabel("x values")
plt.ylabel("y values")
plt.show()
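Instead of computing j*x + k by hand, sklearn can evaluate the fitted line for us: predict() returns the model's output at each input, and score() reports R², the fraction of variance the line explains. A minimal sketch, reusing the regr, x, and y defined above:

# predict() evaluates the fitted line at the given inputs; this should
# match the y_line computed manually above
y_pred = regr.predict(x)
print("Predicted values : ", y_pred)

# score() returns R^2, a quick measure of how well the line fits the data
print("R^2 : ", regr.score(x, y))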
# back to plain lists for the combined scatter-plus-line plot
x = [1, 3, 4, 6, 8, 9, 11, 14]
y = [1, 2, 4, 4, 5, 7, 8, 9]
reg_line = [(j[0] * xi) + k for xi in x]   # y = jx + k at each x
plt.scatter(x, y, color='b', label='data points')
plt.plot(x, reg_line, color='r', label='regression line')
plt.xlabel("x values")
plt.ylabel("y values")
plt.legend()
plt.show()
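Before reaching for a more flexible model, it also helps to look at the residuals, i.e. the vertical distance from each point to the line; nonzero residuals are exactly what the plot shows visually. A quick check, assuming the regr fitted above is still in scope:

# residuals: vertical distance from each data point to the fitted line
residuals = np.array(y) - regr.predict(np.array(x).reshape(-1, 1))
print("Residuals : ", residuals)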
Conclusion: The last plot makes it clear that not all the data points lie on the fitted line, which suggests we could fit a higher-degree polynomial to get a closer fit.
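As a sketch of that idea, numpy's polyfit can fit a higher-degree polynomial to the same data. The degree of 3 below is an arbitrary choice for illustration, not a tuned value:

# fit a degree-3 polynomial to the same data (degree chosen for illustration)
coeffs = np.polyfit(x, y, deg=3)
poly = np.poly1d(coeffs)           # callable polynomial built from the coefficients

xs = np.linspace(min(x), max(x), 100)   # smooth grid for plotting the curve
plt.scatter(x, y, color='b', label='data points')
plt.plot(xs, poly(xs), color='g', label='degree-3 polynomial')
plt.xlabel("x values")
plt.ylabel("y values")
plt.legend()
plt.show()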