-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpyExercise3.py
More file actions
74 lines (54 loc) · 1.63 KB
/
pyExercise3.py
File metadata and controls
74 lines (54 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import pandas as pd
import numpy as np
# Hyperparameters for the hand-rolled gradient-descent linear model below.
learning_rate = 0.01   # default step size for each gradient-descent update
fit_intercept = True   # when True, a column of ones is prepended to X
weights = 0            # model coefficients; rebound to an ndarray by fit()
def fit(X, y, lr=None, n_iters=1000):
    """Fit a linear model to (X, y) by batch gradient descent.

    Stores the learned coefficients in the module-level ``weights``
    (intercept first when ``fit_intercept`` is True) and also returns
    them, so callers no longer have to reach for the global.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
    y : ndarray of shape (n_samples,)
    lr : float, optional
        Step size; defaults to the module-level ``learning_rate``.
    n_iters : int, optional
        Number of gradient-descent updates (default 1000, matching the
        previously hard-coded loop).

    Returns
    -------
    ndarray
        The learned weight vector.
    """
    global weights
    if lr is None:
        lr = learning_rate
    if fit_intercept:
        # Prepend a bias column of ones so the intercept is learned as weights[0].
        X = np.concatenate((np.ones((X.shape[0], 1)), X), axis=1)
    weights = np.zeros(X.shape[1])
    for _ in range(n_iters):
        # Batch gradient of the mean-squared-error objective.
        current_prediction = np.dot(X, weights)
        gradient = np.dot(X.T, (current_prediction - y)) / y.size
        weights -= lr * gradient
    return weights
def predict_prob(X):
    """Return linear-model predictions for X using the fitted global weights."""
    global weights
    if fit_intercept:
        # Mirror fit(): prepend a bias column so the intercept weight lines up.
        bias = np.ones((len(X), 1))
        X = np.hstack((bias, X))
    return X @ weights
###
# Load the Boston housing data. scikit-learn removed load_boston in 1.2
# (over ethical concerns with the dataset), so on a modern scikit-learn
# we fall back to the same 506x13 dataset served from OpenML.
try:
    from sklearn.datasets import load_boston
    boston_data = load_boston()
except ImportError:
    # The OpenML bunch exposes the same attributes used below
    # (.data, .target, .feature_names, .DESCR).
    from sklearn.datasets import fetch_openml
    boston_data = fetch_openml(name="boston", version=1, as_frame=False)
X = np.asarray(boston_data.data, dtype=float)
# Target is MEDV (median house value); force float in case the OpenML
# copy delivers it as a non-float array.
y = np.asarray(boston_data.target, dtype=float)
X_df = pd.DataFrame(X, columns=boston_data.feature_names)
print(boston_data.DESCR)
###
import matplotlib.pyplot as plt
import seaborn as sns
# Visualize the distribution of the target. seaborn deprecated distplot in
# 0.11 and later removed it; histplot with a KDE overlay on the density
# scale is the documented replacement for distplot's default output.
sns.histplot(y, kde=True, stat="density")
###
from sklearn.metrics import mean_squared_error
# Baseline: predict 0 for every house and measure the (terrible) MSE.
# len(y) replaces the magic constant 506 so this survives a dataset change.
y_pred = [0] * len(y)
mean_squared_error(y, y_pred)
# seaborn >= 0.12 requires the data vectors as keyword arguments;
# positional jointplot(X[:, 5], y) raises a TypeError there.
sns.jointplot(x=X[:, 5], y=y)
def manual_model(house):
    """Hand-tuned baseline: price guess from the rooms feature (index 5)."""
    rooms = house[5]
    return 10 * (rooms - 4)
# Score the hand-tuned model and re-plot rooms vs. price.
y_pred = [manual_model(x) for x in X]
mean_squared_error(y, y_pred)
# Keyword arguments are required by seaborn >= 0.12; the positional form
# raises a TypeError there.
sns.jointplot(x=X[:, 5], y=y)
###
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
# Hold out a third of the data for evaluation (fixed seed so the split is
# reproducible), then fit an ordinary least-squares model and score it.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.33, random_state=42
)
model = LinearRegression()
model.fit(X_train, y_train)
y_pred_ml = model.predict(X_test)
mean_squared_error(y_test, y_pred_ml)
###
# LinearRegression(normalize=True) was removed in scikit-learn 1.2; the
# documented replacement is a StandardScaler preprocessing step composed
# into a pipeline ahead of the regressor.
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
model_2 = make_pipeline(StandardScaler(), LinearRegression())
model_2.fit(X_train, y_train)
# BUG FIX: the original called model.predict here, so the "normalized"
# comparison silently re-scored the first model and never used model_2.
y_pred_ml2 = model_2.predict(X_test)
mean_squared_error(y_test, y_pred_ml2)