# SC0828_machine learning/deep learning
## lab1_env_check
```python
import sys, os
import sklearn # scikit-learn
import scipy
import pandas
import numpy
import matplotlib
print(sys.version, os.getcwd())
print(sklearn.__version__, scipy.__version__)
print(pandas.__version__, numpy.__version__)
print(matplotlib.__version__)
```
* https://scikit-learn.org/
* https://www.tensorflow.org/
* https://pytorch.org/
* https://caffe.berkeleyvision.org/model_zoo.html
## lab2_simple_linear_model
```python=3.6
import matplotlib.pyplot as plt
import numpy as np
b = 5
a = 3
x = np.arange(-10, 10, 0.1)
y = a * x + b
plt.plot(x, y, label=f"y={a}x+{b}")
plt.legend()
plt.axhline(0, color='black')
plt.axvline(0, color='black')
plt.show()
```
## lab3_simple_linear_model2
```python=3.6
import matplotlib.pyplot as plt
import numpy as np
b = np.linspace(5, -5, 10)
a = 3
x = np.arange(-5, 5, 0.1)
for b1 in b:
y = a * x + b1
plt.plot(x,y,label=f"y={a}x+{b1:.1f}")
plt.legend()
plt.axhline(0, color='black')
plt.axvline(0, color='black')
plt.show()
```
## lab4_simple_linear_model3
```python=3.6
import matplotlib.pyplot as plt
import numpy as np
b = 5
a = np.linspace(3, -3, 10)
x = np.arange(-5, 5, 0.1)
for a1 in a:
y = a1 * x + b
plt.plot(x, y, label=f"y={a1:.1f}x+{b}")
plt.legend()
plt.axhline(0, color='black')
plt.axvline(0, color='black')
plt.show()
```
## lab5_logistic1
```python=3.6
import matplotlib.pyplot as plt
import numpy as np

# Plot the standard logistic (sigmoid) function f(x) = 1 / (1 + e^-x).
x = np.arange(-20, 20, 0.1)
f = 1 / (1 + np.exp(-x))
# Bug fix: xlabel expects a label string; the original passed the whole
# data array x as the axis label.
plt.xlabel('x')
plt.ylabel('f(x)')
plt.plot(x, f)
plt.axhline(0, color='black')
plt.axvline(0, color='black')
plt.show()
```
## lab6_logistic2
```python=3.6
import matplotlib.pyplot as plt
import numpy as np

# Compare sigmoid steepness for several weights w: f(x) = 1 / (1 + e^(-w*x)).
x = np.arange(-10, 10, 0.1)
# Bug fix: the third legend label said 'w=1.5' while the weight was 2.0.
# Deriving the label from the value keeps them from drifting apart.
for w in (0.5, 1.0, 2.0):
    f = 1 / (1 + np.exp(-x * w))
    plt.plot(x, f, label=f'w={w}')
plt.legend(loc=2)
plt.show()
```
### lab6_logistic3
```python=3.6
import matplotlib.pyplot as plt
import numpy as np
w1 = 3.0
b1 = -8
b2 = 0
b3 = 8
l1 = 'b=-8.0'
l2 = 'b=0'
l3 = 'b=8.0'
x = np.arange(-10, 10, 0.1)
for b, l in [(b1, l1), (b2, l2), (b3, l3)]:
f = 1 / (1 + np.exp(-(w1 * x + b)))
plt.plot(x, f, label=l)
plt.legend()
plt.show()
```
* https://orange.biolab.si/
* https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit.html
* https://aws.amazon.com/tw/deepracer/
* https://gym.openai.com/
## lab8_single_plot
```python=3.6
import matplotlib.pyplot as plt
from sklearn import linear_model
regression1 = linear_model.LinearRegression()
features = [[1], [2], [3], [4]]
values = [1, 4, 15, 17]
plt.scatter(features, values, c='green')
regression1.fit(features, values)
print(f"coefficient={regression1.coef_[0]}")
print(f"intercept={regression1.intercept_}")
range1 = [-1, 5]
print(f"score={regression1.score(features, values)}")
plt.plot(range1, regression1.coef_*range1+regression1.intercept_,c='gray')
plt.show()
```
## lab9_dual_variable
```python=3.6
import matplotlib.pyplot as plt
from sklearn import linear_model
features = [[0, 1], [1, 3], [2, 8]]
values = [1, 4, 5.5]
# ctrl+alt+L
regression1 = linear_model.LinearRegression()
regression1.fit(features, values)
print(regression1.coef_)
print(regression1.intercept_)
# y = 4*x1+(-0.5)*x2+1.5
points = [[0.8, 0.8], [2, 1], [1, 2], [1, 4], [4, 4]]
predicts = regression1.predict(points)
print(predicts)
print(regression1.score(points, predicts))
print(regression1.score(points, [5, 10, 4, 3.5, 16]))
```
## lab10_generated_data
```python=3.6
import matplotlib.pyplot as plt
from sklearn import linear_model, datasets
data1 = datasets.make_regression(100, 1, noise=50)
print(type(data1), type(data1[0]), type(data1[1]))
print(data1[0].shape, data1[1].shape)
plt.scatter(data1[0], data1[1], c='red', marker='*')
plt.show()
regression1 = linear_model.LinearRegression()
regression1.fit(data1[0], data1[1])
print(regression1.coef_, regression1.intercept_)
range1 = [-3, 3]
plt.plot(range1, regression1.coef_ * range1 + regression1.intercept_)
plt.scatter(data1[0], data1[1], c='red', marker='*')
print(f"score={regression1.score(data1[0], data1[1])}")
plt.show()
```
## lab11_polynomial_regression
```python=3.6
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from matplotlib import pyplot as plt

# Compare degree-1, degree-2 and degree-6 polynomial fits on the same
# six data points.
x = np.array([5, 15, 25, 35, 45, 55]).reshape((-1, 1))
y = np.array([15, 11, 2, 8, 25, 32])
plt.plot(x, y)
plt.scatter(x, y)
plt.show()

# Degree 1: ordinary least-squares straight line.
regression1 = LinearRegression()
regression1.fit(x, y)
plt.scatter(x, y)
plt.plot(x, y)
x_sequence = np.array(np.arange(5, 55, 0.1)).reshape((-1, 1))
plt.plot(x, regression1.coef_ * x + regression1.intercept_)
print(f"1st order score={regression1.score(x,y)}")
plt.show()

# Degree 2: expand x into [x, x^2] features, then fit linearly.
transformer = PolynomialFeatures(degree=2, include_bias=False)
transformer.fit(x)
x_2 = transformer.transform(x)
# Bug fix: the message said "shape" but the original printed the full
# arrays; print the actual shapes.
print(f"x_2 shape={x_2.shape}, x shape={x.shape}")
regression2 = LinearRegression()
regression2.fit(x_2, y)
print(regression2.coef_)
print(regression2.intercept_)
print(f"2nd order score={regression2.score(x_2,y)}")
x_sequence_2 = transformer.transform(x_sequence)
y_predict = regression2.predict(x_sequence_2)
plt.scatter(x, y)
plt.plot(x, y)
plt.plot(x_sequence, y_predict)
plt.show()

# Degree 6 (the original comment said "order3" but used degree=6):
# with only six points a degree-6 polynomial can (over)fit them exactly.
transformer2 = PolynomialFeatures(degree=6, include_bias=False)
transformer2.fit(x)
x_3 = transformer2.transform(x)
regression3 = LinearRegression()
regression3.fit(x_3, y)
print(f"6th order score={regression3.score(x_3, y)}")
x_sequence_3 = transformer2.transform(x_sequence)
y_predict = regression3.predict(x_sequence_3)
plt.scatter(x, y)
plt.plot(x, y)
plt.plot(x_sequence, y_predict)
plt.show()
```
## lab12_multiple_features
```python=3.6
import matplotlib.pyplot as plt
from sklearn import datasets
data1 = datasets.make_regression(20, 6, noise=5)
print(data1[0].shape)
for i in range(data1[0].shape[1]):
plt.scatter(data1[0][:, i], data1[1])
plt.show()
```
## lab13_sort
```python=3.6
import matplotlib.pyplot as plt
from sklearn import datasets
from pprint import pprint
data1 = datasets.make_regression(20, 5, noise=5)
regressionX = data1[0]
pprint(regressionX)
for i in range(regressionX.shape[1]):
print(f"sort by {i}th column")
sortedArray = sorted(regressionX, key=lambda t: t[i])
pprint(sortedArray)
```
## lab14_reshape
```python=3.6
import numpy as np

# Demonstrate how reshape interprets its target shape, including -1 as an
# "infer this dimension" placeholder.
a = np.zeros((10, 2))
b = a.T            # transposed view, shape (2, 10)
c = b.view()       # another view over the same buffer
print(a.shape, b.shape, c.shape)
d = b.reshape((5, 4))
print(d)
e = b.reshape((20,))       # flatten to a 1-D vector
print(e)
f = b.reshape((20, -1))    # -1 is inferred as 1 -> column vector
print(f)
g = b.reshape((-1, 20))    # -1 is inferred as 1 -> row vector
print(g)
h = b.reshape((1, -1))     # single row, width inferred
print(h)
```
## lab15_array_view
```python=3.6
import numpy as np
# Demonstrate that a view (b) and a plain reference (c) both share a's
# data buffer, but only the plain reference shares a's shape attribute.
a = np.array([[1, 2], [3, 4]])
b = a.view()  # new array object, same buffer, independent shape
c = a         # plain alias: c IS a
print(a, b, c, sep='\n')
b.shape = (4, -1)  # reshaping the view leaves a's shape untouched
print(a, b, c, sep='\n')
c.shape = (-1, 4)  # reshaping the alias reshapes a (and c) as well
print(a, b, c, sep='\n')
a[0][0] += 100  # data is shared, so the change shows up in a, b and c
print(a, b, c, sep='\n')
```
## lab16_array_copy_reference_view
```python=3.6
import numpy as np
# Compare the three ways of "duplicating" an array:
#   b = a        -> alias (same object)
#   c = a.view() -> new object, shared data buffer, independent shape
#   d = a.copy() -> fully independent deep copy
a = np.array([[1, 2], [3, 4]])
b = a
c = a.view()
d = a.copy()
print("Stage1", a, b, c, d, sep="\n")
a[0][0] = 100  # visible in a, b and c (shared buffer); d is unaffected
print("Stage2", a, b, c, d, sep="\n")
a.shape = (4,)  # only a and its alias b change shape; c and d keep theirs
print("Stage3", a, b, c, d, sep="\n")
```
## lab17_diabetes
```python=3.6
import numpy as np
from sklearn import linear_model, datasets

# Fit a linear regression on the diabetes dataset, holding out the last
# 60 samples as a test split.
diabetes = datasets.load_diabetes()
print(type(diabetes), dir(diabetes))
print(diabetes.data.shape, diabetes.target.shape)
print(diabetes.target.mean(), diabetes.target.std())
dataForTest = -60  # negative index: the last 60 rows form the test set
# training
data_train = diabetes.data[:dataForTest]
print("data trained:", data_train.shape)
target_train = diabetes.target[:dataForTest]
print("target trained:", target_train.shape)
# testing
data_test = diabetes.data[dataForTest:]
print("data test:", data_test.shape)
target_test = diabetes.target[dataForTest:]
print("target test:", target_test.shape)
#
regression1 = linear_model.LinearRegression()
regression1.fit(data_train, target_train)
print(regression1.coef_)
print(regression1.intercept_)
print('score:', regression1.score(data_test, target_test))
for i in range(dataForTest, 0):
    dataArray = np.array(data_test[i]).reshape(1, -1)
    print("predict={:.1f}, actual={}".
          format(regression1.predict(dataArray)[0], target_test[i]))
# Bug fix: MSE is the mean of the squared errors. The original computed
# np.mean(errors) ** 2 — the square of the mean error — which is near zero
# for any unbiased model and badly understates the error.
mean_square_error = np.mean((regression1.predict(data_test) - target_test) ** 2)
print("mean square error={}".format(mean_square_error))
```
## lab18_feature_selection
```python=3.7
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import f_regression
from matplotlib import pyplot as plt

# Rank features of a synthetic regression problem by their fitted linear
# coefficients, then select the 5 best via univariate F-tests.
x, y = make_regression(n_samples=1000, n_features=10, n_informative=5)
model = LinearRegression()
model.fit(x, y)
importance = model.coef_
for index, coef in enumerate(importance):
    print(f"feature={index}, score={coef:.3f}")
# Idiom fix: pass range() directly instead of a do-nothing comprehension
# that also reused the name of the data variable `x`.
plt.bar(range(len(importance)), importance)
plt.show()
kBest = SelectKBest(f_regression, k=5).fit(x, y)
print(kBest.get_support())
newX = kBest.fit_transform(x, y)
print(x[:1])
print(newX[:1])
```
## lab19_array_operation
```python=3.6
import numpy as np

# Compare column-stacking (np.c_) with row-stacking (np.r_) and their
# hstack/vstack equivalents on small lists.
a1 = [1, 2]
a2 = [3, 4]
a3 = [5, 6]
scalars_as_row = np.c_[5, 6, 7]        # scalars become one row
print(scalars_as_row)
lists_as_columns = np.c_[a1, a2, a3]   # each list becomes a column
print(lists_as_columns)
scalars_as_vector = np.r_[5, 6, 7]     # scalars become a flat vector
print(scalars_as_vector)
lists_concatenated = np.r_[a1, a2, a3] # lists joined end to end
print(lists_concatenated)
horizontal = np.hstack((np.array(a1), np.array(a2), np.array(a3)))
print(horizontal)
vertical = np.vstack((a1, a2, a3))
print(vertical)
```
## lab20_iris(https://bit.ly/31zUawp)
```python=3.6
import matplotlib.pyplot as plt
from sklearn import datasets
iris = datasets.load_iris()
print(dir(iris))
labels = iris.feature_names
print(labels)
X = iris.data
species = iris.target
counter = 1
for i in range(0, 4):
for j in range(i + 1, 4):
plt.figure(counter, figsize=(8, 6))
plt.clf()
counter += 1
xData = X[:, i]
yData = X[:, j]
x_min, x_max = xData.min() - 0.5, xData.max() + 0.5
y_min, y_max = yData.min() - 0.5, yData.max() + 0.5
plt.scatter(xData, yData, c=species, cmap=plt.cm.Paired)
plt.xlabel(labels[i])
plt.ylabel(labels[j])
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
```
## lab21_iris_classification
```python=
from sklearn.linear_model import LogisticRegression
from sklearn import datasets
import numpy as np
import matplotlib.pyplot as plt

# Logistic regression on a single iris feature (petal width), predicting
# whether the sample is Iris virginica (class 2).
iris = datasets.load_iris()
X = iris["data"][:, 3:]  # petal width only
# Bug fix: np.int was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin int is the documented replacement.
y = (iris["target"] == 2).astype(int)
regression1 = LogisticRegression()
regression1.fit(X, y)
X_sequence = np.linspace(0, 3, 1000).reshape(-1, 1)
y_probability = regression1.predict_proba(X_sequence)
print(regression1.coef_, regression1.intercept_)
print(np.unique(y))
plt.plot(X, y, 'r.')
# Typo fix: this label read "is virginia" while its counterpart below says
# "not virginica"; the species name is virginica.
plt.plot(X_sequence, y_probability[:, 1], 'g--', label="is virginica",
         linewidth=3)
# Reconstruct the same probability curve from the fitted parameters via
# the sigmoid, to show predict_proba and the formula agree.
a = regression1.coef_[0]
b = regression1.intercept_
y_calculate = 1 / (1 + np.exp(-(a * X_sequence + b)))
plt.plot(X_sequence, y_calculate, 'b-', label='by formula')
plt.plot(X_sequence, y_probability[:, 0], 'r--', label='not virginica')
plt.legend()
plt.show()
```
## lab22_iris_cross_validation
```python=
import sklearn.datasets as datasets
from sklearn import model_selection
from sklearn.linear_model import LogisticRegression

# 3-fold cross-validation of logistic regression on the iris dataset.
iris = datasets.load_iris()
data = iris.data
target = iris.target
# Typo fix: the variable was misspelled "logisticReegression1".
logistic_regression1 = LogisticRegression()
score = model_selection.cross_val_score(logistic_regression1, data, target, cv=3)
print(score)
```
## lab23_simple_svm
```python=
import numpy as np
from sklearn.svm import SVC
X = np.array([[-1, -1], [-2, -1], [-3, -3],
[1, 1], [2, 1], [3, 3]])
y = np.array([1, 1, 1, 2, 2, 2])
classifier1 = SVC()
classifier1.fit(X, y)
print("predict:",
classifier1.predict([[0, 0], [0, 1], [1, 0], [4, 4], [4, 0]]))
```
## lab24_iris_pca_svc
```python=
from sklearn import datasets, svm
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
iris = datasets.load_iris()
pca = PCA(n_components=2)
data = pca.fit(iris.data).transform(iris.data)
print(data.shape)
datamax = data.max(axis=0) + 1
datamin = data.min(axis=0) - 1
n = 2000
X, Y = np.meshgrid(np.linspace(datamin[0], datamax[0], n),
np.linspace(datamin[1], datamax[1], n))
# default kernel default = 0.96
# linear default(1)=0.9667 10=0.9733
# poly default=0.9467 10=0.9667
# rbf default=0.96 50 = 0.9733
# sigmoid default =0.86 0.9267
svc = svm.SVC(kernel='sigmoid', C=50)
svc.fit(data, iris.target)
Z = svc.predict(np.c_[X.ravel(), Y.ravel()])
plt.contour(X, Y, Z.reshape(X.shape), colors='K')
print(f"accuracy={svc.score(data, iris.target):.4f}") # default 0.96
for c, s in zip([0, 1, 2], ['o', '^', '*']):
d = data[iris.target == c]
plt.scatter(d[:, 0], d[:, 1], c='k', marker=s)
plt.show()
```
## lab25_iris_cross_validation.py
```python=3
import sklearn.datasets as datasets
from sklearn import model_selection, svm
from sklearn.linear_model import LogisticRegression

# Compare logistic regression and SVC via 5-fold cross-validation on iris.
iris = datasets.load_iris()
data = iris.data
target = iris.target
logistic_regression1 = LogisticRegression()  # typo fix: was "logisticReegression1"
svc1 = svm.SVC()
estimators = [logistic_regression1, svc1]
for e in estimators:
    print(f"estimator={e}")
    # Bug fix: the loop scored logisticReegression1 on every iteration,
    # so the SVC was never evaluated; score the current estimator `e`.
    score = model_selection.cross_val_score(e, data, target, cv=5)
    print(score)
    print(score.mean(), score.std())
```
## lab26_decision_tree
```python=
from sklearn import tree
from matplotlib import pyplot as plt
X = [[0, 0], [1, 1]]
Y = [0, 1]
classifier1 = tree.DecisionTreeClassifier()
classifier1.fit(X, Y)
tree.plot_tree(classifier1)
plt.show()
```
## lab27
```python=
import matplotlib.pyplot as plt
from sklearn import tree

# Scatter-plot a tiny XOR-style dataset, one colour/marker per class,
# then fit a decision tree on it.
X = [0, 0], [1, 1], [0, 1], [1, 0]
Y = [0, 0, 1, 1]
col = ['red', 'green']
marker = ['o', 'd']
# Idiom fixes: iterate with zip instead of a manual while/index loop, and
# avoid naming a variable `type`, which shadows the builtin.
for point, label in zip(X, Y):
    plt.scatter(point[0], point[1], c=col[label], marker=marker[label])
plt.show()
classifier1 = tree.DecisionTreeClassifier()
classifier1.fit(X, Y)
print(classifier1)
```
## lab28_iris_decision_tree
```python=
import pandas as pd
from sklearn import tree
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from matplotlib import pyplot as plt
iris = datasets.load_iris()
df1 = pd.DataFrame(iris.data, columns=iris.feature_names)
y = iris.target
plt.figure(figsize=(12, 12))
dtree = DecisionTreeClassifier()
dtree.fit(df1, y)
tree.plot_tree(dtree, fontsize=10, feature_names=iris.feature_names)
plt.show()
```
## lab29_random_forest
*
https://artifactory.global.standardchartered.com/artifactory/api/pypi/pypi/simple/seaborn/
```
pip install seaborn -i <CORPORATE_URL>
```
```python=
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
iris = datasets.load_iris()
df1 = pd.DataFrame(iris.data, columns=iris.feature_names)
df1['species'] = np.array([iris.target_names[i] for i in iris.target])
seaborn.pairplot(df1, hue='species')
plt.show()
X_train, X_test, y_train, y_test = train_test_split(df1[iris.feature_names], iris.target,
test_size=0.5, stratify=iris.target)
rf = RandomForestClassifier(n_estimators=100, oob_score=True)
rf.fit(X_train, y_train)
predicted = rf.predict(X_test)
accuracy = accuracy_score(y_test, predicted)
print(f'OOB score={rf.oob_score_}')
print(f"predict score={accuracy}")
cm = pd.DataFrame(confusion_matrix(y_test, predicted),columns=iris.target_names,
index = iris.target_names)
seaborn.heatmap(cm, annot=True)
plt.show()
```
## lab30_kmeans1
```python=3.7
from sklearn.cluster import KMeans
import numpy as np
X = np.array([[1, 0], [0, 1], [1, 2], [1, 4], [1, 8],
[4, 2], [4, 4], [4, 6], [4, 7]])
kmeans = KMeans(n_clusters=2)
kmeans.fit(X)
print(kmeans.labels_)
print(kmeans.predict([[0, 0], [5, 5]]))
print(kmeans.cluster_centers_)
print(kmeans.inertia_)
```
## lab31
```python=3.7
from copy import deepcopy
import matplotlib.pyplot as plt
import numpy as np
X = np.r_[np.random.randn(50, 2) + [2, 2],
np.random.randn(50, 2) + [0, -2],
np.random.randn(50, 2) + [-2, 2]]
[plt.scatter(e[0], e[1], c='black', s=7) for e in X]
k = 3
C_x = np.random.uniform(np.min(X[:, 0]), np.max(X[:, 0]), size=k)
C_y = np.random.uniform(np.min(X[:, 1]), np.max(X[:, 1]), size=k)
C = np.array(list(zip(C_x, C_y)), dtype=np.float32)
plt.scatter(C_x, C_y, marker='*', s=200, c='#C0FFEE')
plt.show()
def dist(a, b, ax=1):
return np.linalg.norm(a - b, axis=ax)
C_old = np.zeros(C.shape)
clusters = np.zeros(len(X))
delta = dist(C, C_old, None)
print(f"delta={delta}")
def plot_kmean(current_cluster, delta):
colors = ['r', 'g', 'b', 'c', 'm', 'y', 'k']
fig, ax = plt.subplots()
for index1 in range(k):
pts = np.array([X[j] for j in range(len(X))
if current_cluster[j] == index1])
ax.scatter(pts[:, 0], pts[:, 1], s=7, c=colors[index1])
ax.scatter(C[:, 0], C[:, 1], marker='*', s=200, c='#C000EE')
plt.title(f"delta will be:{delta}")
plt.show()
while delta != 0:
print("**start a new iteration")
for i in range(len(X)):
distances = dist(X[i], C)
cluster = np.argmin(distances)
clusters[i] = cluster
C_old = deepcopy(C)
for i in range(k):
points = [X[j] for j in range(len(X)) if clusters[j] == i]
C[i] = np.mean(points, axis=0)
delta = dist(C, C_old, None)
print(f"now, delta={delta}")
plot_kmean(clusters, delta)
```
## lab32_kmeans3
```python=
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cluster import KMeans
X = np.r_[np.random.randn(500, 2) + [2, 2],
np.random.randn(500, 2) + [0, -2],
np.random.randn(500, 2) + [-2, 2]]
K = 4
kmeans = KMeans(n_clusters=K, n_init=1)
kmeans.fit(X)
print(f"centers are:{kmeans.cluster_centers_}")
print(kmeans.inertia_)
colors = ['c', 'm', 'y', 'k']
markers = ['.', '*', '^', 'x']
for i in range(K):
dataX = X[kmeans.labels_ == i]
plt.scatter(dataX[:, 0], dataX[:, 1], c=colors[i], marker=markers[i])
plt.scatter(kmeans.cluster_centers_[:,0],kmeans.cluster_centers_[:,1],
marker='*', s=200, c='#C000EE')
plt.show()
```
## lab33_kmeans4
```
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cluster import KMeans
X = np.r_[np.random.randn(500, 2) + [2, 2],
np.random.randn(500, 2) + [0, -2],
np.random.randn(500, 2) + [-2, 2]]
interias = []
for k in range(1, 10):
kmeans = KMeans(n_clusters=k)
kmeans.fit(X)
interias.append(kmeans.inertia_)
print(interias)
plt.plot(range(1,10),interias)
plt.show()
```
## lab34_nearest_neighbor
```python=3.7
import numpy as np
from sklearn.neighbors import NearestNeighbors

# Find the 2 nearest neighbours of every point in X (each point's nearest
# neighbour is itself, at distance 0).
X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
neighbors = NearestNeighbors(n_neighbors=2, algorithm='ball_tree').fit(X)
# Typo fix: the distances variable was misspelled "distanes".
distances, indices = neighbors.kneighbors(X, return_distance=True)
print(distances)
print(indices)
print(neighbors.kneighbors_graph(X).toarray())
```
## lab35_knn1
* [link text](https://archive.ics.uci.edu/ml/datasets/Connectionist+Bench+(Sonar,+Mines+vs.+Rocks))
```python=
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
df1 = pd.read_csv('data/sonar.all-data', header=None, prefix='X')
print(df1.shape)
data, labels = df1.iloc[:, :-1], df1.iloc[:, -1]
df1.rename(columns={'X60': 'Label'}, inplace=True)
clf1 = KNeighborsClassifier(n_neighbors=4)
X_train, X_test, y_train, y_test = train_test_split(data, labels, test_size=0.3)
clf1.fit(X_train, y_train)
y_predict = clf1.predict(X_test)
print(f"score={clf1.score(X_test, y_test)}")
result_cm1 = confusion_matrix(y_test, y_predict)
print(result_cm1)
from joblib import dump, load
dump(clf1, "knn1.joblib")
knn2 = load("knn1.joblib")
y_predict2 = knn2.predict(X_test)
result2 = confusion_matrix(y_predict, y_predict2)
print(result2)
```
## lab36_nb1
```python=
import numpy as np
from sklearn.naive_bayes import GaussianNB
X = np.array([[-1, -1], [-2, -1], [-3, -2],
[1, 1], [2, 1], [3, 2]])
Y = np.array([1, 1, 1, 2, 2, 2])
classifier1 = GaussianNB()
classifier1.fit(X, Y)
print(classifier1.predict([[0, 0], [-0.5, -0.5], [0, -0.5], [3, 3]]))
classifier2 = GaussianNB()
classifier2.partial_fit(X, Y, np.unique(Y))
# classifier2.partial_fit(X, Y)
classifier2.partial_fit([[0, 0]], [2])
print(classifier2.predict([[0, 0], [-0.5, -0.5], [0, -0.5], [3, 3]]))
```
## lab37_nb2
```
import matplotlib.pyplot as plt
import numpy as np
from sklearn.naive_bayes import GaussianNB
X = np.array([[-1, -1], [-2, -1], [-3, -2],
[1, 1], [2, 1], [3, 2]])
# Y = np.array([1, 1, 1, 2, 2, 2])
# Y = np.array([1, 2, 2, 1, 2, 2])
Y = np.array([1, 1, 2, 1, 1, 2])
x_min, x_max = -4, 4
y_min, y_max = -4, 4
h = .025
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
classifier = GaussianNB()
classifier.fit(X, Y)
Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.pcolormesh(xx, yy, Z)
XB = []
YB = []
XR = []
YR = []
index = 0
for index in range(0, len(Y)):
if Y[index] == 1:
XB.append(X[index, 0])
YB.append(X[index, 1])
elif Y[index] == 2:
XR.append(X[index, 0])
YR.append(X[index, 1])
plt.scatter(XB, YB, color="b", label="BLUE")
plt.scatter(XR, YR, color="r", label="RED")
plt.legend()
plt.show()
```
## lab38_pca_manual
```
from numpy import array
from sklearn.decomposition import PCA
A = array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [7, 8, 9]])
print(A)
pca = PCA(2)
pca.fit(A)
print("components",pca.components_)
print("variance",pca.explained_variance_)
B = pca.transform(A)
print(B)
```
## lab39_PCA_iris
```
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import datasets
from sklearn.decomposition import PCA
iris = datasets.load_iris()
X = iris.data
species = iris.target
fig = plt.figure(1, figsize=(8, 8))
ax = Axes3D(fig, elev=-150, azim=110)
X_reduced = PCA(n_components=3).fit_transform(iris.data)
pca2 = PCA(n_components=3).fit(iris.data)
print(pca2.explained_variance_)
ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2],
c=species, cmap=plt.cm.Paired)
ax.set_xlabel("1st eigen vector")
ax.set_ylabel("2nd eigen vector")
ax.set_zlabel("3rd eigen vector")
plt.show()
```
## lab40_calculate_pca
```python=
from numpy import array, cov, mean
from numpy.linalg import eig

# Manual PCA: centre the data, take the eigenvectors of the covariance
# matrix, and project the centred rows onto that eigenbasis.
A = array([[1, 2, 3], [3, 4, 5], [5, 6, 7], [7, 8, 9]])
print(A)
M = A.mean(axis=0)   # per-column means
print("M=", M)
M2 = A.mean()        # grand mean over all entries
print("M2=", M2)
M3 = A.mean(axis=1)  # per-row means
print("M3=", M3)
C = A - M            # centred data
print(C)
V = cov(C.T)         # covariance of the columns
print(V)
values, vectors = eig(V)
print("vectors=", vectors)
print("values=", values)
P = vectors.T.dot(C.T)  # projection onto the eigenbasis
print("project result:\n", P.T)
```
## lab41_env_check
```
import tensorflow as tf
import keras
import numpy as np
print(tf.__version__)
print(keras.__version__)
print(np.__version__)
```
## lab42_tensor1
```
import tensorflow as tf
hello1 = tf.constant('hello tensor in lab42')
print(type(hello1), dir(hello1))
print(hello1)
print(hello1.numpy())
```
## lab43_tensor1_legacy
```
import tensorflow as tf
# when you are running version1
tf.compat.v1.disable_eager_execution()
hello1 = tf.constant('hello tensor in lab43')
print(type(hello1), dir(hello1))
print(hello1)
session1 = tf.compat.v1.Session()
result1 = session1.run(hello1)
print(result1)
session1.close()
```
## lab44_tensor2
```
import tensorflow as tf
import numpy as np
a = np.array([1, 2, 3])
b = np.array([4, 5, 6])
c = np.add(a, b)
print(c)
d = tf.add(a, b)
print(d.numpy())
t1 = tf.constant(a)
t2 = tf.constant(b)
t3 = tf.add(t1, t2)
print(t3.numpy())
```
## lab45_tensor3
```python=
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
a = tf.compat.v1.placeholder(dtype=tf.int32, shape=(None,))
b = tf.compat.v1.placeholder(dtype=tf.int32, shape=(None,))
c = tf.add(a, b)
with tf.compat.v1.Session() as session1:
result = session1.run(c, feed_dict={a: [1, 2, 3], b: [4, 5, 6]})
print(result)
```
## lab46_tensor3_tf2
```python=3.7
import tensorflow as tf
@tf.function
def add(p, q):
return tf.math.add(p, q)
print(add([1, 2, 3], [4, 5, 6]))
```
## lab47_tensor4
```
import tensorflow as tf
from datetime import datetime
# manual make a directory logs
stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
logdir = 'logs/%s' % stamp
writer = tf.summary.create_file_writer(logdir)
@tf.function
def computeArea(sides):
a = sides[:, 0]
b = sides[:, 1]
c = sides[:, 2]
s = (a + b + c) / 2
areaSquare = s * (s - a) * (s - b) * (s - c)
return areaSquare ** 0.5
tf.summary.trace_on(graph=True, profiler=True)
print(computeArea(tf.constant([[3.0, 4.0, 5.0], [6.0, 6.0, 6.0]])))
with writer.as_default():
tf.summary.trace_export(name="lab47", step=0, profiler_outdir=logdir)
tf.summary.trace_off()
```
```
tensorboard --logdir=logs\20200911-141015
```
## lab48_tensor4 (for v1)
```
import tensorflow as tf
from datetime import datetime
tf.compat.v1.disable_eager_execution()
# manual make a directory logs
stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
logdir = 'logs/%s' % stamp
@tf.function
def computeArea(sides):
a = sides[:, 0]
b = sides[:, 1]
c = sides[:, 2]
s = (a + b + c) / 2
areaSquare = s * (s - a) * (s - b) * (s - c)
return areaSquare ** 0.5
with tf.compat.v1.Session() as session1:
with tf.compat.v1.summary.FileWriter(logdir, graph=session1.graph) as writer:
area = computeArea(tf.constant([[3.0, 4.0, 5.0], [6.0, 6.0, 6.0]]))
print(session1.run(area))
```
## tensorboard update
```
pip install -U setuptools -i "https://artifactory.global.standardchartered.com/artifactory/api/pypi/pypi/simple/"
```
## tensorboard_modify
```python=
import tensorflow as tf
from datetime import datetime
# manual make a directory logs
stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
logdir = 'logs/%s' % stamp
writer = tf.summary.create_file_writer(logdir)
@tf.function
def computeArea(sides):
a = sides[:, 0]
b = sides[:, 1]
c = sides[:, 2]
temp = a + b
temp2 = temp + c
s = temp2 / 2
# s = (a + b + c) / 2
areaSquare = s * (s - a) * (s - b) * (s - c)
return areaSquare ** 0.5
tf.summary.trace_on(graph=True, profiler=True)
print(computeArea(tf.constant([[3.0, 4.0, 5.0], [6.0, 6.0, 6.0]])))
with writer.as_default():
tf.summary.trace_export(name="lab47", step=0, profiler_outdir=logdir)
tf.summary.trace_off()
```
## https://developer.nvidia.com/tensorrt
## https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit.html
* https://github.com/tensorflow/models
* https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit/pretrained-models.html
* https://pytorch.org/docs/stable/torchvision/models.html
## dataset lab49.data
```
Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
6,148,72,35,0,33.6,0.627,50,1
1,85,66,29,0,26.6,0.351,31,0
8,183,64,0,0,23.3,0.672,32,1
1,89,66,23,94,28.1,0.167,21,0
0,137,40,35,168,43.1,2.288,33,1
5,116,74,0,0,25.6,0.201,30,0
3,78,50,32,88,31,0.248,26,1
10,115,0,0,0,35.3,0.134,29,0
2,197,70,45,543,30.5,0.158,53,1
8,125,96,0,0,0,0.232,54,1
4,110,92,0,0,37.6,0.191,30,0
10,168,74,0,0,38,0.537,34,1
10,139,80,0,0,27.1,1.441,57,0
1,189,60,23,846,30.1,0.398,59,1
5,166,72,19,175,25.8,0.587,51,1
7,100,0,0,0,30,0.484,32,1
0,118,84,47,230,45.8,0.551,31,1
7,107,74,0,0,29.6,0.254,31,1
1,103,30,38,83,43.3,0.183,33,0
1,115,70,30,96,34.6,0.529,32,1
3,126,88,41,235,39.3,0.704,27,0
8,99,84,0,0,35.4,0.388,50,0
7,196,90,0,0,39.8,0.451,41,1
9,119,80,35,0,29,0.263,29,1
11,143,94,33,146,36.6,0.254,51,1
10,125,70,26,115,31.1,0.205,41,1
7,147,76,0,0,39.4,0.257,43,1
1,97,66,15,140,23.2,0.487,22,0
13,145,82,19,110,22.2,0.245,57,0
5,117,92,0,0,34.1,0.337,38,0
5,109,75,26,0,36,0.546,60,0
3,158,76,36,245,31.6,0.851,28,1
3,88,58,11,54,24.8,0.267,22,0
6,92,92,0,0,19.9,0.188,28,0
10,122,78,31,0,27.6,0.512,45,0
4,103,60,33,192,24,0.966,33,0
11,138,76,0,0,33.2,0.42,35,0
9,102,76,37,0,32.9,0.665,46,1
2,90,68,42,0,38.2,0.503,27,1
4,111,72,47,207,37.1,1.39,56,1
3,180,64,25,70,34,0.271,26,0
7,133,84,0,0,40.2,0.696,37,0
7,106,92,18,0,22.7,0.235,48,0
9,171,110,24,240,45.4,0.721,54,1
7,159,64,0,0,27.4,0.294,40,0
0,180,66,39,0,42,1.893,25,1
1,146,56,0,0,29.7,0.564,29,0
2,71,70,27,0,28,0.586,22,0
7,103,66,32,0,39.1,0.344,31,1
7,105,0,0,0,0,0.305,24,0
1,103,80,11,82,19.4,0.491,22,0
1,101,50,15,36,24.2,0.526,26,0
5,88,66,21,23,24.4,0.342,30,0
8,176,90,34,300,33.7,0.467,58,1
7,150,66,42,342,34.7,0.718,42,0
1,73,50,10,0,23,0.248,21,0
7,187,68,39,304,37.7,0.254,41,1
0,100,88,60,110,46.8,0.962,31,0
0,146,82,0,0,40.5,1.781,44,0
0,105,64,41,142,41.5,0.173,22,0
2,84,0,0,0,0,0.304,21,0
8,133,72,0,0,32.9,0.27,39,1
5,44,62,0,0,25,0.587,36,0
2,141,58,34,128,25.4,0.699,24,0
7,114,66,0,0,32.8,0.258,42,1
5,99,74,27,0,29,0.203,32,0
0,109,88,30,0,32.5,0.855,38,1
2,109,92,0,0,42.7,0.845,54,0
1,95,66,13,38,19.6,0.334,25,0
4,146,85,27,100,28.9,0.189,27,0
2,100,66,20,90,32.9,0.867,28,1
5,139,64,35,140,28.6,0.411,26,0
13,126,90,0,0,43.4,0.583,42,1
4,129,86,20,270,35.1,0.231,23,0
1,79,75,30,0,32,0.396,22,0
1,0,48,20,0,24.7,0.14,22,0
7,62,78,0,0,32.6,0.391,41,0
5,95,72,33,0,37.7,0.37,27,0
0,131,0,0,0,43.2,0.27,26,1
2,112,66,22,0,25,0.307,24,0
3,113,44,13,0,22.4,0.14,22,0
2,74,0,0,0,0,0.102,22,0
7,83,78,26,71,29.3,0.767,36,0
0,101,65,28,0,24.6,0.237,22,0
5,137,108,0,0,48.8,0.227,37,1
2,110,74,29,125,32.4,0.698,27,0
13,106,72,54,0,36.6,0.178,45,0
2,100,68,25,71,38.5,0.324,26,0
15,136,70,32,110,37.1,0.153,43,1
1,107,68,19,0,26.5,0.165,24,0
1,80,55,0,0,19.1,0.258,21,0
4,123,80,15,176,32,0.443,34,0
7,81,78,40,48,46.7,0.261,42,0
4,134,72,0,0,23.8,0.277,60,1
2,142,82,18,64,24.7,0.761,21,0
6,144,72,27,228,33.9,0.255,40,0
2,92,62,28,0,31.6,0.13,24,0
1,71,48,18,76,20.4,0.323,22,0
6,93,50,30,64,28.7,0.356,23,0
1,122,90,51,220,49.7,0.325,31,1
1,163,72,0,0,39,1.222,33,1
1,151,60,0,0,26.1,0.179,22,0
0,125,96,0,0,22.5,0.262,21,0
1,81,72,18,40,26.6,0.283,24,0
2,85,65,0,0,39.6,0.93,27,0
1,126,56,29,152,28.7,0.801,21,0
1,96,122,0,0,22.4,0.207,27,0
4,144,58,28,140,29.5,0.287,37,0
3,83,58,31,18,34.3,0.336,25,0
0,95,85,25,36,37.4,0.247,24,1
3,171,72,33,135,33.3,0.199,24,1
8,155,62,26,495,34,0.543,46,1
1,89,76,34,37,31.2,0.192,23,0
4,76,62,0,0,34,0.391,25,0
7,160,54,32,175,30.5,0.588,39,1
4,146,92,0,0,31.2,0.539,61,1
5,124,74,0,0,34,0.22,38,1
5,78,48,0,0,33.7,0.654,25,0
4,97,60,23,0,28.2,0.443,22,0
4,99,76,15,51,23.2,0.223,21,0
0,162,76,56,100,53.2,0.759,25,1
6,111,64,39,0,34.2,0.26,24,0
2,107,74,30,100,33.6,0.404,23,0
5,132,80,0,0,26.8,0.186,69,0
0,113,76,0,0,33.3,0.278,23,1
1,88,30,42,99,55,0.496,26,1
3,120,70,30,135,42.9,0.452,30,0
1,118,58,36,94,33.3,0.261,23,0
1,117,88,24,145,34.5,0.403,40,1
0,105,84,0,0,27.9,0.741,62,1
4,173,70,14,168,29.7,0.361,33,1
9,122,56,0,0,33.3,1.114,33,1
3,170,64,37,225,34.5,0.356,30,1
8,84,74,31,0,38.3,0.457,39,0
2,96,68,13,49,21.1,0.647,26,0
2,125,60,20,140,33.8,0.088,31,0
0,100,70,26,50,30.8,0.597,21,0
0,93,60,25,92,28.7,0.532,22,0
0,129,80,0,0,31.2,0.703,29,0
5,105,72,29,325,36.9,0.159,28,0
3,128,78,0,0,21.1,0.268,55,0
5,106,82,30,0,39.5,0.286,38,0
2,108,52,26,63,32.5,0.318,22,0
10,108,66,0,0,32.4,0.272,42,1
4,154,62,31,284,32.8,0.237,23,0
0,102,75,23,0,0,0.572,21,0
9,57,80,37,0,32.8,0.096,41,0
2,106,64,35,119,30.5,1.4,34,0
5,147,78,0,0,33.7,0.218,65,0
2,90,70,17,0,27.3,0.085,22,0
1,136,74,50,204,37.4,0.399,24,0
4,114,65,0,0,21.9,0.432,37,0
9,156,86,28,155,34.3,1.189,42,1
1,153,82,42,485,40.6,0.687,23,0
8,188,78,0,0,47.9,0.137,43,1
7,152,88,44,0,50,0.337,36,1
2,99,52,15,94,24.6,0.637,21,0
1,109,56,21,135,25.2,0.833,23,0
2,88,74,19,53,29,0.229,22,0
17,163,72,41,114,40.9,0.817,47,1
4,151,90,38,0,29.7,0.294,36,0
7,102,74,40,105,37.2,0.204,45,0
0,114,80,34,285,44.2,0.167,27,0
2,100,64,23,0,29.7,0.368,21,0
0,131,88,0,0,31.6,0.743,32,1
6,104,74,18,156,29.9,0.722,41,1
3,148,66,25,0,32.5,0.256,22,0
4,120,68,0,0,29.6,0.709,34,0
4,110,66,0,0,31.9,0.471,29,0
3,111,90,12,78,28.4,0.495,29,0
6,102,82,0,0,30.8,0.18,36,1
6,134,70,23,130,35.4,0.542,29,1
2,87,0,23,0,28.9,0.773,25,0
1,79,60,42,48,43.5,0.678,23,0
2,75,64,24,55,29.7,0.37,33,0
8,179,72,42,130,32.7,0.719,36,1
6,85,78,0,0,31.2,0.382,42,0
0,129,110,46,130,67.1,0.319,26,1
5,143,78,0,0,45,0.19,47,0
5,130,82,0,0,39.1,0.956,37,1
6,87,80,0,0,23.2,0.084,32,0
0,119,64,18,92,34.9,0.725,23,0
1,0,74,20,23,27.7,0.299,21,0
5,73,60,0,0,26.8,0.268,27,0
4,141,74,0,0,27.6,0.244,40,0
7,194,68,28,0,35.9,0.745,41,1
8,181,68,36,495,30.1,0.615,60,1
1,128,98,41,58,32,1.321,33,1
8,109,76,39,114,27.9,0.64,31,1
5,139,80,35,160,31.6,0.361,25,1
3,111,62,0,0,22.6,0.142,21,0
9,123,70,44,94,33.1,0.374,40,0
7,159,66,0,0,30.4,0.383,36,1
11,135,0,0,0,52.3,0.578,40,1
8,85,55,20,0,24.4,0.136,42,0
5,158,84,41,210,39.4,0.395,29,1
1,105,58,0,0,24.3,0.187,21,0
3,107,62,13,48,22.9,0.678,23,1
4,109,64,44,99,34.8,0.905,26,1
4,148,60,27,318,30.9,0.15,29,1
0,113,80,16,0,31,0.874,21,0
1,138,82,0,0,40.1,0.236,28,0
0,108,68,20,0,27.3,0.787,32,0
2,99,70,16,44,20.4,0.235,27,0
6,103,72,32,190,37.7,0.324,55,0
5,111,72,28,0,23.9,0.407,27,0
8,196,76,29,280,37.5,0.605,57,1
5,162,104,0,0,37.7,0.151,52,1
1,96,64,27,87,33.2,0.289,21,0
7,184,84,33,0,35.5,0.355,41,1
2,81,60,22,0,27.7,0.29,25,0
0,147,85,54,0,42.8,0.375,24,0
7,179,95,31,0,34.2,0.164,60,0
0,140,65,26,130,42.6,0.431,24,1
9,112,82,32,175,34.2,0.26,36,1
12,151,70,40,271,41.8,0.742,38,1
5,109,62,41,129,35.8,0.514,25,1
6,125,68,30,120,30,0.464,32,0
5,85,74,22,0,29,1.224,32,1
5,112,66,0,0,37.8,0.261,41,1
0,177,60,29,478,34.6,1.072,21,1
2,158,90,0,0,31.6,0.805,66,1
7,119,0,0,0,25.2,0.209,37,0
7,142,60,33,190,28.8,0.687,61,0
1,100,66,15,56,23.6,0.666,26,0
1,87,78,27,32,34.6,0.101,22,0
0,101,76,0,0,35.7,0.198,26,0
3,162,52,38,0,37.2,0.652,24,1
4,197,70,39,744,36.7,2.329,31,0
0,117,80,31,53,45.2,0.089,24,0
4,142,86,0,0,44,0.645,22,1
6,134,80,37,370,46.2,0.238,46,1
1,79,80,25,37,25.4,0.583,22,0
4,122,68,0,0,35,0.394,29,0
3,74,68,28,45,29.7,0.293,23,0
4,171,72,0,0,43.6,0.479,26,1
7,181,84,21,192,35.9,0.586,51,1
0,179,90,27,0,44.1,0.686,23,1
9,164,84,21,0,30.8,0.831,32,1
0,104,76,0,0,18.4,0.582,27,0
1,91,64,24,0,29.2,0.192,21,0
4,91,70,32,88,33.1,0.446,22,0
3,139,54,0,0,25.6,0.402,22,1
6,119,50,22,176,27.1,1.318,33,1
2,146,76,35,194,38.2,0.329,29,0
9,184,85,15,0,30,1.213,49,1
10,122,68,0,0,31.2,0.258,41,0
0,165,90,33,680,52.3,0.427,23,0
9,124,70,33,402,35.4,0.282,34,0
1,111,86,19,0,30.1,0.143,23,0
9,106,52,0,0,31.2,0.38,42,0
2,129,84,0,0,28,0.284,27,0
2,90,80,14,55,24.4,0.249,24,0
0,86,68,32,0,35.8,0.238,25,0
12,92,62,7,258,27.6,0.926,44,1
1,113,64,35,0,33.6,0.543,21,1
3,111,56,39,0,30.1,0.557,30,0
2,114,68,22,0,28.7,0.092,25,0
1,193,50,16,375,25.9,0.655,24,0
11,155,76,28,150,33.3,1.353,51,1
3,191,68,15,130,30.9,0.299,34,0
3,141,0,0,0,30,0.761,27,1
4,95,70,32,0,32.1,0.612,24,0
3,142,80,15,0,32.4,0.2,63,0
4,123,62,0,0,32,0.226,35,1
5,96,74,18,67,33.6,0.997,43,0
0,138,0,0,0,36.3,0.933,25,1
2,128,64,42,0,40,1.101,24,0
0,102,52,0,0,25.1,0.078,21,0
2,146,0,0,0,27.5,0.24,28,1
10,101,86,37,0,45.6,1.136,38,1
2,108,62,32,56,25.2,0.128,21,0
3,122,78,0,0,23,0.254,40,0
1,71,78,50,45,33.2,0.422,21,0
13,106,70,0,0,34.2,0.251,52,0
2,100,70,52,57,40.5,0.677,25,0
7,106,60,24,0,26.5,0.296,29,1
0,104,64,23,116,27.8,0.454,23,0
5,114,74,0,0,24.9,0.744,57,0
2,108,62,10,278,25.3,0.881,22,0
0,146,70,0,0,37.9,0.334,28,1
10,129,76,28,122,35.9,0.28,39,0
7,133,88,15,155,32.4,0.262,37,0
7,161,86,0,0,30.4,0.165,47,1
2,108,80,0,0,27,0.259,52,1
7,136,74,26,135,26,0.647,51,0
5,155,84,44,545,38.7,0.619,34,0
1,119,86,39,220,45.6,0.808,29,1
4,96,56,17,49,20.8,0.34,26,0
5,108,72,43,75,36.1,0.263,33,0
0,78,88,29,40,36.9,0.434,21,0
0,107,62,30,74,36.6,0.757,25,1
2,128,78,37,182,43.3,1.224,31,1
1,128,48,45,194,40.5,0.613,24,1
0,161,50,0,0,21.9,0.254,65,0
6,151,62,31,120,35.5,0.692,28,0
2,146,70,38,360,28,0.337,29,1
0,126,84,29,215,30.7,0.52,24,0
14,100,78,25,184,36.6,0.412,46,1
8,112,72,0,0,23.6,0.84,58,0
0,167,0,0,0,32.3,0.839,30,1
2,144,58,33,135,31.6,0.422,25,1
5,77,82,41,42,35.8,0.156,35,0
5,115,98,0,0,52.9,0.209,28,1
3,150,76,0,0,21,0.207,37,0
2,120,76,37,105,39.7,0.215,29,0
10,161,68,23,132,25.5,0.326,47,1
0,137,68,14,148,24.8,0.143,21,0
0,128,68,19,180,30.5,1.391,25,1
2,124,68,28,205,32.9,0.875,30,1
6,80,66,30,0,26.2,0.313,41,0
0,106,70,37,148,39.4,0.605,22,0
2,155,74,17,96,26.6,0.433,27,1
3,113,50,10,85,29.5,0.626,25,0
7,109,80,31,0,35.9,1.127,43,1
2,112,68,22,94,34.1,0.315,26,0
3,99,80,11,64,19.3,0.284,30,0
3,182,74,0,0,30.5,0.345,29,1
3,115,66,39,140,38.1,0.15,28,0
6,194,78,0,0,23.5,0.129,59,1
4,129,60,12,231,27.5,0.527,31,0
3,112,74,30,0,31.6,0.197,25,1
0,124,70,20,0,27.4,0.254,36,1
13,152,90,33,29,26.8,0.731,43,1
2,112,75,32,0,35.7,0.148,21,0
1,157,72,21,168,25.6,0.123,24,0
1,122,64,32,156,35.1,0.692,30,1
10,179,70,0,0,35.1,0.2,37,0
2,102,86,36,120,45.5,0.127,23,1
6,105,70,32,68,30.8,0.122,37,0
8,118,72,19,0,23.1,1.476,46,0
2,87,58,16,52,32.7,0.166,25,0
1,180,0,0,0,43.3,0.282,41,1
12,106,80,0,0,23.6,0.137,44,0
1,95,60,18,58,23.9,0.26,22,0
0,165,76,43,255,47.9,0.259,26,0
0,117,0,0,0,33.8,0.932,44,0
5,115,76,0,0,31.2,0.343,44,1
9,152,78,34,171,34.2,0.893,33,1
7,178,84,0,0,39.9,0.331,41,1
1,130,70,13,105,25.9,0.472,22,0
1,95,74,21,73,25.9,0.673,36,0
1,0,68,35,0,32,0.389,22,0
5,122,86,0,0,34.7,0.29,33,0
8,95,72,0,0,36.8,0.485,57,0
8,126,88,36,108,38.5,0.349,49,0
1,139,46,19,83,28.7,0.654,22,0
3,116,0,0,0,23.5,0.187,23,0
3,99,62,19,74,21.8,0.279,26,0
5,0,80,32,0,41,0.346,37,1
4,92,80,0,0,42.2,0.237,29,0
4,137,84,0,0,31.2,0.252,30,0
3,61,82,28,0,34.4,0.243,46,0
1,90,62,12,43,27.2,0.58,24,0
3,90,78,0,0,42.7,0.559,21,0
9,165,88,0,0,30.4,0.302,49,1
1,125,50,40,167,33.3,0.962,28,1
13,129,0,30,0,39.9,0.569,44,1
12,88,74,40,54,35.3,0.378,48,0
1,196,76,36,249,36.5,0.875,29,1
5,189,64,33,325,31.2,0.583,29,1
5,158,70,0,0,29.8,0.207,63,0
5,103,108,37,0,39.2,0.305,65,0
4,146,78,0,0,38.5,0.52,67,1
4,147,74,25,293,34.9,0.385,30,0
5,99,54,28,83,34,0.499,30,0
6,124,72,0,0,27.6,0.368,29,1
0,101,64,17,0,21,0.252,21,0
3,81,86,16,66,27.5,0.306,22,0
1,133,102,28,140,32.8,0.234,45,1
3,173,82,48,465,38.4,2.137,25,1
0,118,64,23,89,0,1.731,21,0
0,84,64,22,66,35.8,0.545,21,0
2,105,58,40,94,34.9,0.225,25,0
2,122,52,43,158,36.2,0.816,28,0
12,140,82,43,325,39.2,0.528,58,1
0,98,82,15,84,25.2,0.299,22,0
1,87,60,37,75,37.2,0.509,22,0
4,156,75,0,0,48.3,0.238,32,1
0,93,100,39,72,43.4,1.021,35,0
1,107,72,30,82,30.8,0.821,24,0
0,105,68,22,0,20,0.236,22,0
1,109,60,8,182,25.4,0.947,21,0
1,90,62,18,59,25.1,1.268,25,0
1,125,70,24,110,24.3,0.221,25,0
1,119,54,13,50,22.3,0.205,24,0
5,116,74,29,0,32.3,0.66,35,1
8,105,100,36,0,43.3,0.239,45,1
5,144,82,26,285,32,0.452,58,1
3,100,68,23,81,31.6,0.949,28,0
1,100,66,29,196,32,0.444,42,0
5,166,76,0,0,45.7,0.34,27,1
1,131,64,14,415,23.7,0.389,21,0
4,116,72,12,87,22.1,0.463,37,0
4,158,78,0,0,32.9,0.803,31,1
2,127,58,24,275,27.7,1.6,25,0
3,96,56,34,115,24.7,0.944,39,0
0,131,66,40,0,34.3,0.196,22,1
3,82,70,0,0,21.1,0.389,25,0
3,193,70,31,0,34.9,0.241,25,1
4,95,64,0,0,32,0.161,31,1
6,137,61,0,0,24.2,0.151,55,0
5,136,84,41,88,35,0.286,35,1
9,72,78,25,0,31.6,0.28,38,0
5,168,64,0,0,32.9,0.135,41,1
2,123,48,32,165,42.1,0.52,26,0
4,115,72,0,0,28.9,0.376,46,1
0,101,62,0,0,21.9,0.336,25,0
8,197,74,0,0,25.9,1.191,39,1
1,172,68,49,579,42.4,0.702,28,1
6,102,90,39,0,35.7,0.674,28,0
1,112,72,30,176,34.4,0.528,25,0
1,143,84,23,310,42.4,1.076,22,0
1,143,74,22,61,26.2,0.256,21,0
0,138,60,35,167,34.6,0.534,21,1
3,173,84,33,474,35.7,0.258,22,1
1,97,68,21,0,27.2,1.095,22,0
4,144,82,32,0,38.5,0.554,37,1
1,83,68,0,0,18.2,0.624,27,0
3,129,64,29,115,26.4,0.219,28,1
1,119,88,41,170,45.3,0.507,26,0
2,94,68,18,76,26,0.561,21,0
0,102,64,46,78,40.6,0.496,21,0
2,115,64,22,0,30.8,0.421,21,0
8,151,78,32,210,42.9,0.516,36,1
4,184,78,39,277,37,0.264,31,1
0,94,0,0,0,0,0.256,25,0
1,181,64,30,180,34.1,0.328,38,1
0,135,94,46,145,40.6,0.284,26,0
1,95,82,25,180,35,0.233,43,1
2,99,0,0,0,22.2,0.108,23,0
3,89,74,16,85,30.4,0.551,38,0
1,80,74,11,60,30,0.527,22,0
2,139,75,0,0,25.6,0.167,29,0
1,90,68,8,0,24.5,1.138,36,0
0,141,0,0,0,42.4,0.205,29,1
12,140,85,33,0,37.4,0.244,41,0
5,147,75,0,0,29.9,0.434,28,0
1,97,70,15,0,18.2,0.147,21,0
6,107,88,0,0,36.8,0.727,31,0
0,189,104,25,0,34.3,0.435,41,1
2,83,66,23,50,32.2,0.497,22,0
4,117,64,27,120,33.2,0.23,24,0
8,108,70,0,0,30.5,0.955,33,1
4,117,62,12,0,29.7,0.38,30,1
0,180,78,63,14,59.4,2.42,25,1
1,100,72,12,70,25.3,0.658,28,0
0,95,80,45,92,36.5,0.33,26,0
0,104,64,37,64,33.6,0.51,22,1
0,120,74,18,63,30.5,0.285,26,0
1,82,64,13,95,21.2,0.415,23,0
2,134,70,0,0,28.9,0.542,23,1
0,91,68,32,210,39.9,0.381,25,0
2,119,0,0,0,19.6,0.832,72,0
2,100,54,28,105,37.8,0.498,24,0
14,175,62,30,0,33.6,0.212,38,1
1,135,54,0,0,26.7,0.687,62,0
5,86,68,28,71,30.2,0.364,24,0
10,148,84,48,237,37.6,1.001,51,1
9,134,74,33,60,25.9,0.46,81,0
9,120,72,22,56,20.8,0.733,48,0
1,71,62,0,0,21.8,0.416,26,0
8,74,70,40,49,35.3,0.705,39,0
5,88,78,30,0,27.6,0.258,37,0
10,115,98,0,0,24,1.022,34,0
0,124,56,13,105,21.8,0.452,21,0
0,74,52,10,36,27.8,0.269,22,0
0,97,64,36,100,36.8,0.6,25,0
8,120,0,0,0,30,0.183,38,1
6,154,78,41,140,46.1,0.571,27,0
1,144,82,40,0,41.3,0.607,28,0
0,137,70,38,0,33.2,0.17,22,0
0,119,66,27,0,38.8,0.259,22,0
7,136,90,0,0,29.9,0.21,50,0
4,114,64,0,0,28.9,0.126,24,0
0,137,84,27,0,27.3,0.231,59,0
2,105,80,45,191,33.7,0.711,29,1
7,114,76,17,110,23.8,0.466,31,0
8,126,74,38,75,25.9,0.162,39,0
4,132,86,31,0,28,0.419,63,0
3,158,70,30,328,35.5,0.344,35,1
0,123,88,37,0,35.2,0.197,29,0
4,85,58,22,49,27.8,0.306,28,0
0,84,82,31,125,38.2,0.233,23,0
0,145,0,0,0,44.2,0.63,31,1
0,135,68,42,250,42.3,0.365,24,1
1,139,62,41,480,40.7,0.536,21,0
0,173,78,32,265,46.5,1.159,58,0
4,99,72,17,0,25.6,0.294,28,0
8,194,80,0,0,26.1,0.551,67,0
2,83,65,28,66,36.8,0.629,24,0
2,89,90,30,0,33.5,0.292,42,0
4,99,68,38,0,32.8,0.145,33,0
4,125,70,18,122,28.9,1.144,45,1
3,80,0,0,0,0,0.174,22,0
6,166,74,0,0,26.6,0.304,66,0
5,110,68,0,0,26,0.292,30,0
2,81,72,15,76,30.1,0.547,25,0
7,195,70,33,145,25.1,0.163,55,1
6,154,74,32,193,29.3,0.839,39,0
2,117,90,19,71,25.2,0.313,21,0
3,84,72,32,0,37.2,0.267,28,0
6,0,68,41,0,39,0.727,41,1
7,94,64,25,79,33.3,0.738,41,0
3,96,78,39,0,37.3,0.238,40,0
10,75,82,0,0,33.3,0.263,38,0
0,180,90,26,90,36.5,0.314,35,1
1,130,60,23,170,28.6,0.692,21,0
2,84,50,23,76,30.4,0.968,21,0
8,120,78,0,0,25,0.409,64,0
12,84,72,31,0,29.7,0.297,46,1
0,139,62,17,210,22.1,0.207,21,0
9,91,68,0,0,24.2,0.2,58,0
2,91,62,0,0,27.3,0.525,22,0
3,99,54,19,86,25.6,0.154,24,0
3,163,70,18,105,31.6,0.268,28,1
9,145,88,34,165,30.3,0.771,53,1
7,125,86,0,0,37.6,0.304,51,0
13,76,60,0,0,32.8,0.18,41,0
6,129,90,7,326,19.6,0.582,60,0
2,68,70,32,66,25,0.187,25,0
3,124,80,33,130,33.2,0.305,26,0
6,114,0,0,0,0,0.189,26,0
9,130,70,0,0,34.2,0.652,45,1
3,125,58,0,0,31.6,0.151,24,0
3,87,60,18,0,21.8,0.444,21,0
1,97,64,19,82,18.2,0.299,21,0
3,116,74,15,105,26.3,0.107,24,0
0,117,66,31,188,30.8,0.493,22,0
0,111,65,0,0,24.6,0.66,31,0
2,122,60,18,106,29.8,0.717,22,0
0,107,76,0,0,45.3,0.686,24,0
1,86,66,52,65,41.3,0.917,29,0
6,91,0,0,0,29.8,0.501,31,0
1,77,56,30,56,33.3,1.251,24,0
4,132,0,0,0,32.9,0.302,23,1
0,105,90,0,0,29.6,0.197,46,0
0,57,60,0,0,21.7,0.735,67,0
0,127,80,37,210,36.3,0.804,23,0
3,129,92,49,155,36.4,0.968,32,1
8,100,74,40,215,39.4,0.661,43,1
3,128,72,25,190,32.4,0.549,27,1
10,90,85,32,0,34.9,0.825,56,1
4,84,90,23,56,39.5,0.159,25,0
1,88,78,29,76,32,0.365,29,0
8,186,90,35,225,34.5,0.423,37,1
5,187,76,27,207,43.6,1.034,53,1
4,131,68,21,166,33.1,0.16,28,0
1,164,82,43,67,32.8,0.341,50,0
4,189,110,31,0,28.5,0.68,37,0
1,116,70,28,0,27.4,0.204,21,0
3,84,68,30,106,31.9,0.591,25,0
6,114,88,0,0,27.8,0.247,66,0
1,88,62,24,44,29.9,0.422,23,0
1,84,64,23,115,36.9,0.471,28,0
7,124,70,33,215,25.5,0.161,37,0
1,97,70,40,0,38.1,0.218,30,0
8,110,76,0,0,27.8,0.237,58,0
11,103,68,40,0,46.2,0.126,42,0
11,85,74,0,0,30.1,0.3,35,0
6,125,76,0,0,33.8,0.121,54,1
0,198,66,32,274,41.3,0.502,28,1
1,87,68,34,77,37.6,0.401,24,0
6,99,60,19,54,26.9,0.497,32,0
0,91,80,0,0,32.4,0.601,27,0
2,95,54,14,88,26.1,0.748,22,0
1,99,72,30,18,38.6,0.412,21,0
6,92,62,32,126,32,0.085,46,0
4,154,72,29,126,31.3,0.338,37,0
0,121,66,30,165,34.3,0.203,33,1
3,78,70,0,0,32.5,0.27,39,0
2,130,96,0,0,22.6,0.268,21,0
3,111,58,31,44,29.5,0.43,22,0
2,98,60,17,120,34.7,0.198,22,0
1,143,86,30,330,30.1,0.892,23,0
1,119,44,47,63,35.5,0.28,25,0
6,108,44,20,130,24,0.813,35,0
2,118,80,0,0,42.9,0.693,21,1
10,133,68,0,0,27,0.245,36,0
2,197,70,99,0,34.7,0.575,62,1
0,151,90,46,0,42.1,0.371,21,1
6,109,60,27,0,25,0.206,27,0
12,121,78,17,0,26.5,0.259,62,0
8,100,76,0,0,38.7,0.19,42,0
8,124,76,24,600,28.7,0.687,52,1
1,93,56,11,0,22.5,0.417,22,0
8,143,66,0,0,34.9,0.129,41,1
6,103,66,0,0,24.3,0.249,29,0
3,176,86,27,156,33.3,1.154,52,1
0,73,0,0,0,21.1,0.342,25,0
11,111,84,40,0,46.8,0.925,45,1
2,112,78,50,140,39.4,0.175,24,0
3,132,80,0,0,34.4,0.402,44,1
2,82,52,22,115,28.5,1.699,25,0
6,123,72,45,230,33.6,0.733,34,0
0,188,82,14,185,32,0.682,22,1
0,67,76,0,0,45.3,0.194,46,0
1,89,24,19,25,27.8,0.559,21,0
1,173,74,0,0,36.8,0.088,38,1
1,109,38,18,120,23.1,0.407,26,0
1,108,88,19,0,27.1,0.4,24,0
6,96,0,0,0,23.7,0.19,28,0
1,124,74,36,0,27.8,0.1,30,0
7,150,78,29,126,35.2,0.692,54,1
4,183,0,0,0,28.4,0.212,36,1
1,124,60,32,0,35.8,0.514,21,0
1,181,78,42,293,40,1.258,22,1
1,92,62,25,41,19.5,0.482,25,0
0,152,82,39,272,41.5,0.27,27,0
1,111,62,13,182,24,0.138,23,0
3,106,54,21,158,30.9,0.292,24,0
3,174,58,22,194,32.9,0.593,36,1
7,168,88,42,321,38.2,0.787,40,1
6,105,80,28,0,32.5,0.878,26,0
11,138,74,26,144,36.1,0.557,50,1
3,106,72,0,0,25.8,0.207,27,0
6,117,96,0,0,28.7,0.157,30,0
2,68,62,13,15,20.1,0.257,23,0
9,112,82,24,0,28.2,1.282,50,1
0,119,0,0,0,32.4,0.141,24,1
2,112,86,42,160,38.4,0.246,28,0
2,92,76,20,0,24.2,1.698,28,0
6,183,94,0,0,40.8,1.461,45,0
0,94,70,27,115,43.5,0.347,21,0
2,108,64,0,0,30.8,0.158,21,0
4,90,88,47,54,37.7,0.362,29,0
0,125,68,0,0,24.7,0.206,21,0
0,132,78,0,0,32.4,0.393,21,0
5,128,80,0,0,34.6,0.144,45,0
4,94,65,22,0,24.7,0.148,21,0
7,114,64,0,0,27.4,0.732,34,1
0,102,78,40,90,34.5,0.238,24,0
2,111,60,0,0,26.2,0.343,23,0
1,128,82,17,183,27.5,0.115,22,0
10,92,62,0,0,25.9,0.167,31,0
13,104,72,0,0,31.2,0.465,38,1
5,104,74,0,0,28.8,0.153,48,0
2,94,76,18,66,31.6,0.649,23,0
7,97,76,32,91,40.9,0.871,32,1
1,100,74,12,46,19.5,0.149,28,0
0,102,86,17,105,29.3,0.695,27,0
4,128,70,0,0,34.3,0.303,24,0
6,147,80,0,0,29.5,0.178,50,1
4,90,0,0,0,28,0.61,31,0
3,103,72,30,152,27.6,0.73,27,0
2,157,74,35,440,39.4,0.134,30,0
1,167,74,17,144,23.4,0.447,33,1
0,179,50,36,159,37.8,0.455,22,1
11,136,84,35,130,28.3,0.26,42,1
0,107,60,25,0,26.4,0.133,23,0
1,91,54,25,100,25.2,0.234,23,0
1,117,60,23,106,33.8,0.466,27,0
5,123,74,40,77,34.1,0.269,28,0
2,120,54,0,0,26.8,0.455,27,0
1,106,70,28,135,34.2,0.142,22,0
2,155,52,27,540,38.7,0.24,25,1
2,101,58,35,90,21.8,0.155,22,0
1,120,80,48,200,38.9,1.162,41,0
11,127,106,0,0,39,0.19,51,0
3,80,82,31,70,34.2,1.292,27,1
10,162,84,0,0,27.7,0.182,54,0
1,199,76,43,0,42.9,1.394,22,1
8,167,106,46,231,37.6,0.165,43,1
9,145,80,46,130,37.9,0.637,40,1
6,115,60,39,0,33.7,0.245,40,1
1,112,80,45,132,34.8,0.217,24,0
4,145,82,18,0,32.5,0.235,70,1
10,111,70,27,0,27.5,0.141,40,1
6,98,58,33,190,34,0.43,43,0
9,154,78,30,100,30.9,0.164,45,0
6,165,68,26,168,33.6,0.631,49,0
1,99,58,10,0,25.4,0.551,21,0
10,68,106,23,49,35.5,0.285,47,0
3,123,100,35,240,57.3,0.88,22,0
8,91,82,0,0,35.6,0.587,68,0
6,195,70,0,0,30.9,0.328,31,1
9,156,86,0,0,24.8,0.23,53,1
0,93,60,0,0,35.3,0.263,25,0
3,121,52,0,0,36,0.127,25,1
2,101,58,17,265,24.2,0.614,23,0
2,56,56,28,45,24.2,0.332,22,0
0,162,76,36,0,49.6,0.364,26,1
0,95,64,39,105,44.6,0.366,22,0
4,125,80,0,0,32.3,0.536,27,1
5,136,82,0,0,0,0.64,69,0
2,129,74,26,205,33.2,0.591,25,0
3,130,64,0,0,23.1,0.314,22,0
1,107,50,19,0,28.3,0.181,29,0
1,140,74,26,180,24.1,0.828,23,0
1,144,82,46,180,46.1,0.335,46,1
8,107,80,0,0,24.6,0.856,34,0
13,158,114,0,0,42.3,0.257,44,1
2,121,70,32,95,39.1,0.886,23,0
7,129,68,49,125,38.5,0.439,43,1
2,90,60,0,0,23.5,0.191,25,0
7,142,90,24,480,30.4,0.128,43,1
3,169,74,19,125,29.9,0.268,31,1
0,99,0,0,0,25,0.253,22,0
4,127,88,11,155,34.5,0.598,28,0
4,118,70,0,0,44.5,0.904,26,0
2,122,76,27,200,35.9,0.483,26,0
6,125,78,31,0,27.6,0.565,49,1
1,168,88,29,0,35,0.905,52,1
2,129,0,0,0,38.5,0.304,41,0
4,110,76,20,100,28.4,0.118,27,0
6,80,80,36,0,39.8,0.177,28,0
10,115,0,0,0,0,0.261,30,1
2,127,46,21,335,34.4,0.176,22,0
9,164,78,0,0,32.8,0.148,45,1
2,93,64,32,160,38,0.674,23,1
3,158,64,13,387,31.2,0.295,24,0
5,126,78,27,22,29.6,0.439,40,0
10,129,62,36,0,41.2,0.441,38,1
0,134,58,20,291,26.4,0.352,21,0
3,102,74,0,0,29.5,0.121,32,0
7,187,50,33,392,33.9,0.826,34,1
3,173,78,39,185,33.8,0.97,31,1
10,94,72,18,0,23.1,0.595,56,0
1,108,60,46,178,35.5,0.415,24,0
5,97,76,27,0,35.6,0.378,52,1
4,83,86,19,0,29.3,0.317,34,0
1,114,66,36,200,38.1,0.289,21,0
1,149,68,29,127,29.3,0.349,42,1
5,117,86,30,105,39.1,0.251,42,0
1,111,94,0,0,32.8,0.265,45,0
4,112,78,40,0,39.4,0.236,38,0
1,116,78,29,180,36.1,0.496,25,0
0,141,84,26,0,32.4,0.433,22,0
2,175,88,0,0,22.9,0.326,22,0
2,92,52,0,0,30.1,0.141,22,0
3,130,78,23,79,28.4,0.323,34,1
8,120,86,0,0,28.4,0.259,22,1
2,174,88,37,120,44.5,0.646,24,1
2,106,56,27,165,29,0.426,22,0
2,105,75,0,0,23.3,0.56,53,0
4,95,60,32,0,35.4,0.284,28,0
0,126,86,27,120,27.4,0.515,21,0
8,65,72,23,0,32,0.6,42,0
2,99,60,17,160,36.6,0.453,21,0
1,102,74,0,0,39.5,0.293,42,1
11,120,80,37,150,42.3,0.785,48,1
3,102,44,20,94,30.8,0.4,26,0
1,109,58,18,116,28.5,0.219,22,0
9,140,94,0,0,32.7,0.734,45,1
13,153,88,37,140,40.6,1.174,39,0
12,100,84,33,105,30,0.488,46,0
1,147,94,41,0,49.3,0.358,27,1
1,81,74,41,57,46.3,1.096,32,0
3,187,70,22,200,36.4,0.408,36,1
6,162,62,0,0,24.3,0.178,50,1
4,136,70,0,0,31.2,1.182,22,1
1,121,78,39,74,39,0.261,28,0
3,108,62,24,0,26,0.223,25,0
0,181,88,44,510,43.3,0.222,26,1
8,154,78,32,0,32.4,0.443,45,1
1,128,88,39,110,36.5,1.057,37,1
7,137,90,41,0,32,0.391,39,0
0,123,72,0,0,36.3,0.258,52,1
1,106,76,0,0,37.5,0.197,26,0
6,190,92,0,0,35.5,0.278,66,1
2,88,58,26,16,28.4,0.766,22,0
9,170,74,31,0,44,0.403,43,1
9,89,62,0,0,22.5,0.142,33,0
10,101,76,48,180,32.9,0.171,63,0
2,122,70,27,0,36.8,0.34,27,0
5,121,72,23,112,26.2,0.245,30,0
1,126,60,0,0,30.1,0.349,47,1
1,93,70,31,0,30.4,0.315,23,0
```
## lab49
```python
import numpy
from keras.layers import Dense
from keras.models import Sequential

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]

# Fully-connected binary classifier: 8 -> 14 -> 8 -> 1 (sigmoid output).
model = Sequential()
model.add(Dense(14, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
print(model.summary())
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(inputList, resultList, epochs=200, batch_size=20)

# Evaluate on the training data itself (no hold-out split in this lab).
scores = model.evaluate(inputList, resultList)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    # fixed: loop body must be indented
    print(f"{name} value={score}")
```
## refactor
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]

# ctrl+alt+m
# Fully-connected binary classifier: 8 -> 14 -> 8 -> 1 (sigmoid output).
model = Sequential()
model.add(Dense(14, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
print(model.summary())
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(inputList, resultList, epochs=200, batch_size=20)

scores = model.evaluate(inputList, resultList)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    # fixed: loop body must be indented
    print(f"{name} value={score}")
```
## after refactor
```python
import numpy
from keras.layers import Dense
from keras.models import Sequential

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]


def createModel():
    """Build and compile the 8->14->8->1 binary classifier."""
    m = Sequential()
    m.add(Dense(14, input_dim=8, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return m


# ctrl+alt+m
model = createModel()
model.fit(inputList, resultList, epochs=200, batch_size=20)
scores = model.evaluate(inputList, resultList)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    # fixed: function and loop bodies must be indented
    print(f"{name} value={score}")
```
## after save model
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential
from keras.models import save_model

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]


def createModel():
    """Build and compile the 8->14->8->1 binary classifier."""
    m = Sequential()
    m.add(Dense(14, input_dim=8, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return m


# ctrl+alt+m
model = createModel()
model.fit(inputList, resultList, epochs=200, batch_size=20)
# Persist the trained weights + architecture so later labs can reload them.
save_model(model, 'data/lab50')
scores = model.evaluate(inputList, resultList)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    # fixed: function and loop bodies must be indented
    print(f"{name} value={score}")
```
## 18-Sep-2020
### lab50
```python=3.7
import numpy
from keras.layers import Dense
from keras.models import Sequential
# fixed: load_model is called below but was never imported
from keras.models import save_model, load_model

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]


def createModel():
    """Build and compile a fresh 8->14->8->1 binary classifier."""
    m = Sequential()
    m.add(Dense(14, input_dim=8, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return m


# ctrl+alt+m
# 1) Train, save, and score a model.
model = createModel()
model.fit(inputList, resultList, epochs=200, batch_size=20)
save_model(model, 'data/lab50')
scores = model.evaluate(inputList, resultList)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    print(f"[fit]{name} value={score}")

# 2) Score an untrained model for comparison (random weights).
print("Now perform evaluate without training..")
model2 = createModel()
scores2 = model2.evaluate(inputList, resultList)
for name, score in zip(model2.metrics_names, scores2):
    print(f"[Un-fit]{name} value={score}")

# 3) Reload the saved model and confirm it scores like the trained one.
print("Now perform evaluate from a load model")
model3 = load_model('data/lab50')
scores3 = model3.evaluate(inputList, resultList)
for name, score in zip(model3.metrics_names, scores3):
    print(f"[pre-train]{name} value={score}")
```
### lab51
```python
import numpy
from keras.layers import Dense
from keras.models import Sequential

# Load the lab49 CSV: the first 8 columns are features, the 9th is the label.
raw = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(raw.shape)
features = raw[:, 0:8]
labels = raw[:, 8]

# Same 8 -> 14 -> 8 -> 1 binary classifier as the earlier labs.
net = Sequential()
for layer in (Dense(14, input_dim=8, activation='relu'),
              Dense(8, activation='relu'),
              Dense(1, activation='sigmoid')):
    net.add(layer)
print(net.summary())
net.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Hold out the last 20% of rows as validation data while training.
net.fit(features, labels, epochs=200, batch_size=20,
        validation_split=0.2)
# scores = net.evaluate(features, labels)
# print(scores)
# print(net.metrics_names)
#
# for name, score in zip(net.metrics_names, scores):
#     print(f"{name} value={score}")
```
### lab52
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential
from sklearn.model_selection import train_test_split

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]

# cut data: 80/20 split; stratify keeps the class ratio equal in both parts.
feature_train, feature_test, label_train, label_test = \
    train_test_split(inputList, resultList, test_size=0.2, stratify=resultList)

# Show the class distribution of the full set and of each split.
for d in [resultList, label_train, label_test]:
    classes, counts = numpy.unique(d, return_counts=True)
    for (cl, co) in zip(classes, counts):
        # fixed: nested loop bodies must be indented
        print(f"{int(cl)}==>{co/sum(counts)}")

model = Sequential()
model.add(Dense(14, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
print(model.summary())
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# fit by train/validate data
model.fit(feature_train, label_train, epochs=200, batch_size=20, validation_data=(feature_test, label_test))
scores = model.evaluate(feature_test, label_test)
print(scores)
print(model.metrics_names)
for name, score in zip(model.metrics_names, scores):
    print(f"{name} value={score}")
```
### lab53
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential
from sklearn.model_selection import StratifiedKFold

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]

# 5-fold stratified cross-validation: every fold keeps the class ratio.
fiveFold = StratifiedKFold(n_splits=5, shuffle=True)
totalScores = []


def createModel():
    """Build and compile a fresh 8->14->8->1 binary classifier."""
    m = Sequential()
    m.add(Dense(14, input_dim=8, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return m


# Train a brand-new model on each fold so folds do not leak into each other.
for train, test in fiveFold.split(inputList, resultList):
    model = createModel()
    model.fit(inputList[train], resultList[train], epochs=200, batch_size=20, verbose=0)
    scores = model.evaluate(inputList[test], resultList[test])
    totalScores.append(scores[1] * 100)  # fold accuracy as a percentage
    print(scores)
    print(model.metrics_names)
    for name, score in zip(model.metrics_names, scores):
        print(f"{name} value={score}")

print(f"total mean={numpy.mean(totalScores)}, std={numpy.std(totalScores)}")
```
### lab54
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential
from keras.models import save_model, load_model
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import StratifiedKFold, cross_val_score

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]


def createModel():
    """Build and compile the 8->14->8->1 classifier for the sklearn wrapper."""
    m = Sequential()
    m.add(Dense(14, input_dim=8, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return m


# Wrap the Keras model so sklearn's cross_val_score can drive training.
model1 = KerasClassifier(build_fn=createModel, epochs=200, batch_size=20, verbose=0)
fiveFold = StratifiedKFold(n_splits=5, shuffle=True)
results = cross_val_score(model1, inputList, resultList, cv=fiveFold)
print(f"result mean={results.mean()}, std={results.std()}")
```
### lab55
```python=
import numpy
from keras.layers import Dense
from keras.models import Sequential
from keras.models import save_model, load_model
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV

# Load the lab49 CSV: 8 feature columns followed by 1 binary label column.
dataset1 = numpy.loadtxt('data/lab49.data', delimiter=',', skiprows=1)
print(dataset1.shape)
inputList = dataset1[:, 0:8]
resultList = dataset1[:, 8]


def createModel(optimizer='adam', init='uniform'):
    """Build/compile the classifier; optimizer and weight init are tunable.

    NOTE(review): `init` is applied only to the first Dense layer — confirm
    whether the hidden/output layers were meant to use it too.
    """
    m = Sequential()
    m.add(Dense(14, input_dim=8, kernel_initializer=init, activation='relu'))
    m.add(Dense(8, activation='relu'))
    m.add(Dense(1, activation='sigmoid'))
    print(m.summary())
    m.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return m


model1 = KerasClassifier(build_fn=createModel, epochs=200, batch_size=20, verbose=0)

# Exhaustive grid over optimizer, weight init, epochs, and batch size;
# values in the grid override the defaults given to KerasClassifier above.
optimizers = ['rmsprop', 'adam', 'sgd']
inits = ['normal', 'uniform']
epochs = [50, 100, 150]
batches = [5, 10, 15]
param_grid = dict(optimizer=optimizers, epochs=epochs, batch_size=batches, init=inits)
grid = GridSearchCV(estimator=model1, param_grid=param_grid)
grid_result = grid.fit(inputList, resultList)
```
### start jupyter
```
jupyter-notebook
```
### iris
* [https://archive.ics.uci.edu/ml/machine-learning-databases/iris/](https://archive.ics.uci.edu/ml/machine-learning-databases/iris/)
* [https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data](https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data)
### lab56
```python=
from pandas import read_csv
import numpy
from sklearn.model_selection import KFold, cross_val_score
from sklearn.preprocessing import LabelEncoder
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense
# NOTE(review): this lab imports KerasClassifier from tensorflow.python.keras
# while earlier labs use keras.wrappers.scikit_learn — confirm intentional.
from tensorflow.python.keras.wrappers.scikit_learn import KerasClassifier

# Iris CSV: 4 numeric feature columns + 1 string class column, no header.
dataFrame = read_csv('data/iris.data', header=None)
print(dataFrame.shape)
dataset = dataFrame.values
features = dataset[:, 0:4].astype(float)
labels = dataset[:, 4]
print(features[:10])
print(numpy.unique(labels, return_counts=True))

# Encode class names to ints, then one-hot for categorical_crossentropy.
encoder = LabelEncoder()
encoder.fit(labels)
encoded_Y = encoder.transform(labels)
print(numpy.unique(encoded_Y, return_counts=True))
dummy_y = np_utils.to_categorical(encoded_Y)
print(dummy_y[:5], dummy_y[50:55], dummy_y[100:105])


def baseline_model():
    """Build/compile a 4->8->3 softmax classifier for the 3 iris classes."""
    model = Sequential()
    model.add(Dense(8, input_dim=4, activation='relu'))
    model.add(Dense(3, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    print(model.summary())
    return model


estimator = KerasClassifier(build_fn=baseline_model,
                            epochs=200, batch_size=10, verbose=1)
kfold = KFold(n_splits=3, shuffle=True)
results = cross_val_score(estimator, features, dummy_y, cv=kfold)
print(f"Acc: {results.mean()}, std:{results.std()}")
```
### lab57
```python=
# Raw class scores (logits) used below to compare softmax vs. a plain ratio.
scores = [3.0, 1.0, 2.0]
import numpy as np
import tensorflow as tf
def manualSoftmax(x):
    """Return the softmax of sequence *x* as a numpy array (entries sum to 1)."""
    ax = np.array(x)
    # fixed: function body must be indented
    return np.exp(ax) / np.sum(np.exp(ax), axis=0)
def manualNoMax(x):
    """Return each element of *x* as a plain fraction of the total (no exp)."""
    ax = np.array(x)
    # fixed: function body must be indented
    return ax / np.sum(ax, axis=0)
# Compare the three encodings of the same scores; softmax exaggerates the
# gaps between values, and tf.nn.softmax should match the manual version.
print(f"using softmax={manualSoftmax(scores)}")
print(f"normal ratio={manualNoMax(scores)}")
print(tf.nn.softmax(scores).numpy())
```
### lab58
```python=
import numpy
from keras.datasets import imdb
from matplotlib import pyplot

# Merge the predefined IMDB train/test halves into one corpus for inspection.
(X_train, y_train), (X_test, y_test) = imdb.load_data()
reviews = numpy.concatenate((X_train, X_test), axis=0)
sentiments = numpy.concatenate((y_train, y_test), axis=0)
print(reviews[0])
print(reviews[1])
print(reviews.shape)
print(sentiments.shape)
print(numpy.unique(sentiments, return_counts=True))

# How many distinct word ids appear across every review?
print("distinct x=")
print(len(numpy.unique(numpy.hstack(reviews))))

# Review-length distribution: summary stats plus a boxplot and a histogram.
lengths = [len(review) for review in reviews]
print(f"mean={numpy.mean(lengths)}, std={numpy.std(lengths)}")
pyplot.subplot(121)
pyplot.boxplot(lengths)
pyplot.subplot(122)
pyplot.hist(lengths)
pyplot.show()
```
### lab59
```python=
import numpy as np
from keras import layers, models
from keras.datasets import imdb

# Keep only the 10,000 most frequent words of the IMDB vocabulary.
(train_data, train_labels), (test_data, test_labels) = \
    imdb.load_data(num_words=10000)
print(train_data[0])

# Map word indices back to words; indices are offset by 3 reserved tokens.
word_index = imdb.get_word_index()
reverse_word_index = dict([(v, k) for k, v in word_index.items()])
for i in range(5):
    # fixed: indented loop body; the comprehension variable shadowed the
    # outer loop index `i`, so it is renamed to `w` for clarity
    decoded_review = ' '.join([reverse_word_index.get(w - 3, '?')
                               for w in train_data[i]])
    print(decoded_review)
def vectorize_sequence(sequences, dimension=10000):
    """Multi-hot encode *sequences*: row i gets 1.0 at each index in sequences[i].

    Returns a float array of shape (len(sequences), dimension).
    """
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        # fixed: function and loop bodies must be indented
        results[i, sequence] = 1.
    return results
x_train = vectorize_sequence(train_data)
x_test = vectorize_sequence(test_data)
# consistency fix: use np.asarray for both label arrays (was np.array / np.asarray)
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
print(x_train[0])

# Two 16-unit ReLU layers feeding a single sigmoid output for binary sentiment.
model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])
print(model.summary())
model.fit(x_train, y_train, epochs=30, batch_size=100,
          validation_data=(x_test, y_test))
```
### lab60
```python
# Demonstrates floating-point accumulation error: adding 1e-7 one million
# times should add exactly 0.1, and subtracting 0.1 should then return the
# starting value unchanged.  The observed error grows with the magnitude of
# the start value y (the numbers in the comments record how many digits
# survived for each choice of y).
#y = 1 # 10th
#y = 0.001 # 11 th
#y = 1000 # 8
#y = 1000000 # 7
y = 10000000 #4


def calculate(x):
    """Add 1e-7 a million times, subtract 0.1 once; ideally returns x."""
    for i in range(0, 1000000):
        # fixed: function and loop bodies must be indented
        x += 0.0000001
    x -= 0.1
    return x


print(calculate(y))
```
### lab61
```python
import matplotlib.pyplot as plt
from keras import datasets

# MNIST: 28x28 grayscale digit images with integer labels 0-9.
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
print(f"train image shape={train_images.shape}, test image shape={test_images.shape}")
print(f"train label len={len(train_labels)}, test label len={len(test_labels)}")


def plotImage(index):
    """Show training image *index* with its label in the title."""
    plt.title(f"{index} the train image marked as {train_labels[index]}")
    plt.imshow(train_images[index], cmap='binary')
    plt.show()


def plotTestImage(index):
    """Show test image *index* with its label in the title."""
    plt.title(f"{index} the test image marked as {test_labels[index]}")
    plt.imshow(test_images[index], cmap='binary')
    plt.show()


plotImage(200)
plotTestImage(200)
```
### lab62
```python=
import tensorflow as tf
import numpy as np
from keras import datasets, utils, layers, models, Sequential

(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()

# Flatten each 28x28 image into a 784-element vector.
FLATTEN_DIM = 28 * 28
TRAINING_SIZE = len(train_images)
TEST_SIZE = len(test_images)
trainImages = np.reshape(train_images, (TRAINING_SIZE, FLATTEN_DIM))
testImages = np.reshape(test_images, (TEST_SIZE, FLATTEN_DIM))
print(type(trainImages[0]))
print(np.unique(trainImages[0], return_counts=True))

# Scale pixel values from 0-255 ints to 0-1 floats.
trainImages = trainImages.astype(np.float32)
testImages = testImages.astype(np.float32)
trainImages /= 255
testImages /= 255
NUM_DIGITS = 10
print(trainImages)

# One-hot encode the 0-9 digit labels for categorical_crossentropy.
trainLabels = utils.to_categorical(train_labels, NUM_DIGITS)
testLabels = utils.to_categorical(test_labels, NUM_DIGITS)

model = Sequential()
model.add(layers.Dense(units=128, activation=tf.nn.relu,
                       input_shape=(FLATTEN_DIM,)))
model.add(layers.Dense(units=64, activation=tf.nn.relu))
model.add(layers.Dense(units=10, activation=tf.nn.softmax))
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
print(model.summary())
model.fit(trainImages, trainLabels, epochs=10)

# fixed: Sequential.predict_classes / predict_proba were deprecated and then
# removed from Keras in TF 2.x; derive the same values from predict().
probabilities = model.predict(testImages)
predictLabels = np.argmax(probabilities, axis=1)
print(f"predict class result={predictLabels[:10]}")
predict2 = probabilities
print(f"predict prob result = {predict2[:10]}")
predict3 = model.predict(testImages)
print(f"type={predict3[:10]}")
loss, accuracy = model.evaluate(testImages, testLabels)
print("test accuracy:%.4f"%(accuracy))
```
### lab63
```python=
import tensorflow as tf
import numpy as np
from keras import datasets, utils, layers, models, Sequential, callbacks

# Load MNIST and flatten each 28x28 image into a 784-element vector.
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
FLATTEN_DIM = 28 * 28
TRAINING_SIZE = len(train_images)
TEST_SIZE = len(test_images)
trainImages = np.reshape(train_images, (TRAINING_SIZE, FLATTEN_DIM))
testImages = np.reshape(test_images, (TEST_SIZE, FLATTEN_DIM))
print(type(trainImages[0]))
print(np.unique(trainImages[0], return_counts=True))

# Scale pixel values from [0, 255] down to [0, 1].
trainImages = trainImages.astype(np.float32)
testImages = testImages.astype(np.float32)
trainImages /= 255
testImages /= 255

NUM_DIGITS = 10
print(trainImages)
# One-hot encode the digit labels (10 classes).
trainLabels = utils.to_categorical(train_labels, NUM_DIGITS)
testLabels = utils.to_categorical(test_labels, NUM_DIGITS)

# Same 784 -> 128 -> 64 -> 10 classifier as lab62.
model = Sequential()
model.add(layers.Dense(units=128, activation=tf.nn.relu,
                       input_shape=(FLATTEN_DIM,)))
model.add(layers.Dense(units=64, activation=tf.nn.relu))
model.add(layers.Dense(units=10, activation=tf.nn.softmax))
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
print(model.summary())

# Write training logs for TensorBoard (view with: tensorboard --logdir=logs/lab63).
tbCallback = callbacks.TensorBoard(log_dir='logs/lab63/',
                                   histogram_freq=0,
                                   write_graph=True,
                                   write_images=True)
model.fit(trainImages, trainLabels, epochs=10, callbacks=[tbCallback])

# Sequential.predict_classes()/predict_proba() were removed in TF >= 2.6;
# derive the class ids from the softmax probabilities instead.
predict2 = model.predict(testImages)
predictLabels = np.argmax(predict2, axis=1)
print(f"predict class result={predictLabels[:10]}")
print(f"predict prob result = {predict2[:10]}")
predict3 = model.predict(testImages)
print(f"type={predict3[:10]}")
loss, accuracy = model.evaluate(testImages, testLabels)
print("test accuracy:%.4f" % (accuracy))
```
* tensorboard --logdir=logs\lab63
#### cell1
```
import tensorflow as tf
import numpy as np
from keras import datasets, utils, layers, models, Sequential, callbacks

# Load MNIST and flatten each 28x28 image into a 784-element vector.
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
FLATTEN_DIM = 28 * 28
TRAINING_SIZE = len(train_images)
TEST_SIZE = len(test_images)
trainImages = np.reshape(train_images, (TRAINING_SIZE, FLATTEN_DIM))
testImages = np.reshape(test_images, (TEST_SIZE, FLATTEN_DIM))
print(type(trainImages[0]))
print(np.unique(trainImages[0], return_counts=True))

# Scale pixel values from [0, 255] down to [0, 1].
trainImages = trainImages.astype(np.float32)
testImages = testImages.astype(np.float32)
trainImages /= 255
testImages /= 255

NUM_DIGITS = 10
print(trainImages)
# One-hot encode the digit labels (10 classes).
trainLabels = utils.to_categorical(train_labels, NUM_DIGITS)
testLabels = utils.to_categorical(test_labels, NUM_DIGITS)

# 784 -> 128 -> 64 -> 10 fully connected classifier with a softmax output.
model = Sequential()
model.add(layers.Dense(units=128, activation=tf.nn.relu,
                       input_shape=(FLATTEN_DIM,)))
model.add(layers.Dense(units=64, activation=tf.nn.relu))
model.add(layers.Dense(units=10, activation=tf.nn.softmax))
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
print(model.summary())
model.fit(trainImages, trainLabels, epochs=10)
# cell2
# predict_classes() was removed in TF >= 2.6; take the argmax of the
# softmax probabilities to get the predicted class ids.
predictLabels = np.argmax(model.predict(testImages), axis=1)
print("first 10 result=", predictLabels[:10])
# cell3
import matplotlib.pyplot as plt
def plotTestImage(index):
    """Display the test image at `index`, titled with its true label."""
    plt.title(f"{index} the test image marked as {test_labels[index]}")
    plt.imshow(test_images[index], cmap='binary')
    plt.show()
# execute
plotTestImage(0)
# cell4
import pandas as pd
# Confusion matrix: true label (rows) vs predicted class (columns).
pd.crosstab(test_labels, predictLabels, rownames=['label'], colnames=['predict'])
# cell5
measure1 = pd.DataFrame({'label': test_labels, 'predict': predictLabels})
measure1[:10]
# cell6
# Inspect the misclassified cases: images whose true label is 9 but were
# predicted as 4 (the bare expressions are notebook display output).
measure1[(measure1.label == 9) & (measure1.predict == 4)]
measure1[(measure1.label == 9) & (measure1.predict == 4)].index
error49 = measure1[(measure1.label == 9) & (measure1.predict == 4)].index
for i in error49:
    plotTestImage(i)
```
### lab65
```
import tensorflow as tf

# Minimal tf.nn.conv2d demo on a hand-built [2, 2, 2, 2] input tensor.
temp = tf.constant([0, 1, 0, 1, 2, 1, 1, 0, 3, 1, 1, 0, 4, 4, 5, 4], tf.float32)
temp2 = tf.reshape(temp, [2, 2, 2, 2])
print(temp2)

# Convolution kernel; named `kernel` to avoid shadowing the builtin `filter`.
kernel = tf.constant([1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], tf.float32)
kernel2 = tf.reshape(kernel, [2, 2, 2, 2])
print(kernel2)

# Stride 1 in every dimension, no padding (VALID).
convolution = tf.nn.conv2d(temp2, kernel2, [1, 1, 1, 1], padding='VALID')
print(convolution.numpy())
```
### lab66
```python=
from keras import layers
from keras import models
from keras.datasets import mnist
from keras.utils import to_categorical

# Small convnet for MNIST: three conv stages followed by a dense classifier.
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    layers.MaxPool2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPool2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.Flatten(),
    layers.Dense(64, activation='relu'),
    layers.Dense(10, activation='softmax'),
])
print(model.summary())

# Load the data, add the trailing channel axis, and scale pixels to [0, 1].
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
train_images = train_images.reshape((60000, 28, 28, 1)).astype('float32') / 255
test_images = test_images.reshape((10000, 28, 28, 1)).astype('float32') / 255

# One-hot encode the digit labels for categorical cross-entropy.
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)

model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(train_images, train_labels, epochs=5, batch_size=64)
```