import numpy as np
import matplotlib.pylab as plt
import sklearn
from sklearn import datasets
def load_planer_dataset():
    """Generate the two-class 'flower' toy dataset.

    Returns:
        X: (2, 400) float array of point coordinates, one example per column.
        Y: (1, 400) array of 0/1 class labels.
    """
    np.random.seed(1)  # fixed seed so the dataset is reproducible
    m = 400            # total number of examples
    N = int(400 / 2)   # examples per class
    D = 2              # feature dimensionality
    X = np.zeros((m, D))
    Y = np.zeros((m, 1))
    a = 4              # petal radius scale
    for j in range(2):
        ix = range(N * j, N * (j + 1))
        # Angle and radius with Gaussian jitter; the RNG is consumed in the
        # same order (t noise first, then r noise) to keep output identical.
        t = np.linspace(j * 3.12, (j + 1) * 3.12, N) + np.random.randn(N) * 0.2
        r = a * np.sin(4 * t) + np.random.randn(N) * 0.2
        X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
        Y[ix] = j
    # Transpose to column-per-example layout expected by the network code.
    return X.T, Y.T
def sigmoid(x):
    """Elementwise logistic sigmoid: 1 / (1 + e^(-x))."""
    return 1.0 / (1.0 + np.exp(-x))
def Relu(x):
    """Elementwise rectified linear unit: max(0, x)."""
    return np.maximum(x, 0)
def layer_size(X, Y):
    """Return (input size, hidden size, output size) for the network.

    Input/output sizes come from the data, which is laid out one example
    per column; the hidden size is fixed at 4.
    """
    return X.shape[0], 4, Y.shape[0]
def init_paramters(n_input, n_h, n_output):
    """Randomly initialize the 2-layer network parameters.

    Weights are small Gaussians (scaled by 0.01) to keep tanh/sigmoid out
    of saturation; biases start at zero. The RNG is consumed w1-then-w2,
    matching the previous implementation.
    """
    return {
        "w1": np.random.randn(n_h, n_input) * 0.01,
        "b1": np.zeros((n_h, 1)),
        "w2": np.random.randn(n_output, n_h) * 0.01,
        "b2": np.zeros((n_output, 1)),
    }
def forward_propagation(X, params):
    """Run one forward pass: tanh hidden layer, sigmoid output layer.

    Args:
        X: (n_input, m) inputs, one example per column.
        params: dict with keys 'w1', 'b1', 'w2', 'b2'.

    Returns:
        (A2, cache): output activations of shape (n_output, m) and a dict
        of the intermediate values needed by back-propagation.
    """
    Z1 = params["w1"] @ X + params["b1"]
    A1 = np.tanh(Z1)
    Z2 = params["w2"] @ A1 + params["b2"]
    A2 = sigmoid(Z2)
    return A2, {"Z1": Z1, "A1": A1, "Z2": Z2, "A2": A2}
def compute_cost(A2, Y, params):
    """Mean binary cross-entropy between predictions A2 and labels Y.

    Args:
        A2: (1, m) predicted probabilities.
        Y: (1, m) true 0/1 labels.
        params: unused; kept for interface compatibility with callers.

    Returns:
        The (positive) cross-entropy cost as a plain float.
    """
    log_probs = Y * np.log(A2) + (1 - Y) * np.log(1 - A2)
    return -float(np.squeeze(np.mean(log_probs)))
def backward_propagation(parms, cache, X, Y):
    """Compute gradients of the cross-entropy cost w.r.t. all parameters.

    Uses the cached forward activations. The hidden layer is tanh
    (derivative 1 - A1**2); the output layer is sigmoid with
    cross-entropy loss, so dZ2 simplifies to A2 - Y.

    Returns:
        dict with gradients 'dw1', 'db1', 'dw2', 'db2'.
    """
    m = Y.shape[1]
    A1 = cache["A1"]
    dz2 = cache["A2"] - Y
    dz1 = (parms["w2"].T @ dz2) * (1 - A1 ** 2)
    return {
        "dw1": (dz1 @ X.T) / m,
        "dw2": (dz2 @ A1.T) / m,
        "db1": np.sum(dz1, axis=1, keepdims=True) / m,
        "db2": np.sum(dz2, axis=1, keepdims=True) / m,
    }
def updata_params(params, grads, learning_rate=1.2):
    """Apply one gradient-descent step and return the new parameter dict.

    Args:
        params: current parameters ('w1', 'b1', 'w2', 'b2').
        grads: matching gradients ('dw1', 'db1', 'dw2', 'db2').
        learning_rate: step size.

    Returns:
        A new dict; the input dict is not mutated.
    """
    return {
        key: params[key] - learning_rate * grads["d" + key]
        for key in ("w1", "b1", "w2", "b2")
    }
def nn_modle(X, Y, n_h, num_iterations):
    """Train the 2-layer network with full-batch gradient descent.

    Args:
        X: (n_input, m) training inputs, one example per column.
        Y: (1, m) 0/1 labels.
        n_h: number of hidden units. Bug fix: this was previously ignored —
             the tuple-unpack of layer_size() always overwrote it with the
             hard-coded 4. The caller's value is now honored (the existing
             call site passes 4, so behavior there is unchanged).
        num_iterations: number of gradient-descent steps.

    Returns:
        The trained parameter dict ('w1', 'b1', 'w2', 'b2').
    """
    np.random.seed(3)
    # Take only the input/output sizes from the data; use the caller's n_h.
    n_input, _, n_output = layer_size(X, Y)
    params = init_paramters(n_input, n_h, n_output)
    for i in range(num_iterations):
        A2, cache = forward_propagation(X, params)
        # Cost of the current parameters (A2 was produced by them).
        cost = compute_cost(A2, Y, params)
        grads = backward_propagation(params, cache, X, Y)
        params = updata_params(params, grads)
        if i % 10000 == 0:
            print("第", i, "次循环,成本为:" + str(cost))
    return params
def predit(X, params):
    """Classify each column of X: predicted probability > 0.5 -> 1, else 0."""
    probs, _ = forward_propagation(X, params)
    return np.round(probs)
def plot_decision_boundary(model, X, y):
    """Draw the model's decision regions over the 2-D data plus the points.

    Args:
        model: callable mapping an (n_points, 2) array to class predictions.
        X: (2, m) data, one point per column.
        y: labels used to color the scatter points.
    """
    pad, step = 1, 0.01
    # Dense grid covering the data range, padded by 1 on every side.
    xx, yy = np.meshgrid(
        np.arange(X[0, :].min() - pad, X[0, :].max() + pad, step),
        np.arange(X[1, :].min() - pad, X[1, :].max() + pad, step),
    )
    grid = np.c_[xx.ravel(), yy.ravel()]
    Z = model(grid).reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    plt.scatter(X[0, :], X[1, :], c=np.squeeze(y), cmap=plt.cm.Spectral)
# --- Script entry: train on the planar dataset and measure train accuracy ---
X, Y = load_planer_dataset()
params = nn_modle(X, Y, 4, 100000)
yhat = predit(X, params)
# Bug fix: np.int was deprecated in NumPy 1.20 and removed in 1.24; the
# builtin int is the documented replacement. ('socre' typo fixed as well.)
score = np.mean((yhat == Y).astype(int))
# ---------------------------------------------------------------------------
# NOTE(review): the lines below are residue from the web page this script was
# scraped from (unrelated links, exercises and ads). They are not Python and
# previously made the file a SyntaxError; kept as comments for provenance.
# ---------------------------------------------------------------------------
# 相关知识
# ix = range(n*j,n*(j+1)) t = np.
# 电机扭矩计算公式T=9550P/n怎么算
# python实现元旦倒计时、圣诞树、跨年烟花的绘画马上双旦了给大家带来一些python代码 1.元旦节日倒计时代码的实现
# python 水仙花数 水仙花数是指一个N位正整数(N≥3),它的每个位上的数字的N次幂之和等于它本身。 例如:153=1×1×1+5×5×5+3×3×3。
# 14260 Problem N 花坛
# Python水仙花、冥函数、质因数、完全数案例练习题!
# HDU
# 记凸n边形的对角线的条数为f的表达式为( )A.f(n)=n+1B.f(n)=2n
# 挑战编程 程序设计竞赛训练手册
# 求极限 lim(n→∞) tan^n (π/4 + 2/n) 谢谢!
# 网址: ix = range(n*j,n*(j+1)) t = np. https://m.huajiangbk.com/newsview292779.html
# 上一篇: 野生花卉应用讲解.ppt |
# 下一篇: 花卉商业研究 |