Posting part of the code for reference:
import numpy as np

# data normalization (min-max scaling to [0, 1])
def normalization(data):
    _range = np.max(data) - np.min(data)
    return (data - np.min(data)) / _range

# sigmoid function
def sigmoid(z):
    a = 1 / (1 + np.exp(-z))
    return a
# initialize the parameters with zeros
def initialize_with_zeros(dim):
    w = np.zeros((dim, 1))
    b = 0
    return w, b
def propagate(w, b, X, Y):
    # number of samples
    m = X.shape[1]
    # forward propagation, cross-entropy cost
    A = sigmoid(np.dot(w.T, X) + b)
    cost = -(np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A))) / m
    # backward propagation
    dZ = A - Y
    dw = (np.dot(X, dZ.T)) / m
    db = (np.sum(dZ)) / m
    # return the gradients
    grads = {"dw": dw, "db": db}
    return grads, cost
# define the optimize function (batch gradient descent)
def optimize(w, b, X, Y, num_iterations, learning_rate):
    costs = []
    for i in range(num_iterations):
        grads, cost = propagate(w, b, X, Y)
        dw = grads["dw"]
        db = grads["db"]
        # update the parameters
        w = w - learning_rate * dw
        b = b - learning_rate * db
        # record the cost every 100 iterations
        if i % 100 == 0:
            costs.append(cost)
    params = {"w": w, "b": b}
    grads = {"dw": dw, "db": db}
    return params, grads, costs
def predict(w, b, X):
    m = X.shape[1]
    Y_prediction = np.zeros((1, m))
    A = sigmoid(np.dot(w.T, X) + b)
    # threshold the activations at 0.5
    for i in range(m):
        if A[0, i] > 0.5:
            Y_prediction[0, i] = 1
        else:
            Y_prediction[0, i] = 0
    return Y_prediction
def logistic_model(X_train, Y_train, X_test, Y_test, learning_rate, num_iterations):
    dim = X_train.shape[0]
    W, b = initialize_with_zeros(dim)
    params, grads, costs = optimize(W, b, X_train, Y_train, num_iterations, learning_rate)
    W = params["w"]
    b = params["b"]
    prediction_train = predict(W, b, X_train)
    prediction_test = predict(W, b, X_test)
    accuracy_train = 1 - np.mean(np.abs(prediction_train - Y_train))
    accuracy_test = 1 - np.mean(np.abs(prediction_test - Y_test))
    print("Accuracy on train set:", accuracy_train)
    print("Accuracy on test set:", accuracy_test)
    d = {"costs": costs,
         "Y_prediction_test": prediction_test,
         "Y_prediction_train": prediction_train,
         "w": W,
         "b": b,
         "learning_rate": learning_rate,
         "num_iterations": num_iterations,
         }
    return d
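
If you want to run it end to end, here is a minimal usage sketch on synthetic data (the feature count, sample sizes, seed, and hyperparameter values below are made-up illustrative assumptions, not from the dataset I actually used):

import numpy as np

# toy, roughly linearly separable data; shapes follow the (features, samples) convention above
np.random.seed(0)
X_train = np.random.randn(2, 200)   # 2 features x 200 training samples (illustrative)
Y_train = (X_train[0, :] + X_train[1, :] > 0).astype(float).reshape(1, -1)
X_test = np.random.randn(2, 50)     # 2 features x 50 test samples (illustrative)
Y_test = (X_test[0, :] + X_test[1, :] > 0).astype(float).reshape(1, -1)

d = logistic_model(X_train, Y_train, X_test, Y_test,
                   learning_rate=0.1, num_iterations=2000)
print(d["costs"][:3])   # cost is recorded every 100 iterations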
【 Quoting puke's post: 】
: Writing it all by hand in Python? Surely you can at least use numpy?
--
FROM 219.239.227.*