
张觉非 / 计算图框架 (computational graph framework)

This repository has not declared an open-source license file (LICENSE); before using it, check the project description and the upstream dependencies of its code.
test_automata.py 2.70 KB
张觉非 committed on 2019-08-28 12:29: Add RNN.
import numpy as np  # used throughout for np.mat / np.ones / np.mean
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt

from node import *
from util import mnist
from optimizer import *
from layer import *

print(__file__)
train_x, train_y, test_x, test_y = mnist('./data/MNIST')
test_x = test_x[:1000]
test_y = test_y[:1000]
# train_x = train_x[:10000]
# train_y = train_y[:10000]

img_shape = (28, 28)
img = Variable(img_shape, init=False, trainable=False)  # placeholder for a 28x28 image

# all-ones matrix, used to broadcast the scalar bias over the whole image
ones = Variable(img_shape, init=False, trainable=False)
ones.set_value(np.mat(np.ones(img_shape)))
kernels = 4
kernel_list = []
bias_list = []
for k in range(kernels):
    kernel_list.append(Variable((5, 5), init=True, trainable=True))
    bias_list.append(Variable((1, 1), init=True, trainable=True))
steps = 8
x = img
for i in range(steps):
    output_list = []
    for kernel, bias in zip(kernel_list, bias_list):
        output_list.append(ReLU(Add(Convolve(x, kernel), ScalarMultiply(bias, ones))))
    x = Logistic(Add(*output_list))
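
# Illustrative sketch (not part of the original file): roughly what one
# iteration of the stack above computes, on plain NumPy arrays. It assumes
# Convolve is a shape-preserving ("same"-padded) 2D cross-correlation,
# ScalarMultiply broadcasts the 1x1 bias over the all-ones matrix, and
# Logistic is the elementwise sigmoid; the framework's actual node
# implementations may differ.
def _one_iteration_reference(image, kernels, biases):
    from scipy.signal import correlate2d  # local import so the main script does not need SciPy
    outputs = [np.maximum(correlate2d(image, k, mode="same") + b, 0.0)  # ReLU(Convolve + bias)
               for k, b in zip(kernels, biases)]
    return 1.0 / (1.0 + np.exp(-np.sum(outputs, axis=0)))  # Logistic(Add(*outputs))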
fc1 = fc(Flatten(x), 784, 120, "ReLU")  # first fully connected layer
fc2 = fc(fc1, 120, 10, "None")          # second fully connected layer
# class probabilities
prob = SoftMax(fc2)

# training label
label = Variable((10, 1), trainable=False)

# cross-entropy loss
loss = CrossEntropyWithSoftMax(fc2, label)

# Adam optimizer
optimizer = Adam(default_graph, loss, 0.005, batch_size=32)
# training
print("start training", flush=True)
losses = []
for i in range(len(train_x)):
    img.set_value(np.mat(train_x[i, :]).reshape(28, 28))
    label.set_value(np.mat(train_y[i, :]).T)

    # take one optimization step
    optimizer.one_step()

    loss.forward()
    losses.append(loss.value[0, 0])

    # report progress every 100 samples
    if i % 100 == 0:
        percent = int((i + 1) / len(train_x) * 100)
        mean_loss = np.mean(losses)
        losses = []
        print("=" * percent + "> loss:{:.6f} {:d}({:.0f}%)".format(mean_loss, i + 1, percent), flush=True)
        # stop early once the running mean loss is low enough
        if mean_loss < 0.1:
            break
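
# Hedged addition (not in the original script): a quick accuracy check on the
# 1000-sample test slice prepared above, reusing only calls already seen in
# this file (set_value / forward / value) plus the otherwise unused
# accuracy_score import. It assumes test_y rows are one-hot, like train_y;
# the framework may offer a more direct evaluation path.
pred = []
for i in range(len(test_x)):
    img.set_value(np.mat(test_x[i, :]).reshape(28, 28))
    prob.forward()
    pred.append(np.argmax(prob.value))
true = np.argmax(np.array(test_y), axis=1)
print("test accuracy: {:.4f}".format(accuracy_score(true, pred)), flush=True)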
# visualize the effect of the learned rule
shape = (28, 28)
x = Variable(shape, init=False, trainable=False)  # placeholder for a 28x28 image
# x.set_value(np.mat((np.random.rand(shape[0], shape[1]) > 0.5).astype(int)))
x.set_value(np.mat(train_x[200, :]).reshape(28, 28))

# all-ones matrix
ones = Variable(shape, init=False, trainable=False)
ones.set_value(np.mat(np.ones(shape)))

steps = 100
step_list = []
for i in range(steps):
    output_list = []
    for kernel, bias in zip(kernel_list, bias_list):
        output_list.append(Add(Convolve(x, kernel), ScalarMultiply(bias, ones)))
    x = ReLU(Add(*output_list))
    step_list.append(x)
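
# Hedged addition (not in the original script): plt.imsave below does not
# create missing directories, so make sure the output folder exists first.
import os
os.makedirs("pic/automata", exist_ok=True)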
for i, s in enumerate(step_list):
    s.forward()
    plt.imsave("pic/automata/step_{:02d}.png".format(i), s.value, cmap="gray")
Repository: https://gitee.com/zhangjuefei/computing_graph_demo.git (SSH: git@gitee.com:zhangjuefei/computing_graph_demo.git), branch master.