# NOTE(review): removed Gitee page-scrape artifact ("code pull complete, page will refresh") that made this module un-importable.
from node import *
def conv(feature_maps, input_shape, kernels, kernel_shape, activation):
    """Construct a convolutional layer in the computation graph.

    :param feature_maps: list of input feature-map nodes, each of shape
        ``input_shape``
    :param input_shape: (rows, cols) shape of every input feature map
    :param kernels: number of convolution kernels, i.e. output channels
    :param kernel_shape: (rows, cols) shape of each kernel
    :param activation: "ReLU" or "Logistic" for the matching activation node;
        any other value yields the raw affine output
    :return: list of ``kernels`` output feature-map nodes
    """
    # All-ones matrix with the same shape as the input, used to broadcast
    # the scalar bias over the whole feature map.
    ones = Variable(input_shape, init=False, trainable=False)
    ones.set_value(np.mat(np.ones(input_shape)))
    outputs = []
    for _ in range(kernels):
        channels = []
        for fm in feature_maps:
            # One trainable kernel per (output-channel, input-channel) pair.
            kernel = Variable(kernel_shape, init=True, trainable=True)
            # NOTE: renamed from `conv`, which shadowed this function's name.
            channels.append(Convolve(fm, kernel))
        # Sum the per-input-channel convolutions into a single map
        # (also fixes the original `channles` typo).
        summed = Add(*channels)
        # Scalar bias broadcast across the map via the all-ones matrix.
        bias = ScalarMultiply(Variable((1, 1), init=True, trainable=True), ones)
        affine = Add(summed, bias)
        if activation == "ReLU":
            outputs.append(ReLU(affine))
        elif activation == "Logistic":
            outputs.append(Logistic(affine))
        else:
            outputs.append(affine)
    assert len(outputs) == kernels
    return outputs
def pooling(feature_maps, kernel_shape, stride):
    """Apply max-pooling to every feature map.

    :param feature_maps: list of feature-map nodes to pool
    :param kernel_shape: (rows, cols) size of the pooling window
    :param stride: pooling stride
    :return: list of pooled feature-map nodes, one per input map
    """
    # One MaxPooling node per input map, order preserved.
    return [MaxPooling(fm, size=kernel_shape, stride=stride)
            for fm in feature_maps]
def fc(input, input_size, size, activation):
    """Construct a fully-connected (affine) layer in the computation graph.

    :param input: input node; presumably a column vector of shape
        (input_size, 1) — implied by the weight-matrix dimensions
    :param input_size: dimension of the input vector
    :param size: number of neurons, i.e. output dimension
    :param activation: "ReLU" or "Logistic" for the matching activation node;
        any other value yields the raw affine output
    :return: output node of shape (size, 1)
    """
    # Trainable weight matrix and bias vector for the affine transform.
    weights = Variable((size, input_size), init=True, trainable=True)
    bias = Variable((size, 1), init=True, trainable=True)
    affine = Add(MatMul(weights, input), bias)
    # Dispatch on the activation name; unknown names mean a linear output.
    wrappers = {"ReLU": ReLU, "Logistic": Logistic}
    wrap = wrappers.get(activation)
    return wrap(affine) if wrap is not None else affine
# NOTE(review): removed Gitee content-moderation boilerplate that was scraped
# into this file; it was not part of the source code.