提交 b7a65bb5 authored 作者: Ian Goodfellow's avatar Ian Goodfellow

pep8 ConvGrad3D

上级 f444ede4
......@@ -11,7 +11,7 @@ from theano.gradient import DisconnectedType
class ConvGrad3D(theano.Op):
""" Gradient of Conv3D with respect to W """
def __eq__(self, other):
    """Two ConvGrad3D instances are interchangeable: the op holds no
    state, so equality is decided by type alone."""
    return type(self) == type(other)
def __hash__(self):
......@@ -29,26 +29,26 @@ class ConvGrad3D(theano.Op):
return theano.Apply(self, inputs=[V_, d_, WShape_, dCdH_], outputs = [ T.TensorType(V_.dtype, (False,False,False,False,False))() ] )
def infer_shape(self, node, input_shapes):
    """Return the output shape: the requested filter shape.

    The output (dC/dW) has exactly the shape given by the symbolic
    W_shape vector (third input), so `input_shapes` is unused — the
    answer comes from the value of W_shape, not from input shapes.
    """
    V, d, W_shape, dCdH = node.inputs
    return [(W_shape[0], W_shape[1], W_shape[2], W_shape[3], W_shape[4])]
def connection_pattern(self, node):
    """Report which inputs influence the single output.

    V (0), the strides d (1) and dCdH (3) all affect the output values;
    the weight-shape input (2) only describes the output's shape, so it
    is reported as disconnected.
    """
    connected = (True, True, False, True)
    return [[flag] for flag in connected]
def grad(self, inputs, output_gradients):
    """Gradients of the cost with respect to each input of ConvGrad3D.

    inputs: [C (the video batch V), d (strides), WShape, B (dCdH)]
    output_gradients: [dLdA], the gradient w.r.t. this op's output.
    Returns one gradient term per input, in order.
    """
    C, d, WShape, B = inputs
    dLdA, = output_gradients
    # Zero bias: convTransp3D requires a bias argument, but the
    # gradient w.r.t. C must not include a bias contribution.
    z = T.zeros_like(C[0, 0, 0, 0, :])
    dLdC = convTransp3D(dLdA, z, d, B, C.shape[1:4])
    # d actually does affect the outputs, so it's not disconnected —
    # but a gradient w.r.t. integer strides is undefined.
    dLdd = grad_undefined(self, 1, d)
    # The shape of the weights doesn't affect the output elements.
    dLdWShape = DisconnectedType()()
    dLdB = conv3D(C, dLdA, T.zeros_like(B[0, 0, 0, 0, :]), d)
    return [dLdC, dLdd, dLdWShape, dLdB]
def perform(self, node, inputs, output_storage):
V, d, WShape, dCdH = inputs
......@@ -72,17 +72,15 @@ class ConvGrad3D(theano.Op):
# Naive 9-deep reference loop: accumulate dC/dW by correlating the
# output gradient dCdH with the input V at the sampled positions.
# NOTE(review): assumes dr, dc, dt are the row/col/time strides and
# batchSize/outputHeight/outputWidth/outputDur were unpacked from the
# inputs in the lines above (not visible here) — TODO confirm.
for k in xrange(0, WShape[1]):
    for l in xrange(0, WShape[2]):
        for m in xrange(0, WShape[3]):
            for i in xrange(0, batchSize):
                for p in xrange(0, outputHeight):
                    for q in xrange(0, outputWidth):
                        for r in xrange(0, outputDur):
                            for j in xrange(0, WShape[0]):
                                for z in xrange(0, WShape[4]):
                                    dCdW[j, k, l, m, z] += dCdH[i, p, q, r, j] * V[i, dr * p + k, dc * q + l, dt * r + m, z]
output_storage[0][0] = dCdW
......@@ -97,7 +95,7 @@ class ConvGrad3D(theano.Op):
dCdW = outputs[0]
codeSource = """
codeSource = """
///////////// < code generated by ConvGradW3D >
//printf("\t\t\t\tConvGradW3D c code\\n");
......@@ -277,7 +275,7 @@ class ConvGrad3D(theano.Op):
///////////// < /code generated by ConvGradW3D >
"""
return strutil.renderString(codeSource,locals())
return strutil.renderString(codeSource, locals())
convGrad3D = ConvGrad3D()
......
Markdown 格式
0%
您将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论