提交 1960ddab authored 作者: Cesar Laurent's avatar Cesar Laurent

Removed downsample.py

上级 589fc4b0
......@@ -187,7 +187,7 @@
<Compile Include="theano\tensor\sharedvar.py" />
<Compile Include="theano\tensor\shared_randomstreams.py" />
<Compile Include="theano\tensor\signal\conv.py" />
<Compile Include="theano\tensor\signal\downsample.py" />
<Compile Include="theano\tensor\signal\pool.py" />
<Compile Include="theano\tensor\signal\__init__.py" />
<Compile Include="theano\tensor\tensor_grad.py" />
<Compile Include="theano\tensor\xlogx.py" />
......
......@@ -20,5 +20,4 @@ forms of signal processing.
:maxdepth: 1
conv
downsample
pool
from __future__ import absolute_import, print_function, division
from . import pool
import warnings
# Emit a notice at import time: this module is now only a
# backward-compatibility shim over theano.tensor.signal.pool.
warnings.warn(
    "downsample module has been moved to the theano.tensor.signal.pool module.")

# Backward-compatible aliases mapping the old downsample names onto their
# replacements in the pool module, so existing user code keeps working.
max_pool_2d_same_size = pool.max_pool_2d_same_size
max_pool_2d = pool.pool_2d  # old max_pool_2d maps to the generic pool_2d
DownsampleFactorMax = pool.Pool
PoolGrad = pool.PoolGrad
MaxPoolGrad = pool.MaxPoolGrad
AveragePoolGrad = pool.AveragePoolGrad
# This is for compatibility with pickled things. It should go away at
# some point.
class DownsampleFactorMaxGrad(object):
    """Deprecated factory kept so that old pickles referencing
    ``DownsampleFactorMaxGrad`` can still be unpickled.

    Instantiating this class does not create an instance of it: ``__new__``
    dispatches on ``mode`` and returns one of the replacement gradient ops
    from the pool module instead.
    """

    # Fix: the first argument of __new__ is the class, so it is named
    # ``cls`` (the original code misleadingly called it ``self``).
    def __new__(cls, ds, ignore_border, st=None, padding=(0, 0), mode='max'):
        """Return a MaxPoolGrad for 'max' mode, else an AveragePoolGrad.

        Parameters mirror the old op: ``ds`` downsample factors,
        ``ignore_border`` border handling flag, ``st`` strides,
        ``padding`` pad widths, ``mode`` pooling mode.
        """
        if mode == 'max':
            # MaxPoolGrad takes no ``mode`` argument — max is implied.
            return MaxPoolGrad(ds=ds, ignore_border=ignore_border, st=st,
                               padding=padding)
        # All non-max modes (sum / averages) are handled by AveragePoolGrad.
        return AveragePoolGrad(ds=ds, ignore_border=ignore_border, st=st,
                               padding=padding, mode=mode)


DownsampleFactorMaxGradGrad = pool.DownsampleFactorMaxGradGrad
......@@ -17,8 +17,6 @@ from theano.tensor.signal.pool import (Pool, pool_2d,
max_pool_2d_same_size,
DownsampleFactorMaxGradGrad)
from theano.tensor.signal.downsample import DownsampleFactorMaxGrad
from theano import function
......@@ -876,31 +874,5 @@ class TestDownsampleFactorMax(utt.InferShapeTester):
utt.assert_allclose(o, n)
def test_DownsampleFactorMaxGrad(self):
im = theano.tensor.tensor4()
maxout = theano.tensor.tensor4()
grad = theano.tensor.tensor4()
for mode in ['max', 'sum', 'average_inc_pad', 'average_exc_pad']:
f = theano.function([im, maxout, grad],
DownsampleFactorMaxGrad(ignore_border=False,
mode=mode)(im, maxout,
grad,
(3, 3)),
on_unused_input='ignore')
if mode == 'max':
assert any(isinstance(n.op, MaxPoolGrad)
for n in f.maker.fgraph.toposort())
assert not any(isinstance(n.op, AveragePoolGrad)
for n in f.maker.fgraph.toposort())
else:
assert not any(isinstance(n.op, MaxPoolGrad)
for n in f.maker.fgraph.toposort())
assert any(isinstance(n.op, AveragePoolGrad)
for n in f.maker.fgraph.toposort())
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论