提交 cdffaa87 authored 作者: James Bergstra's avatar James Bergstra

Moved max_pool() from downsample to test_downsample. The main purpose of this function is to flatten the output, but I don't think that is generally wanted or useful. The old tests use it, but I don't think people should be left wondering whether they should use DownsampleFactorMax or max_pool.
上级 cb0f29e3
...@@ -304,37 +304,3 @@ class DownsampleFactorMax(Op): ...@@ -304,37 +304,3 @@ class DownsampleFactorMax(Op):
def c_code_cache_version(self): def c_code_cache_version(self):
return () return ()
def max_pool(images=None, imshp=None, maxpoolshp=None, ignore_border=True):
    """Implements a max pooling layer.

    Uses the same API as sp.max_pool but uses the Downsample op instead.
    Takes as input a 2D tensor of shape batch_size x img_size, reshapes it
    to the logical image shape, max pools over areas of shape maxpoolshp,
    and flattens the result back to 2D. Max pooling downsamples by taking
    the max value in a given area. Outputs a 2D tensor of shape
    batch_size x output_size.

    Parameters are keyword arguments in order to use func_to_mod.

    @param images: 2D tensor containing images on which to apply max
                   pooling. Assumed to be of shape batch_size x img_size
    @param imshp: tuple containing the logical image dimensions, either
                  (rows, cols) or (channels, rows, cols)
    @param maxpoolshp: tuple containing shape of area to max pool over
    @param ignore_border: passed through to DownsampleFactorMax; when True,
                          partial pooling regions at the border are dropped
    @output out1: symbolic result (2D tensor)
    @output out2: logical shape of the output
    """
    # Normalize the logical shape to (channels, rows, cols).
    if len(imshp) == 2:
        imshp = (1,) + imshp
    elif len(imshp) != 3:
        raise NotImplementedError(
            "imshp must have 2 or 3 elements, got %s" % (imshp,))
    # all these reshapes should happen in place
    imrshp = tensor.stack(images.shape[0],
                          *[tensor.as_tensor(x) for x in imshp])
    imtensor = tensor.reshape(images, imrshp)
    maxpop = DownsampleFactorMax(maxpoolshp, ignore_border)
    rval = maxpop(imtensor)
    return tensor.flatten(rval, 2), maxpop.out_shape(imshp, maxpoolshp,
                                                     ignore_border)
...@@ -2,9 +2,44 @@ import unittest, sys, time ...@@ -2,9 +2,44 @@ import unittest, sys, time
import numpy as N import numpy as N
import theano.tensor as T import theano.tensor as T
from theano.tests import unittest_tools as utt from theano.tests import unittest_tools as utt
from theano.sandbox.downsample import DownsampleFactorMax, max_pool from theano.sandbox.downsample import DownsampleFactorMax
from theano import function, Mode from theano import function, Mode
def max_pool(images=None, imshp=None, maxpoolshp=None, ignore_border=True):
    """Implements a max pooling layer.

    Uses the same API as sp.max_pool but uses the Downsample op instead.
    Takes as input a 2D tensor of shape batch_size x img_size, reshapes it
    to the logical image shape, max pools over areas of shape maxpoolshp,
    and flattens the result back to 2D. Max pooling downsamples by taking
    the max value in a given area. Outputs a 2D tensor of shape
    batch_size x output_size.

    Parameters are keyword arguments in order to use func_to_mod.

    @param images: 2D tensor containing images on which to apply max
                   pooling. Assumed to be of shape batch_size x img_size
    @param imshp: tuple containing the logical image dimensions, either
                  (rows, cols) or (channels, rows, cols)
    @param maxpoolshp: tuple containing shape of area to max pool over
    @param ignore_border: passed through to DownsampleFactorMax; when True,
                          partial pooling regions at the border are dropped
    @output out1: symbolic result (2D tensor)
    @output out2: logical shape of the output
    """
    # Normalize the logical shape to (channels, rows, cols).
    if len(imshp) == 2:
        imshp = (1,) + imshp
    elif len(imshp) != 3:
        raise NotImplementedError(
            "imshp must have 2 or 3 elements, got %s" % (imshp,))
    # all these reshapes should happen in place
    imrshp = T.stack(images.shape[0],
                     *[T.as_tensor(x) for x in imshp])
    imtensor = T.reshape(images, imrshp)
    maxpop = DownsampleFactorMax(maxpoolshp, ignore_border)
    rval = maxpop(imtensor)
    return T.flatten(rval, 2), maxpop.out_shape(imshp, maxpoolshp,
                                                ignore_border)
class TestDownsampleFactorMax(unittest.TestCase): class TestDownsampleFactorMax(unittest.TestCase):
def test_maxpool(self): def test_maxpool(self):
# generate flatted images # generate flatted images
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论