Commit ae18fb3c authored by Frederic Bastien

manual merge.

......@@ -73,6 +73,8 @@ Software Requirements
- g++, python-dev (optional, to compile generated C code)
- nose, for testing
- `psyco <http://psyco.sourceforge.net/>`__ can make your python code much faster, if you are on a 32-bit x86 architecture. If you use compiled C code, this can be less important.
Downloading Theano
......@@ -89,7 +91,7 @@ Get the source and run the tests like this:
hg clone http://pylearn.org/hg/theano theano
cd theano
nosetest
nosetests
To update your library to the latest on pylearn.org, change directory (`cd`) to this `theano` folder and type
......
......@@ -25,7 +25,7 @@ Our project uses the name to honour the ancient Greek mathematician.
Overview
========
**To get up & running quickly** see README_.
**To get up and running quickly** see README_.
All **documentation** can be reached from the `Theano Project Documentation Overview`_.
......
......@@ -44,7 +44,9 @@ from compile import \
predefined_modes, predefined_linkers, predefined_optimizers, \
FunctionMaker, function, OpFromGraph, \
Component, External, Member, KitComponent, Method, \
Composite, ComponentList, Module, FancyModule
Composite, ComponentList, ComponentDict, Module
FancyModule = Module
from printing import \
pprint, pp
......
Diff is collapsed.
......@@ -246,6 +246,7 @@ class LocalLinker(Linker):
class PerformLinker(LocalLinker):
"""WRITEME
Basic L{Linker} subclass that calls the perform method on each L{Op} in
the L{Env} in the order given by L{Env.toposort}.
"""
......@@ -254,6 +255,13 @@ class PerformLinker(LocalLinker):
self.env = None
def accept(self, env, no_recycling = []):
"""
:param env: a PerformLinker can have accepted one Env instance at a time.
:param no_recycling: WRITEME
:returns: self (WHY? Who calls this function?)
"""
if self.env is not None and self.env is not env:
return type(self)().accept(env, no_recycling)
#raise Exception("Cannot accept from a Linker that is already tied to another Env.")
......@@ -262,6 +270,14 @@ class PerformLinker(LocalLinker):
return self
def make_all(self, profiler = None, input_storage = None, output_storage = None):
"""
:param profiler: WRITEME
:param input_storage: WRITEME
:param output_storage: WRITEME
:returns: WRITEME (or see: SOMETHING)
"""
env = self.env
order = env.toposort()
no_recycling = self.no_recycling
......
"""Pretty-printing graphs, and the 'Print' Op.
"""
import gof
from copy import copy
import sys
from gof import Op, Apply
class Print(Op):
    """This identity-like Op has the side effect of printing a message followed by its inputs
    when it runs.

    The output is a view of the input (see ``view_map``), so no copy is made;
    the value passes through unchanged.
    """
    def __init__(self,message=""):
        # Text printed before the input value on every execution.
        self.message=message
        # Output 0 aliases input 0 -- identity, no copy.
        self.view_map={0:[0]}
    def make_node(self,xin):
        # The output has the same type as the input (identity graph node).
        xout = xin.type.make_result()
        return Apply(op = self, inputs = [xin], outputs=[xout])
    def perform(self,node,inputs,output_storage):
        xin, = inputs
        xout, = output_storage
        # Pass the input through unchanged...
        xout[0] = xin
        # ...and print it, prefixed by the configured message (Python 2 print).
        print self.message,xin
    def grad(self,input,output_gradients):
        # Identity Op: gradients flow through unchanged.
        return output_gradients
class PrinterState(gof.utils.scratchpad):
......@@ -232,3 +255,4 @@ pprint.assign(lambda pstate, r: hasattr(pstate, 'target') and pstate.target is n
pp = pprint
......@@ -21,7 +21,7 @@ from .. import scalar as scal
from ..gof.python25 import partial
from .. import compile, printing
from ..printing import pprint
from ..printing import pprint, Print
### set up the external interface
......@@ -456,10 +456,11 @@ class _tensor_py_operators:
def __abs__(self): return abs_(self)
def __neg__(self): return neg(self)
#CASTS
def __int__(self): return AsInt(self).out
def __float__(self): return AsInt(self).out
def __complex__(self): return AsComplex(self).out
#CASTS
#### REMOVED THESE BECAUSE PYTHON appears to require __int__ to return an int. -JB 20081112
#def __int__(self): return convert_to_int32(self)
#def __float__(self): return convert_to_float64(self)
#def __complex__(self): return convert_to_complex128(self)
#COMPARISONS
def __lt__(self,other): return lt(self, other)
......@@ -712,7 +713,7 @@ class Shape(Op):
x = as_tensor(x)
return Apply(self, [x], [lvector()])
def perform(self, node, (x, ), (out, )):
out[0] = numpy.asarray(x.shape)
out[0] = numpy.asarray(x.shape, dtype = 'int64')
def grad(self, (x,), (gz,)):
return [None]
@_redefine_asRoutine(Shape())
......@@ -1012,6 +1013,10 @@ pprint.assign(Sum(), printing.FunctionPrinter('sum'))
@constructor
def mean(input, axis = None):
"""WRITEME"""
if str(input.dtype).startswith('int'):
# we need to cast eventually anyway, and this helps
# to prevents overflow
input = convert_to_float64(input)
s = sum(input, axis)
shp = shape(input)
if axis is None:
......@@ -1589,7 +1594,7 @@ def concatenate(tensor_list, axis=0):
if not isinstance(tensor_list, (tuple, list)):
raise TypeError("The 'tensors' argument must be either a tuple "
"or a list, make sure you did not forget () or [] around "
"arguments of concatenate.", tensors)
"arguments of concatenate.", tensor_list)
return join(axis, *tensor_list)
def get_vector_length(v):
......
......@@ -55,8 +55,9 @@ class RandomFunction(gof.Op):
r = copy(r)
rout[0] = r
rval = self.fn(r, *(args + [shape]))
if not isinstance(rval, numpy.ndarray):
out[0] = numpy.asarray(rval, dtype = node.outputs[0].type.dtype)
if not isinstance(rval, numpy.ndarray) \
or str(rval.dtype) != node.outputs[1].type.dtype:
out[0] = numpy.asarray(rval, dtype = node.outputs[1].type.dtype)
else:
out[0] = rval
......@@ -237,7 +238,7 @@ class RandomKit(SymbolicInputKit):
rk = RandomKit('rk', 0xBAD5EED)
class RModule(compile.FancyModule):
class RModule(compile.Module):
def __init__(self, components = {}, **kwcomponents):
super(RModule, self).__init__(components, **kwcomponents)
......
from xlogx import xlogx
import unittest
from theano import compile
from theano import gradient
from theano.tensor import as_tensor
import theano._test_tensor as TT
import random
import numpy.random
class T_XlogX(unittest.TestCase):
    """Unit tests for the elementwise ``x * log(x)`` Op."""

    def test0(self):
        # x*log(x) is 0 at x == 1, and defined as 0 at x == 0
        # (the Op special-cases 0*log(0)).
        x = as_tensor([1, 0])
        y = xlogx(x)
        y = compile.eval_outputs([y])
        # assertTrue replaces failUnless, which is a deprecated alias
        # (deprecated since Python 2.7, removed in 3.12).
        self.assertTrue(numpy.all(y == numpy.asarray([0, 0.])))

    def test1(self):
        # Wrap a slice of xlogx so verify_grad checks the gradient through
        # both the Op and the subtensor indexing.
        class Dummy(object):
            def make_node(self, a):
                return [xlogx(a)[:,2]]
        TT.verify_grad(self, Dummy(), [numpy.random.rand(3,4)])
import theano
from theano import tensor, scalar
import numpy
class XlogX(scalar.UnaryScalarOp):
    """
    Compute X * log(X), with special case 0 log(0) = 0.
    """
    @staticmethod
    def st_impl(x):
        # lim_{x->0+} x*log(x) = 0, so 0*log(0) is defined as 0 explicitly
        # instead of producing nan from 0 * -inf.
        if x == 0.0:
            return 0.0
        return x * numpy.log(x)
    def impl(self, x):
        # Python fallback implementation; delegates to the static helper.
        return XlogX.st_impl(x)
    def grad(self, (x,), (gz,)):
        # d/dx [x*log(x)] = log(x) + 1; chain rule with incoming gradient gz.
        # NOTE(review): at x == 0 this evaluates log(0) -> -inf; the gradient
        # is not special-cased the way the forward value is -- confirm intended.
        return [gz * (1 + scalar.log(x))]
    def c_code(self, node, name, (x,), (z,), sub):
        # C implementation mirrors st_impl; only float32/float64 are supported.
        if node.inputs[0].type in [scalar.float32, scalar.float64]:
            return """%(z)s =
                %(x)s == 0.0
                ? 0.0
                : %(x)s * log(%(x)s);""" % locals()
        raise NotImplementedError('only floatingpoint is implemented')
# Scalar Op instance; upgrade_to_float promotes integer inputs to float
# before the computation (log is not defined for integer dtypes here).
scalar_xlogx = XlogX(scalar.upgrade_to_float, name='scalar_xlogx')
# Elementwise (tensor) version built from the scalar Op.
xlogx = tensor.Elemwise(scalar_xlogx, name='xlogx')
Markdown formatting supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment