Commit 50ab0430, authored by Brandon T. Willard

Replace theano.tensor alias T with tt in tests.gof

Parent 2a73520d
import os
import numpy as np
import theano
import theano.tensor as T
import theano.tensor as tt
floatX = "float32"
......@@ -17,18 +17,18 @@ def test_graph_opt_caching():
default = theano.config.cache_optimizations
try:
theano.config.cache_optimizations = True
a = T.fmatrix("a")
b = T.fmatrix("b")
a = tt.fmatrix("a")
b = tt.fmatrix("b")
c = theano.shared(np.ones((10, 10), dtype=floatX))
d = theano.shared(np.ones((10, 10), dtype=floatX))
e = T.sum(T.sum(T.sum(a ** 2 + b) + c) + d)
e = tt.sum(tt.sum(tt.sum(a ** 2 + b) + c) + d)
f1 = theano.function([a, b], e, mode=mode)
m = T.fmatrix("x1")
n = T.fmatrix("x2")
m = tt.fmatrix("x1")
n = tt.fmatrix("x2")
p = theano.shared(np.ones((10, 10), dtype=floatX))
q = theano.shared(np.ones((10, 10), dtype=floatX))
j = T.sum(T.sum(T.sum(m ** 2 + n) + p) + q)
j = tt.sum(tt.sum(tt.sum(m ** 2 + n) + p) + q)
f2 = theano.function([m, n], j, mode=mode)
in1 = np.ones((10, 10), dtype=floatX)
......
......@@ -7,7 +7,7 @@ from theano.gof.op import PureOp
from theano.gof import Apply, generic
from theano import function, Mode
from theano.ifelse import ifelse
import theano.tensor as T
import theano.tensor as tt
class IfElseIfElseIf(PureOp):
......@@ -104,7 +104,7 @@ class NotImplementedOp(PureOp):
def test_ifelse():
a = T.scalar()
a = tt.scalar()
b = generic()
c = generic()
......@@ -143,17 +143,17 @@ def more_complex_test():
notimpl = NotImplementedOp()
ifelseifelseif = IfElseIfElseIf()
x1 = T.scalar("x1")
x2 = T.scalar("x2")
c1 = T.scalar("c1")
c2 = T.scalar("c2")
x1 = tt.scalar("x1")
x2 = tt.scalar("x2")
c1 = tt.scalar("c1")
c2 = tt.scalar("c2")
t1 = ifelse(c1, x1, notimpl(x2))
t1.name = "t1"
t2 = t1 * 10
t2.name = "t2"
t3 = ifelse(c2, t2, x1 + t1)
t3.name = "t3"
t4 = ifelseifelseif(T.eq(x1, x2), x1, T.eq(x1, 5), x2, c2, t3, t3 + 0.5)
t4 = ifelseifelseif(tt.eq(x1, x2), x1, tt.eq(x1, 5), x2, c2, t3, t3 + 0.5)
t4.name = "t4"
f = function([c1, c2, x1, x2], t4, mode=Mode(linker="vm", optimizer="fast_run"))
......
import theano.tensor as tt
from theano.gof.type import Type
from theano.gof.graph import Variable, Apply, Constant
from theano.gof.op import Op
......@@ -16,8 +18,6 @@ from theano.gof.opt import (
)
from theano.gof.fg import FunctionGraph
from theano import tensor as T
def as_variable(x):
if not isinstance(x, Variable):
......@@ -385,9 +385,9 @@ class TestMergeOptimizer:
def est_one_assert_merge(self):
# Merge two nodes, one has assert, the other not.
x1 = T.matrix("x1")
x2 = T.matrix("x2")
e = T.dot(x1, x2) + T.dot(T.opt.assert_op(x1, (x1 > x2).all()), x2)
x1 = tt.matrix("x1")
x2 = tt.matrix("x2")
e = tt.dot(x1, x2) + tt.dot(tt.opt.assert_op(x1, (x1 > x2).all()), x2)
g = FunctionGraph([x1, x2], [e])
MergeOptimizer().optimize(g)
strg = theano.printing.debugprint(g, file="str")
......@@ -407,10 +407,10 @@ class TestMergeOptimizer:
def test_both_assert_merge_identical(self):
# Merge two nodes, both have assert on the same node
# with the same conditions.
x1 = T.matrix("x1")
x2 = T.matrix("x2")
e = T.dot(T.opt.assert_op(x1, (x1 > x2).all()), x2) + T.dot(
T.opt.assert_op(x1, (x1 > x2).all()), x2
x1 = tt.matrix("x1")
x2 = tt.matrix("x2")
e = tt.dot(tt.opt.assert_op(x1, (x1 > x2).all()), x2) + tt.dot(
tt.opt.assert_op(x1, (x1 > x2).all()), x2
)
g = FunctionGraph([x1, x2], [e])
MergeOptimizer().optimize(g)
......@@ -432,11 +432,11 @@ class TestMergeOptimizer:
def est_both_assert_merge_1(self):
# Merge two nodes, both have assert on the same node
# with different conditions.
x1 = T.matrix("x1")
x2 = T.matrix("x2")
x3 = T.matrix("x3")
e = T.dot(T.opt.assert_op(x1, (x1 > x3).all()), x2) + T.dot(
T.opt.assert_op(x1, (x1 > x2).all()), x2
x1 = tt.matrix("x1")
x2 = tt.matrix("x2")
x3 = tt.matrix("x3")
e = tt.dot(tt.opt.assert_op(x1, (x1 > x3).all()), x2) + tt.dot(
tt.opt.assert_op(x1, (x1 > x2).all()), x2
)
g = FunctionGraph([x1, x2, x3], [e])
MergeOptimizer().optimize(g)
......@@ -476,11 +476,11 @@ class TestMergeOptimizer:
def est_both_assert_merge_2(self):
# Merge two nodes, both have assert on different node
x1 = T.matrix("x1")
x2 = T.matrix("x2")
x3 = T.matrix("x3")
e = T.dot(T.opt.assert_op(x1, (x1 > x3).all()), x2) + T.dot(
x1, T.opt.assert_op(x2, (x2 > x3).all())
x1 = tt.matrix("x1")
x2 = tt.matrix("x2")
x3 = tt.matrix("x3")
e = tt.dot(tt.opt.assert_op(x1, (x1 > x3).all()), x2) + tt.dot(
x1, tt.opt.assert_op(x2, (x2 > x3).all())
)
g = FunctionGraph([x1, x2, x3], [e])
MergeOptimizer().optimize(g)
......@@ -506,11 +506,11 @@ class TestMergeOptimizer:
def est_both_assert_merge_2_reverse(self):
# Test case "test_both_assert_merge_2" but in reverse order
x1 = T.matrix("x1")
x2 = T.matrix("x2")
x3 = T.matrix("x3")
e = T.dot(x1, T.opt.assert_op(x2, (x2 > x3).all())) + T.dot(
T.opt.assert_op(x1, (x1 > x3).all()), x2
x1 = tt.matrix("x1")
x2 = tt.matrix("x2")
x3 = tt.matrix("x3")
e = tt.dot(x1, tt.opt.assert_op(x2, (x2 > x3).all())) + tt.dot(
tt.opt.assert_op(x1, (x1 > x3).all()), x2
)
g = FunctionGraph([x1, x2, x3], [e])
MergeOptimizer().optimize(g)
......@@ -610,18 +610,16 @@ class TestEquilibrium(object):
def test_pre_constant_merge_slice():
    """Check that constant-merge helpers handle slice-typed constants.

    Exercises three cases: merging constants feeding a ``MakeSlice`` node,
    merging a ``SliceConstant`` used as an ``AdvancedSubtensor`` index, and
    constant-folding a ``MakeSlice`` node down to a ``SliceConstant``.
    """
    # A MakeSlice node with a constant input must survive pre_constant_merge.
    ms = tt.type_other.MakeSlice()(1)
    pre_constant_merge([ms])

    const_slice = tt.type_other.SliceConstant(
        type=tt.type_other.slicetype, data=slice(1, None, 2)
    )
    # A SliceConstant used as an advanced-indexing argument must also be
    # handled (pre_constant_merge is called on the node's outputs here).
    adv = tt.subtensor.AdvancedSubtensor()(tt.matrix(), [2, 3], const_slice)
    pre_constant_merge(adv)

    # Constant-folding the MakeSlice graph should produce a SliceConstant.
    cst = pre_greedy_local_optimizer([tt.opt.constant_folding], ms)
    assert isinstance(cst, tt.type_other.SliceConstant)

    # Make sure constant of slice signature is hashable.
    hash(cst.signature())
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to post a comment