Commit 9733b595 authored by David Warde-Farley, committed by Arnaud Bergeron

dict(zip()) -> dict(izip()) (and OrderedDict)

Parent 30340e72
import theano
from theano import gof
from theano.compat import izip
from theano.compile.function_module import orig_function
from theano.compile import SharedVariable, rebuild_collect_shared
from theano.gof import ops_with_inner_function
......@@ -80,8 +81,8 @@ class OpFromGraph(gof.Op):
if isinstance(var, SharedVariable)]
shared_vars = [var.type() for var in self.shared_inputs]
new = rebuild_collect_shared(outputs, inputs=inputs + shared_vars,
replace=dict(zip(self.shared_inputs,
shared_vars)),
replace=dict(izip(self.shared_inputs,
shared_vars)),
copy_inputs_over=False)
(new_inputs, new_outputs,
[clone_d, update_d, update_expr, shared_inputs]) = new
......@@ -143,8 +144,8 @@ class OpFromGraph(gof.Op):
grad_ops = self.grad_ops
else:
gs = theano.gradient.grad(cost=None,
known_grads=dict(zip(self.new_outputs,
output_grads)),
known_grads=dict(izip(self.new_outputs,
output_grads)),
wrt=self.new_inputs,
disconnected_inputs='ignore')
......
......@@ -1047,11 +1047,11 @@ class FunctionMaker(object):
t2 = removeAllFgraph(t2)
givens = dict(zip(gof.graph.inputs([t1]),
gof.graph.inputs([t2])))
givens = dict(izip(gof.graph.inputs([t1]),
gof.graph.inputs([t2])))
temp = dict(zip(gof.graph.inputs([t1]),
gof.graph.inputs([t2])))
temp = dict(izip(gof.graph.inputs([t1]),
gof.graph.inputs([t2])))
# hack to remove inconstent entry in givens
# seems to work that but source of inconsistency
......
......@@ -17,6 +17,7 @@ import numpy
import theano
from theano import config
from theano.compat import izip
from theano.compat.six import string_types, iteritems
from theano.compat.six.moves import reduce
from theano.gof import graph, op, utils, unify, toolbox
......@@ -1211,7 +1212,7 @@ class PatternSub(LocalOptimizer):
ret = self.transform(real_node, get_nodes=False)
if ret is not False and ret is not None:
assert len(real_node.outputs) == len(ret)
return dict(zip(real_node.outputs, ret))
return dict(izip(real_node.outputs, ret))
if node.op != self.op:
return False
......
"""Driver for gradient calculations."""
from __future__ import print_function
import theano.compat.six.moves.builtins as builtins
from theano.compat import izip
import logging
import time
import warnings
......@@ -12,7 +11,7 @@ import theano
from theano import gof
from theano.gof import Variable
from theano.compat import OrderedDict
from theano.compat import OrderedDict, izip
from theano.compat.six.moves import xrange
from theano.gof.null_type import NullType, null_type
from theano.gof.op import get_debug_values
......@@ -700,7 +699,7 @@ def subgraph_grad(wrt, end, start=None, cost=None, details=False):
for i in range(len(grads)):
grads[i] += cost_grads[i]
pgrads = OrderedDict(zip(params, grads))
pgrads = OrderedDict(izip(params, grads))
# separate wrt from end grads:
wrt_grads = list(pgrads[k] for k in wrt)
end_grads = list(pgrads[k] for k in end)
......
......@@ -18,6 +18,7 @@ from theano.compile import SharedVariable, function
from theano.compat.six import iteritems
from theano import compile
from theano import gof
from theano.compat import izip
from theano.compat import OrderedDict, ifilter
from theano.tensor import opt
from theano import tensor
......@@ -457,8 +458,8 @@ def scan(fn,
# as non sequences at the end of our args
fake_nonseqs = [x.type() for x in non_seqs]
fake_outputs = scan_utils.clone(outputs + updates.values(),
replace=dict(zip(non_seqs,
fake_nonseqs)))
replace=dict(izip(non_seqs,
fake_nonseqs)))
all_inputs = ifilter(
lambda x: (isinstance(x, gof.Variable) and
not isinstance(x, SharedVariable) and
......@@ -567,7 +568,7 @@ def scan(fn,
if (not isinstance(arg, SharedVariable) and
not isinstance(arg, tensor.Constant))]
givens.update(dict(zip(other_scan_args, other_inner_args)))
givens.update(dict(izip(other_scan_args, other_inner_args)))
other_shared_scan_args = [arg.variable for arg
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
......@@ -576,8 +577,7 @@ def scan(fn,
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update)]
givens.update(dict(zip(other_shared_scan_args,
other_shared_inner_args)))
givens.update(dict(izip(other_shared_scan_args, other_shared_inner_args)))
##
# Step 6. Re-order the outputs and clone them replacing things
......
......@@ -136,8 +136,7 @@ class TestScan(unittest.TestCase):
shared_outs = [sh * 5 for sh in shared_vars]
states_out = [x for x in states_out]
pure_outs = [2 for x in xrange(n_outputs)]
return states_out + pure_outs, dict(zip(shared_vars,
shared_outs))
return states_out + pure_outs, dict(izip(shared_vars, shared_outs))
def execute_inner_graph(*args):
"""
......
......@@ -13,6 +13,7 @@ you probably want to use theano.tensor.[c,z,f,d,b,w,i,l,]scalar!
"""
from __future__ import print_function
from itertools import chain
import math
import warnings
from copy import copy
......@@ -21,7 +22,7 @@ from textwrap import dedent
import numpy
import theano
from theano.compat import PY3, imap
from theano.compat import PY3, imap, izip
from theano import gof, printing
from theano.gof import (Op, utils, Variable, Constant, Type, Apply,
FunctionGraph)
......@@ -3425,7 +3426,7 @@ class Composite(ScalarOp):
res2 = theano.compile.rebuild_collect_shared(
inputs=outputs[0].owner.op.inputs,
outputs=outputs[0].owner.op.outputs,
replace=dict(zip(outputs[0].owner.op.inputs, res[1]))
replace=dict(izip(outputs[0].owner.op.inputs, res[1]))
)
assert len(res2[1]) == len(outputs)
assert len(res[0]) == len(inputs)
......@@ -3461,7 +3462,7 @@ class Composite(ScalarOp):
assert len(inputs) == self.nin
res = theano.compile.rebuild_collect_shared(
self.outputs,
replace=dict(zip(self.inputs, inputs)),
replace=dict(izip(self.inputs, inputs)),
rebuild_strict=False)
# After rebuild_collect_shared, the Variable in inputs
# are not necessarily in the graph represented by res.
......@@ -3485,11 +3486,9 @@ class Composite(ScalarOp):
raise NotImplementedError("grad is not implemented for Composite")
def c_code(self, node, nodename, inames, onames, sub):
d = dict(zip(["i%i" % i for i in xrange(len(inames))],
inames) +
zip(["o%i" % i for i in xrange(len(onames))],
onames),
**sub)
d = dict(chain(izip(("i%i" % i for i in xrange(len(inames))), inames),
izip(("o%i" % i for i in xrange(len(onames))),
onames)), **sub)
d['nodename'] = nodename
if not 'id' in sub:
# The use of a dummy id is safe as the code is in a separate block.
......
......@@ -46,7 +46,7 @@ import logging
import numpy
import warnings
from theano.compat import ifilter
from theano.compat import ifilter, izip
from theano.compat.six import iteritems
from theano.compile import SharedVariable, function
from theano import compile
......@@ -790,8 +790,8 @@ def scan(fn,
# as non sequences at the end of our args
fake_nonseqs = [x.type() for x in non_seqs]
fake_outputs = scan_utils.clone(outputs,
replace=OrderedDict(zip(non_seqs,
fake_nonseqs)))
replace=OrderedDict(izip(non_seqs,
fake_nonseqs)))
all_inputs = ifilter(
lambda x: (isinstance(x, gof.Variable) and
not isinstance(x, SharedVariable) and
......@@ -915,7 +915,7 @@ def scan(fn,
if (not isinstance(arg, SharedVariable) and
not isinstance(arg, tensor.Constant))]
givens.update(OrderedDict(zip(other_scan_args, other_inner_args)))
givens.update(OrderedDict(izip(other_scan_args, other_inner_args)))
if strict:
non_seqs_set = set(non_sequences if non_sequences != None else [])
......@@ -939,8 +939,8 @@ def scan(fn,
in dummy_f.maker.expanded_inputs
if (isinstance(arg.variable, SharedVariable) and
not arg.update)]
givens.update(OrderedDict(zip(other_shared_scan_args,
other_shared_inner_args)))
givens.update(OrderedDict(izip(other_shared_scan_args,
other_shared_inner_args)))
##
# Step 6. Re-order the outputs and clone them replacing things
......
import numpy
import theano
from theano.compat import izip
from theano.gof.cc import hash_from_code
......@@ -96,7 +97,7 @@ def shape_of_variables(fgraph, input_shapes):
for dim in input_shapes[inp]]
numeric_output_dims = compute_shapes(*numeric_input_dims)
sym_to_num_dict = dict(zip(output_dims, numeric_output_dims))
sym_to_num_dict = dict(izip(output_dims, numeric_output_dims))
l = {}
for var in fgraph.shape_feature.shape_of:
......
......@@ -8,7 +8,7 @@ import numpy as np
import theano
from theano import gof
from theano.compat import OrderedDict
from theano.compat import OrderedDict, izip
from theano.tests import unittest_tools as utt
from theano import gradient
......@@ -26,8 +26,8 @@ def grad_sources_inputs(sources, inputs):
"""
if inputs is None:
inputs = theano.gof.graph.inputs([source[0] for source in sources])
return dict(zip(inputs, theano.gradient.grad(cost=None, known_grads=dict(sources),
wrt=inputs, consider_constant=inputs)))
return dict(izip(inputs, theano.gradient.grad(cost=None, known_grads=dict(sources),
wrt=inputs, consider_constant=inputs)))
class testgrad_sources_inputs(unittest.TestCase):
......@@ -467,7 +467,7 @@ def test_known_grads():
for layer in layers:
print('Testing by separately computing ', layer)
first = theano.tensor.grad(cost, layer, disconnected_inputs='ignore')
known = dict(zip(layer, first))
known = dict(izip(layer, first))
full = theano.tensor.grad(cost=None,
known_grads=known, wrt=inputs, disconnected_inputs='ignore')
full = theano.function(inputs, full)
......@@ -599,7 +599,7 @@ def test_subgraph_grad():
wrt=params[i], end=grad_ends[i],
start=next_grad, cost=costs[i]
)
next_grad = OrderedDict(zip(grad_ends[i], next_grad))
next_grad = OrderedDict(izip(grad_ends[i], next_grad))
param_grads.extend(param_grad)
pgrads = theano.function(inputs, param_grads)
......
Markdown format supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment