Commit f20ea386 authored by Frederic Bastien


Remove useless clone of the graph. Scan inputs and outputs aren't modified in place, so multiple scans can share them.
Parent bab9388f
@@ -1983,10 +1983,8 @@ class Scan(PureOp):
         if self.truncate_gradient != -1:
             grad_steps = tensor.minimum(grad_steps, self.truncate_gradient)
-        rval = scan_utils.reconstruct_graph(self.inputs,
-                                            self.outputs)
-        self_inputs = rval[0]
-        self_outputs = rval[1]
+        self_inputs = self.inputs
+        self_outputs = self.outputs
         # differentiable inputs
         diff_inputs = (self.inner_seqs(self_inputs) +
                        self.inner_mitmot(self_inputs) +
@@ -2645,13 +2643,13 @@ class Scan(PureOp):
         return gradients

     def R_op(self, inputs, eval_points):
-        # Step 0. Don't work on the orignal tensor variables
-        rval = scan_utils.reconstruct_graph(self.inputs,
-                                            self.outputs, '_rop')
-        self_inputs = rval[0]
-        rop_of_inputs = rval[0][:self.n_seqs + self.n_outs] + \
-                rval[0][self.n_seqs + self.n_outs + self.n_shared_outs:]
-        self_outputs = rval[1]
+        # Step 0. Prepare some shortcut variable
+        self_inputs = self.inputs
+        rop_of_inputs = (self_inputs[:self.n_seqs + self.n_outs] +
+                         self_inputs[self.n_seqs + self.n_outs +
+                                     self.n_shared_outs:])
+        self_outputs = self.outputs
         # Step 1. Compute the R_op of the inner function
         inner_eval_points = [scan_utils.safe_new(x, '_evalpoint')
                              for x in rop_of_inputs]
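For readers less familiar with the Scan internals, the idea behind dropping the `scan_utils.reconstruct_graph` calls can be illustrated with a small, self-contained sketch. The `Var` class and `read_only_traversal` helper below are hypothetical stand-ins, not Theano code: `Var` plays the role of an inner-graph variable, and `read_only_traversal` plays the role of the read-only graph walk that `grad()` and `R_op()` perform. Because such a walk never mutates the variables it visits, two scan ops can alias the very same inner inputs and outputs instead of each holding its own clone.

# Hedged, hypothetical sketch of the idea behind this commit: if the inner
# graph is only ever *read* (as grad()/R_op() do), two ops can share the
# same inner variables and no defensive clone is needed.

class Var(object):
    """A toy symbolic variable; stands in for a Theano inner-graph variable."""
    def __init__(self, name, parents=()):
        self.name = name
        self.parents = tuple(parents)

def read_only_traversal(outputs):
    """Walks the graph the way grad()/R_op() would, mutating nothing."""
    seen, order = set(), []
    def visit(v):
        if v in seen:
            return
        seen.add(v)
        for p in v.parents:
            visit(p)
        order.append(v.name)
    for out in outputs:
        visit(out)
    return order

# One shared inner graph...
x = Var("x")
y = Var("y", parents=[x])

# ...aliased by two "scan" ops. Before this commit each consumer would have
# cloned the graph (reconstruct_graph); after it they share the same objects.
scan_a_inner = (x, y)
scan_b_inner = (x, y)

assert read_only_traversal([scan_a_inner[1]]) == read_only_traversal([scan_b_inner[1]])
print("shared inner graph traversed identically:", read_only_traversal([y]))

Under this reading, the removed clone was pure overhead: it rebuilt the entire inner graph on every `grad()`/`R_op()` call only to read it once, which is why the commit message calls it useless.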