提交 fe4e2d03 authored 作者: Christof Angermueller's avatar Christof Angermueller

Add context menu and javascript libs

上级 aa62fad6
/* Styling for the d3.contextMenu popup (see the accompanying JS plugin):
   an absolutely positioned, initially hidden panel that the JS shows at
   the mouse position when a contextmenu event fires. */
.d3-context-menu {
position: absolute;
display: none; /* toggled to 'block' via inline style by the JS */
background-color: #f2f2f2;
border-radius: 4px;
font-family: Arial, sans-serif;
font-size: 14px;
min-width: 50px;
border: 1px solid #d4d4d4;
z-index:1200; /* keep the menu above the rendered graph/SVG content */
}
/* Menu item list: strip default list bullets/indentation. */
.d3-context-menu ul {
list-style-type: none;
margin: 4px 0px;
padding: 0px;
cursor: default;
}
.d3-context-menu ul li {
padding: 4px 16px;
}
/* Highlight the hovered item: blue background with light text. */
.d3-context-menu ul li:hover {
background-color: #4677f8;
color: #fefefe;
}
/**
 * Factory for a d3 (v3) context-menu handler.
 *
 * @param {Array} menu - items of shape {title, action(elm, data, index)}.
 * @param {Function} [openCallback] - invoked with (data, index) just before
 *     the menu is shown (e.g. to close a tooltip).
 * @returns {Function} handler to bind to 'contextmenu' events on a selection.
 */
d3.contextMenu = function (menu, openCallback) {
    // Lazily create the single shared menu container; the data-join
    // guarantees at most one '.d3-context-menu' div exists on the page.
    d3.selectAll('.d3-context-menu').data([1])
        .enter()
        .append('div')
        .attr('class', 'd3-context-menu');

    // Any click on the body hides the menu again.
    d3.select('body').on('click.d3-context-menu', function() {
        d3.select('.d3-context-menu').style('display', 'none');
    });

    // The returned function runs when a contextmenu event occurs.
    return function(data, index) {
        var target = this;
        var container = d3.selectAll('.d3-context-menu');

        // Rebuild the item list from scratch on every invocation.
        container.html('');
        container.append('ul')
            .selectAll('li')
            .data(menu)
            .enter()
            .append('li')
            .html(function(d) {
                return d.title;
            })
            .on('click', function(d, i) {
                d.action(target, data, index);
                d3.select('.d3-context-menu').style('display', 'none');
            });

        // Give the caller a chance to react before the menu becomes
        // visible (an example usage would be closing a tooltip).
        if (openCallback) openCallback(data, index);

        // Position the menu at the cursor and show it.
        d3.select('.d3-context-menu')
            .style('left', (d3.event.pageX - 2) + 'px')
            .style('top', (d3.event.pageY - 2) + 'px')
            .style('display', 'block');

        // Suppress the browser's native context menu.
        d3.event.preventDefault();
    };
};
差异被折叠。
This source diff could not be displayed because it is too large. You can view the blob instead.
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Couldn't import dot_parser, loading of dot files will not be possible.\n"
]
}
],
"source": [
"import numpy\n",
"import numpy.random as rng\n",
"import theano\n",
"import theano.tensor as T\n",
"import theano.printing as pr\n",
"import theano.d3printing as d3p\n",
"from theano.d3printing.formatting import GraphFormatter\n",
"from theano import ProfileMode\n",
"\n",
"from IPython.display import SVG"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Model "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"nsamples = 10000\n",
"ninput = 100\n",
"nhidden = 50\n",
"noutput = 1\n",
"\n",
"def init_params(m, n):\n",
" W = np.random.normal(0, 1, (m, n)).astype(theano.config.floatX)\n",
" return theano.shared(W, borrow=True)\n",
"\n",
"W1 = init_params(ninput, nhidden)\n",
"W2 = init_params(nhidden, noutput)\n",
"X = T.dmatrix('X')\n",
"\n",
"H = T.nnet.sigmoid(T.dot(X, W1))\n",
"Z = T.nnet.sigmoid(T.dot(H, W2))\n",
"zm = T.mean(Z)\n",
"\n",
"predict_profiled = theano.function([X], [Z, zm], profile=True)\n",
"data = rng.rand(nsamples, ninput)\n",
"dZ, dzm = predict_profiled(data)\n",
"\n",
"predict_unprofiled = theano.function([X], [Z, zm], profile=False)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Timing information via debugprint "
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Timing Info\n",
"-----------\n",
"--> <time> <% time> - <total time> <% total time>'\n",
"\n",
"<time> computation time for this node\n",
"<% time> fraction of total computation time for this node\n",
"<total time> time for this node + total times for this node's ancestors\n",
"<% total time> total time for this node over total computation time\n",
"\n",
"N.B.:\n",
"* Times include the node time and the function overhead.\n",
"* <total time> and <% total time> may over-count computation times\n",
" if inputs to a node share a common ancestor and should be viewed as a\n",
" loose upper bound. Their intended use is to help rule out potential nodes\n",
" to remove when optimizing a graph because their <total time> is very low.\n",
"\n",
"Elemwise{ScalarSigmoid}[(0, 0)] [@A] '' 9 --> 1.16e-04s 0.9% 1.21e-02s 95.4%\n",
" |Dot22 [@B] '' 6 --> 9.38e-04s 7.4% 1.20e-02s 94.5%\n",
" |Elemwise{ScalarSigmoid}[(0, 0)] [@C] '' 4 --> 4.59e-03s 36.1% 1.11e-02s 87.1%\n",
" | |Dot22 [@D] '' 2 --> 6.48e-03s 51.0% 6.48e-03s 51.0%\n",
" | |X [@E]\n",
" | |<TensorType(float64, matrix)> [@F]\n",
" |<TensorType(float64, matrix)> [@G]\n",
"\n",
"Timing Info\n",
"-----------\n",
"--> <time> <% time> - <total time> <% total time>'\n",
"\n",
"<time> computation time for this node\n",
"<% time> fraction of total computation time for this node\n",
"<total time> time for this node + total times for this node's ancestors\n",
"<% total time> total time for this node over total computation time\n",
"\n",
"N.B.:\n",
"* Times include the node time and the function overhead.\n",
"* <total time> and <% total time> may over-count computation times\n",
" if inputs to a node share a common ancestor and should be viewed as a\n",
" loose upper bound. Their intended use is to help rule out potential nodes\n",
" to remove when optimizing a graph because their <total time> is very low.\n",
"\n",
"Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)] [@H] '' 11 --> 9.54e-07s 0.0% 1.22e-02s 95.8%\n",
" |Sum{acc_dtype=float64} [@I] '' 10 --> 1.50e-05s 0.1% 1.21e-02s 95.5%\n",
" | |Elemwise{ScalarSigmoid}[(0, 0)] [@A] '' 9 --> 1.16e-04s 0.9% 1.21e-02s 95.4%\n",
" |Subtensor{int64} [@J] '' 8 --> 3.10e-06s 0.0% 1.48e-05s 0.1%\n",
" | |Elemwise{Cast{float64}} [@K] '' 5 --> 3.81e-06s 0.0% 1.17e-05s 0.1%\n",
" | | |MakeVector [@L] '' 3 --> 5.96e-06s 0.0% 7.87e-06s 0.1%\n",
" | | |Shape_i{0} [@M] '' 1 --> 9.54e-07s 0.0% 9.54e-07s 0.0%\n",
" | | | |X [@E]\n",
" | | |Shape_i{1} [@N] '' 0 --> 9.54e-07s 0.0% 9.54e-07s 0.0%\n",
" | | |<TensorType(float64, matrix)> [@G]\n",
" | |Constant{0} [@O]\n",
" |Subtensor{int64} [@P] '' 7 --> 9.54e-07s 0.0% 1.26e-05s 0.1%\n",
" |Elemwise{Cast{float64}} [@K] '' 5 --> 3.81e-06s 0.0% 1.17e-05s 0.1%\n",
" |Constant{1} [@Q]\n"
]
}
],
"source": [
"pr.debugprint(predict_profiled)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Profiled graph "
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The output file is available at predict_profiled.html\n"
]
}
],
"source": [
"d3p.d3print(predict_profiled, 'predict_profiled.html')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"[open](./predict_profiled.html) "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"pr.pydot"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The output file is available at predict_profiled.svg\n"
]
}
],
"source": [
"pr.pydotprint(predict_profiled, 'predict_profiled', format='svg')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"![predict_profiled.svg](predict_profiled.svg)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Unprofiled graph"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The output file is available at predict_unprofiled.html\n"
]
}
],
"source": [
"d3p.d3print(predict_unprofiled, 'predict_unprofiled.html')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"[open](./predict_unprofiled.html) "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.10"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
digraph G {
graph [bb="0,0,1055,672"];
node [label="\N"];
"Shape_i{1}" [fillcolor=cyan,
height=0.5,
pos="427,458",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=1.4763];
MakeVector [height=0.5,
pos="250,370",
profile="[5.9604644775390625e-06, 0.012705802917480469]",
shape=ellipse,
width=1.5402];
"Shape_i{1}" -> MakeVector [label="1 TensorType(int64, scalar)",
lp="433,414",
pos="e,279.57,385.37 397.77,442.8 368.01,428.34 321.7,405.84 288.87,389.89"];
"TensorType(float64, matrix)" [fillcolor=limegreen,
height=0.5,
pos="464,566",
profile="[0.0009379386901855469, 0.012705802917480469]",
shape=box,
style=filled,
width=2.4444];
"TensorType(float64, matrix)" -> "Shape_i{1}" [label="TensorType(float64, matrix)",
lp="504,512",
pos="e,422.08,476.18 437.6,547.65 432.1,542.66 427.05,536.72 424,530 417.83,516.39 418.16,499.77 420.24,486.03"];
"Dot22 id=6" [height=0.5,
pos="716,370",
profile="[0.0009379386901855469, 0.012705802917480469]",
shape=ellipse,
width=1.4534];
"TensorType(float64, matrix)" -> "Dot22 id=6" [label="1 TensorType(float64, matrix)",
lp="692,458",
pos="e,682.81,383.97 552,551.07 563.93,546.03 575.1,539.2 584,530 612.7,500.32 582.73,473.4 607,440 623.72,416.99 650.68,399.83 673.6,\
388.37"];
"Shape_i{0}" [fillcolor=cyan,
height=0.5,
pos="229,512",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=1.4763];
"Shape_i{0}" -> MakeVector [label="0 TensorType(int64, scalar)",
lp="287,458",
pos="e,236.58,387.68 217.14,494.33 213.83,488.77 210.69,482.36 209,476 204.89,460.54 204.76,455.43 209,440 213.31,424.32 222.2,408.6 \
230.61,396.13"];
"name=X TensorType(float64, matrix)" [fillcolor=limegreen,
height=0.5,
pos="334,654",
profile="[0.0064849853515625, 0.012705802917480469]",
shape=box,
style=filled,
width=3.1667];
"name=X TensorType(float64, matrix)" -> "Shape_i{0}" [label="TensorType(float64, matrix)",
lp="344,610",
pos="e,230.68,530.25 286.71,635.8 278.29,631.06 270.23,625.19 264,618 244.98,596.07 236.22,563.46 232.23,540.41"];
Dot22 [height=0.5,
pos="645,566",
profile="[0.0064849853515625, 0.012705802917480469]",
shape=ellipse,
width=0.92774];
"name=X TensorType(float64, matrix)" -> Dot22 [label="0 TensorType(float64, matrix)",
lp="556,610",
pos="e,614.28,573.23 377.6,635.91 410.55,623.16 453.06,607.1 471,602 510.24,590.85 521.01,592.05 561,584 575.06,581.17 590.45,578.06 \
604.17,575.28"];
"Elemwise{ScalarSigmoid}[(0, 0)]" [fillcolor="#FFAABB",
height=0.5,
pos="921,458",
profile="[0.004585981369018555, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=3.7297];
Dot22 -> "Elemwise{ScalarSigmoid}[(0, 0)]" [color=red,
label="TensorType(float64, matrix)",
lp="903,512",
pos="e,879,475.13 671.46,554.84 716.96,537.36 810.78,501.33 869.53,478.77"];
"TensorType(float64, matrix) id=4" [fillcolor=limegreen,
height=0.5,
pos="645,654",
profile="[0.0064849853515625, 0.012705802917480469]",
shape=box,
style=filled,
width=2.8403];
"TensorType(float64, matrix) id=4" -> Dot22 [label="1 TensorType(float64, matrix)",
lp="730,610",
pos="e,645,584.08 645,635.6 645,623.75 645,607.82 645,594.29"];
"Elemwise{Cast{float64}}" [fillcolor="#FFAABB",
height=0.5,
pos="250,282",
profile="[3.814697265625e-06, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=2.9207];
MakeVector -> "Elemwise{Cast{float64}}" [label="TensorType(int64, vector)",
lp="324,326",
pos="e,250,300.08 250,351.6 250,339.75 250,323.82 250,310.29"];
"Elemwise{ScalarSigmoid}[(0, 0)]" -> "Dot22 id=6" [label="0 TensorType(float64, matrix)",
lp="921,414",
pos="e,747.84,384.36 881.96,440.62 846.23,425.63 793.55,403.53 757.17,388.27"];
"Subtensor{int64}" [fillcolor="#FFAAFF",
height=0.5,
pos="317,194",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=2.0659];
"Elemwise{Cast{float64}}" -> "Subtensor{int64}" [color=dodgerblue,
label="0 TensorType(float64, vector)",
lp="374.5,238",
pos="e,303.79,211.96 263.24,264.01 273.06,251.41 286.6,234.03 297.63,219.86"];
"Subtensor{int64} id=8" [fillcolor="#FFAAFF",
height=0.5,
pos="103,194",
profile="[3.0994415283203125e-06, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=2.5916];
"Elemwise{Cast{float64}}" -> "Subtensor{int64} id=8" [color=dodgerblue,
label="0 TensorType(float64, vector)",
lp="189.5,238",
pos="e,98.802,212.45 169.53,270.28 141.06,264.73 113.86,256.82 105,246 99.708,239.54 98.04,230.93 98.046,222.62"];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" [fillcolor="#FFAABB",
height=0.5,
pos="716,282",
profile="[0.00011587142944335938, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=4.2783];
"Dot22 id=6" -> "Elemwise{ScalarSigmoid}[(0, 0)] id=9" [color=red,
label="TensorType(float64, matrix)",
lp="796,326",
pos="e,716,300.08 716,351.6 716,339.75 716,323.82 716,310.29"];
"Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [fillcolor="#FFAABB",
height=0.5,
pos="317,106",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=ellipse,
style=filled,
type=colored,
width=4.8998];
"Subtensor{int64}" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [label="2 TensorType(float64, scalar)",
lp="400.5,150",
pos="e,317,124.08 317,175.6 317,163.75 317,147.82 317,134.29"];
"val=1 int64" [fillcolor=limegreen,
height=0.5,
pos="487,282",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=box,
style=filled,
width=1.1181];
"val=1 int64" -> "Subtensor{int64}" [label="1 int64",
lp="496,238",
pos="e,378.35,204.25 483.08,263.92 479.63,252.68 473.47,238.52 463,230 451.1,220.31 419.25,212.13 388.33,206.12"];
"Subtensor{int64} id=8" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [label="1 TensorType(float64, scalar)",
lp="229.5,150",
pos="e,201.3,119.69 113.23,175.69 120.84,164.35 132.26,150.16 146,142 159.92,133.73 175.47,127.39 191.37,122.53"];
"val=0 int64" [fillcolor=limegreen,
height=0.5,
pos="40,282",
profile="[3.0994415283203125e-06, 0.012705802917480469]",
shape=box,
style=filled,
width=1.1181];
"val=0 int64" -> "Subtensor{int64} id=8" [label="1 int64",
lp="61,238",
pos="e,61.928,210.23 36.259,263.76 34.929,253.28 35.008,240.04 41,230 44.243,224.57 48.669,219.91 53.673,215.93"];
"TensorType(float64, matrix) id=14" [fillcolor=dodgerblue,
height=0.5,
pos="539,194",
profile="[0.00011587142944335938, 0.012705802917480469]",
shape=box,
style=filled,
width=2.9444];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" -> "TensorType(float64, matrix) id=14" [label="TensorType(float64, matrix)",
lp="653,238",
pos="e,546.35,212.14 619.81,267.9 603.25,262.81 586.92,255.76 573,246 563.98,239.68 556.7,230.13 551.22,221"];
"Sum{acc_dtype=float64}" [height=0.5,
pos="766,194",
profile="[1.5020370483398438e-05, 0.012705802917480469]",
shape=ellipse,
width=2.8658];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" -> "Sum{acc_dtype=float64}" [label="TensorType(float64, matrix)",
lp="826,238",
pos="e,756.14,211.96 725.88,264.01 733.07,251.64 742.93,234.68 751.08,220.66"];
"Sum{acc_dtype=float64}" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [color=red,
label="0 TensorType(float64, scalar)",
lp="665.5,150",
pos="e,396.23,122.17 699.26,180.22 621.48,165.32 492.61,140.64 406.28,124.1"];
"TensorType(float64, scalar) id=16" [fillcolor=dodgerblue,
height=0.5,
pos="317,18",
profile="[9.5367431640625e-07, 0.012705802917480469]",
shape=box,
style=filled,
width=2.8889];
"Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" -> "TensorType(float64, scalar) id=16" [label="TensorType(float64, scalar)",
lp="395,62",
pos="e,317,36.084 317,87.597 317,75.746 317,59.817 317,46.292"];
}
digraph G {
graph [bb="0,0,1055,672"];
node [label="\N"];
"Shape_i{1}" [fillcolor=cyan,
height=0.5,
pos="427,458",
shape=ellipse,
style=filled,
type=colored,
width=1.4763];
MakeVector [height=0.5,
pos="250,370",
shape=ellipse,
width=1.5402];
"Shape_i{1}" -> MakeVector [label="1 TensorType(int64, scalar)",
lp="433,414",
pos="e,279.57,385.37 397.77,442.8 368.01,428.34 321.7,405.84 288.87,389.89"];
"TensorType(float64, matrix)" [fillcolor=limegreen,
height=0.5,
pos="464,566",
shape=box,
style=filled,
width=2.4444];
"TensorType(float64, matrix)" -> "Shape_i{1}" [label="TensorType(float64, matrix)",
lp="504,512",
pos="e,422.08,476.18 437.6,547.65 432.1,542.66 427.05,536.72 424,530 417.83,516.39 418.16,499.77 420.24,486.03"];
"Dot22 id=6" [height=0.5,
pos="716,370",
shape=ellipse,
width=1.4534];
"TensorType(float64, matrix)" -> "Dot22 id=6" [label="1 TensorType(float64, matrix)",
lp="692,458",
pos="e,682.81,383.97 552,551.07 563.93,546.03 575.1,539.2 584,530 612.7,500.32 582.73,473.4 607,440 623.72,416.99 650.68,399.83 673.6,\
388.37"];
"Shape_i{0}" [fillcolor=cyan,
height=0.5,
pos="229,512",
shape=ellipse,
style=filled,
type=colored,
width=1.4763];
"Shape_i{0}" -> MakeVector [label="0 TensorType(int64, scalar)",
lp="287,458",
pos="e,236.58,387.68 217.14,494.33 213.83,488.77 210.69,482.36 209,476 204.89,460.54 204.76,455.43 209,440 213.31,424.32 222.2,408.6 \
230.61,396.13"];
"name=X TensorType(float64, matrix)" [fillcolor=limegreen,
height=0.5,
pos="334,654",
shape=box,
style=filled,
width=3.1667];
"name=X TensorType(float64, matrix)" -> "Shape_i{0}" [label="TensorType(float64, matrix)",
lp="344,610",
pos="e,230.68,530.25 286.71,635.8 278.29,631.06 270.23,625.19 264,618 244.98,596.07 236.22,563.46 232.23,540.41"];
Dot22 [height=0.5,
pos="645,566",
shape=ellipse,
width=0.92774];
"name=X TensorType(float64, matrix)" -> Dot22 [label="0 TensorType(float64, matrix)",
lp="556,610",
pos="e,614.28,573.23 377.6,635.91 410.55,623.16 453.06,607.1 471,602 510.24,590.85 521.01,592.05 561,584 575.06,581.17 590.45,578.06 \
604.17,575.28"];
"Elemwise{ScalarSigmoid}[(0, 0)]" [fillcolor="#FFAABB",
height=0.5,
pos="921,458",
shape=ellipse,
style=filled,
type=colored,
width=3.7297];
Dot22 -> "Elemwise{ScalarSigmoid}[(0, 0)]" [color=red,
label="TensorType(float64, matrix)",
lp="903,512",
pos="e,879,475.13 671.46,554.84 716.96,537.36 810.78,501.33 869.53,478.77"];
"TensorType(float64, matrix) id=4" [fillcolor=limegreen,
height=0.5,
pos="645,654",
shape=box,
style=filled,
width=2.8403];
"TensorType(float64, matrix) id=4" -> Dot22 [label="1 TensorType(float64, matrix)",
lp="730,610",
pos="e,645,584.08 645,635.6 645,623.75 645,607.82 645,594.29"];
"Elemwise{Cast{float64}}" [fillcolor="#FFAABB",
height=0.5,
pos="250,282",
shape=ellipse,
style=filled,
type=colored,
width=2.9207];
MakeVector -> "Elemwise{Cast{float64}}" [label="TensorType(int64, vector)",
lp="324,326",
pos="e,250,300.08 250,351.6 250,339.75 250,323.82 250,310.29"];
"Elemwise{ScalarSigmoid}[(0, 0)]" -> "Dot22 id=6" [label="0 TensorType(float64, matrix)",
lp="921,414",
pos="e,747.84,384.36 881.96,440.62 846.23,425.63 793.55,403.53 757.17,388.27"];
"Subtensor{int64}" [fillcolor="#FFAAFF",
height=0.5,
pos="317,194",
shape=ellipse,
style=filled,
type=colored,
width=2.0659];
"Elemwise{Cast{float64}}" -> "Subtensor{int64}" [color=dodgerblue,
label="0 TensorType(float64, vector)",
lp="374.5,238",
pos="e,303.79,211.96 263.24,264.01 273.06,251.41 286.6,234.03 297.63,219.86"];
"Subtensor{int64} id=8" [fillcolor="#FFAAFF",
height=0.5,
pos="103,194",
shape=ellipse,
style=filled,
type=colored,
width=2.5916];
"Elemwise{Cast{float64}}" -> "Subtensor{int64} id=8" [color=dodgerblue,
label="0 TensorType(float64, vector)",
lp="189.5,238",
pos="e,98.802,212.45 169.53,270.28 141.06,264.73 113.86,256.82 105,246 99.708,239.54 98.04,230.93 98.046,222.62"];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" [fillcolor="#FFAABB",
height=0.5,
pos="716,282",
shape=ellipse,
style=filled,
type=colored,
width=4.2783];
"Dot22 id=6" -> "Elemwise{ScalarSigmoid}[(0, 0)] id=9" [color=red,
label="TensorType(float64, matrix)",
lp="796,326",
pos="e,716,300.08 716,351.6 716,339.75 716,323.82 716,310.29"];
"Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [fillcolor="#FFAABB",
height=0.5,
pos="317,106",
shape=ellipse,
style=filled,
type=colored,
width=4.8998];
"Subtensor{int64}" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [label="2 TensorType(float64, scalar)",
lp="400.5,150",
pos="e,317,124.08 317,175.6 317,163.75 317,147.82 317,134.29"];
"val=1 int64" [fillcolor=limegreen,
height=0.5,
pos="487,282",
shape=box,
style=filled,
width=1.1181];
"val=1 int64" -> "Subtensor{int64}" [label="1 int64",
lp="496,238",
pos="e,378.35,204.25 483.08,263.92 479.63,252.68 473.47,238.52 463,230 451.1,220.31 419.25,212.13 388.33,206.12"];
"Subtensor{int64} id=8" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [label="1 TensorType(float64, scalar)",
lp="229.5,150",
pos="e,201.3,119.69 113.23,175.69 120.84,164.35 132.26,150.16 146,142 159.92,133.73 175.47,127.39 191.37,122.53"];
"val=0 int64" [fillcolor=limegreen,
height=0.5,
pos="40,282",
shape=box,
style=filled,
width=1.1181];
"val=0 int64" -> "Subtensor{int64} id=8" [label="1 int64",
lp="61,238",
pos="e,61.928,210.23 36.259,263.76 34.929,253.28 35.008,240.04 41,230 44.243,224.57 48.669,219.91 53.673,215.93"];
"TensorType(float64, matrix) id=14" [fillcolor=dodgerblue,
height=0.5,
pos="539,194",
shape=box,
style=filled,
width=2.9444];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" -> "TensorType(float64, matrix) id=14" [label="TensorType(float64, matrix)",
lp="653,238",
pos="e,546.35,212.14 619.81,267.9 603.25,262.81 586.92,255.76 573,246 563.98,239.68 556.7,230.13 551.22,221"];
"Sum{acc_dtype=float64}" [height=0.5,
pos="766,194",
shape=ellipse,
width=2.8658];
"Elemwise{ScalarSigmoid}[(0, 0)] id=9" -> "Sum{acc_dtype=float64}" [label="TensorType(float64, matrix)",
lp="826,238",
pos="e,756.14,211.96 725.88,264.01 733.07,251.64 742.93,234.68 751.08,220.66"];
"Sum{acc_dtype=float64}" -> "Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" [color=red,
label="0 TensorType(float64, scalar)",
lp="665.5,150",
pos="e,396.23,122.17 699.26,180.22 621.48,165.32 492.61,140.64 406.28,124.1"];
"TensorType(float64, scalar) id=16" [fillcolor=dodgerblue,
height=0.5,
pos="317,18",
shape=box,
style=filled,
width=2.8889];
"Elemwise{Composite{((i0 / i1) / i2)}}[(0, 0)]" -> "TensorType(float64, scalar) id=16" [label="TensorType(float64, scalar)",
lp="395,62",
pos="e,317,36.084 317,87.597 317,75.746 317,59.817 317,46.292"];
}
...@@ -17,6 +17,7 @@ from theano.compile import Function ...@@ -17,6 +17,7 @@ from theano.compile import Function
_logger = logging.getLogger("theano.printing") _logger = logging.getLogger("theano.printing")
class GraphFormatter(object): class GraphFormatter(object):
def __init__(self): def __init__(self):
...@@ -304,27 +305,13 @@ class GraphFormatter(object): ...@@ -304,27 +305,13 @@ class GraphFormatter(object):
if node in self.apply_name_cache: if node in self.apply_name_cache:
return self.apply_name_cache[node] return self.apply_name_cache[node]
prof = None prof = ''
if mode: if mode:
time = mode.profile_stats[fct].apply_time.get(node, 0) profile = mode.profile_stats[fct]
# second, % total time in profiler, %fct time in profiler if profile:
if mode.local_time == 0:
pt = 0
else:
pt = time * 100 / mode.local_time
if mode.profile_stats[fct].fct_callcount == 0:
pf = 0
else:
pf = time * 100 / mode.profile_stats[fct].fct_call_time
prof = [time, pt, pf]
elif profile:
time = profile.apply_time.get(node, 0) time = profile.apply_time.get(node, 0)
# second, %fct time in profiler call_time = profile.fct_call_time
if profile.fct_callcount == 0: prof = str([time, call_time])
pf = 0
else:
pf = time * 100 / profile.fct_call_time
prof = [time, None, pf]
applystr = str(node.op).replace(':', '_') applystr = str(node.op).replace(':', '_')
if (applystr in self.all_strings) or self.with_ids: if (applystr in self.all_strings) or self.with_ids:
...@@ -343,10 +330,6 @@ class GraphFormatter(object): ...@@ -343,10 +330,6 @@ class GraphFormatter(object):
applystr = (applystr[:self.max_label_size - 3 - len(suffix)] + applystr = (applystr[:self.max_label_size - 3 - len(suffix)] +
'...' + '...' +
suffix) suffix)
if prof is not None:
prof = str(prof)
else:
prof = ''
self.all_strings.add(applystr) self.all_strings.add(applystr)
self.apply_name_cache[node] = (applystr, prof) self.apply_name_cache[node] = (applystr, prof)
......
/* Styling for the d3.contextMenu popup (see the accompanying JS plugin):
   an absolutely positioned, initially hidden panel that the JS shows at
   the mouse position when a contextmenu event fires. */
.d3-context-menu {
position: absolute;
display: none; /* toggled to 'block' via inline style by the JS */
background-color: #f2f2f2;
border-radius: 4px;
font-family: Arial, sans-serif;
font-size: 14px;
min-width: 50px;
border: 1px solid #d4d4d4;
z-index:1200; /* keep the menu above the rendered graph/SVG content */
}
/* Menu item list: strip default list bullets/indentation. */
.d3-context-menu ul {
list-style-type: none;
margin: 4px 0px;
padding: 0px;
cursor: default;
}
.d3-context-menu ul li {
padding: 4px 16px;
}
/* Highlight the hovered item: blue background with light text. */
.d3-context-menu ul li:hover {
background-color: #4677f8;
color: #fefefe;
}
/**
 * Factory for a d3 (v3) context-menu handler.
 *
 * @param {Array} menu - items of shape {title, action(elm, data, index)}.
 * @param {Function} [openCallback] - invoked with (data, index) just before
 *     the menu is shown (e.g. to close a tooltip).
 * @returns {Function} handler to bind to 'contextmenu' events on a selection.
 */
d3.contextMenu = function (menu, openCallback) {
    // Lazily create the single shared menu container; the data-join
    // guarantees at most one '.d3-context-menu' div exists on the page.
    d3.selectAll('.d3-context-menu').data([1])
        .enter()
        .append('div')
        .attr('class', 'd3-context-menu');

    // Any click on the body hides the menu again.
    d3.select('body').on('click.d3-context-menu', function() {
        d3.select('.d3-context-menu').style('display', 'none');
    });

    // The returned function runs when a contextmenu event occurs.
    return function(data, index) {
        var target = this;
        var container = d3.selectAll('.d3-context-menu');

        // Rebuild the item list from scratch on every invocation.
        container.html('');
        container.append('ul')
            .selectAll('li')
            .data(menu)
            .enter()
            .append('li')
            .html(function(d) {
                return d.title;
            })
            .on('click', function(d, i) {
                d.action(target, data, index);
                d3.select('.d3-context-menu').style('display', 'none');
            });

        // Give the caller a chance to react before the menu becomes
        // visible (an example usage would be closing a tooltip).
        if (openCallback) openCallback(data, index);

        // Position the menu at the cursor and show it.
        d3.select('.d3-context-menu')
            .style('left', (d3.event.pageX - 2) + 'px')
            .style('top', (d3.event.pageY - 2) + 'px')
            .style('display', 'block');

        // Suppress the browser's native context menu.
        d3.event.preventDefault();
    };
};
This source diff could not be displayed because it is too large. You can view the blob instead.
差异被折叠。
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论