提交 d2b2e4fe authored 作者: Frederic's avatar Frederic

update nextml2015 pres

上级 e543f6ef
...@@ -145,6 +145,7 @@ We use the IMDB dataset. ...@@ -145,6 +145,7 @@ We use the IMDB dataset.
\end{frame} \end{frame}
\begin{frame}{Description} \begin{frame}{Description}
TODO, merge with next slides
\begin{itemize} \begin{itemize}
\item Mathematical symbolic expression compiler \item Mathematical symbolic expression compiler
\item Expressions mimic NumPy's syntax and semantics \item Expressions mimic NumPy's syntax and semantics
...@@ -386,7 +387,7 @@ from theano import tensor as T ...@@ -386,7 +387,7 @@ from theano import tensor as T
tensor3 = T.TensorType( tensor3 = T.TensorType(
broadcastable=(False, False, False), broadcastable=(False, False, False),
dtype='float32') dtype='float32')
x = tensor3() x = T.tensor3()
\end{lstlisting} \end{lstlisting}
\end{frame} \end{frame}
...@@ -430,11 +431,31 @@ b = a.T ...@@ -430,11 +431,31 @@ b = a.T
# Same as b # Same as b
c = a.dimshuffle((0, 1)) c = a.dimshuffle((0, 1))
# Adding to larger tensor # Adding to larger tensor
d = a.dimshuffle((0, 1, ``x'')) d = a.dimshuffle((0, 1, 'x'))
e = a + d e = a + d
\end{lstlisting} \end{lstlisting}
\end{frame} \end{frame}
\begin{frame}
\frametitle{Indexing}
As NumPy!
\begin{itemize}
\item This means all slices and index selections return views:
\begin{itemize}
\item a\_tensor[start:stop:step, N] \# returns a view
\item a\_tensor[::-1] \# reverses the vector, returns a view
\end{itemize}
\item Advanced indexing as NumPy:
\begin{itemize}
\item This means you can use a vector/tensor of indices.
\item This creates a copy.
\item This can be mixed with slicing/index selection.
\item GPU support is limited to this case: a\_tensor[an\_index\_vector]
\end{itemize}
\end{itemize}
\end{frame}
\subsection{Compiling/Running} \subsection{Compiling/Running}
\begin{frame}{Compiling and running expression} \begin{frame}{Compiling and running expression}
\begin{itemize} \begin{itemize}
...@@ -493,11 +514,11 @@ array(3.0) ...@@ -493,11 +514,11 @@ array(3.0)
>>> updates[x] = x + 1 >>> updates[x] = x + 1
>>> f = function([], updates=updates) >>> f = function([], updates=updates)
>>> f() >>> f()
>>> x.get\_value() >>> x.get_value()
1.0 1.0
>>> x.set\_value(100.) >>> x.set_value(100.)
>>> f() >>> f()
>>> x.get\_value() >>> x.get_value()
101.0 101.0
\end{lstlisting} \end{lstlisting}
\end{frame} \end{frame}
...@@ -539,6 +560,7 @@ instead of compiled C code. Runs slow. ...@@ -539,6 +560,7 @@ instead of compiled C code. Runs slow.
\item DEBUG\_MODE: Adds lots of checks. \item DEBUG\_MODE: Adds lots of checks.
Raises error messages in situations other Raises error messages in situations other
modes regard as fine. modes regard as fine.
\item optimizer=fast\_compile: as mode=FAST\_COMPILE, but with C code.
\end{itemize} \end{itemize}
\end{frame} \end{frame}
...@@ -558,10 +580,12 @@ modes regard as fine. ...@@ -558,10 +580,12 @@ modes regard as fine.
\begin{frame}{Modifying expressions} \begin{frame}{Modifying expressions}
\begin{itemize} \begin{itemize}
\item The grad method \item The grad method
\item Variable nodes \item Others
\item Types
\item Ops % \item Variable nodes
\item Apply nodes % \item Types
% \item Ops
% \item Apply nodes
\end{itemize} \end{itemize}
\end{frame} \end{frame}
...@@ -576,56 +600,66 @@ modes regard as fine. ...@@ -576,56 +600,66 @@ modes regard as fine.
>>> x = T.scalar('x') >>> x = T.scalar('x')
>>> y = 2. * x >>> y = 2. * x
>>> g = T.grad(y, x) >>> g = T.grad(y, x)
>>> from theano.printing import min_informative_str >>> theano.printing.debugprint(g)
>>> print min_informative_str(g) Elemwise{mul} [@A] ''
A. Elemwise{mul} |Elemwise{second,no_inplace} [@B] ''
B. Elemwise{second,no_inplace} | |Elemwise{mul,no_inplace} [@C] ''
C. Elemwise{mul,no_inplace} | | |TensorConstant{2.0} [@D]
D. TensorConstant{2.0} | | |x [@E]
E. x | |TensorConstant{1.0} [@F]
F. TensorConstant{1.0} |TensorConstant{2.0} [@D]
<D>
\end{lstlisting} \end{lstlisting}
\end{frame} \end{frame}
\begin{frame}{Theano Variables} %% \begin{frame}{Theano Variables}
\begin{itemize} %% \begin{itemize}
\item A Variable is a theano expression %% \item A Variable is a theano expression
\item Can come from T.scalar, T.matrix, etc. %% \item Can come from T.scalar, T.matrix, etc.
\item Can come from doing operations on other Variables %% \item Can come from doing operations on other Variables
\item Every Variable has a type field, identifying its Type \newline %% \item Every Variable has a type field, identifying its Type \newline
e.g. TensorType((True, False), ‘float32’) %% e.g. TensorType((True, False), ‘float32’)
\item Variables can be thought of as nodes in a graph %% \item Variables can be thought of as nodes in a graph
\end{itemize} %% \end{itemize}
\end{frame} %% \end{frame}
\begin{frame}{Ops} %% \begin{frame}{Ops}
\begin{itemize} %% \begin{itemize}
\item An Op is any class that describes a %% \item An Op is any class that describes a
mathematical function of some variables %% mathematical function of some variables
\item Can call the op on some variables to get a %% \item Can call the op on some variables to get a
new variable or variables %% new variable or variables
\item An Op class can supply other forms of %% \item An Op class can supply other forms of
information about the function, such as its %% information about the function, such as its
derivatives %% derivatives
\end{itemize} %% \end{itemize}
\end{frame} %% \end{frame}
\begin{frame}{Apply nodes} %% \begin{frame}{Apply nodes}
%% \begin{itemize}
%% \item The Apply class is a specific instance of an application of an Op
%% \item Notable fields:
%% \begin{itemize}
%% \item op: The Op to be applied
%% \item inputs: The Variables to be used as input
%% \item outputs: The Variables produced
%% \end{itemize}
%% \item Variable.owner identifies the Apply that created the variable
%% \item Variable and Apply instances are nodes and owner/
%% inputs/outputs identify edges in a Theano graph
%% \end{itemize}
%% \end{frame}
\begin{frame}{Others}
\begin{itemize} \begin{itemize}
\item The Apply class is a specific instance of an application of an Op \item R\_op, L\_op for Hessian-free
\item Notable fields: \item hessian
\begin{itemize} \item jacobian
\item op: The Op to be applied \item you can navigate the graph if you need to
\item inputs: The Variables to be used as input (go from the result of computation to its input, recursively)
\item outputs: The Variables produced
\end{itemize}
\item Variable.owner identifies the Apply that created the variable
\item Variable and Apply instances are nodes and owner/
inputs/outputs identify edges in a Theano graph
\end{itemize} \end{itemize}
\end{frame} \end{frame}
%TODO place somewhere a computation graph example.
\subsection{Debugging} \subsection{Debugging}
\begin{frame}{Debugging} \begin{frame}{Debugging}
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论