1 % -*- mode: latex; mode: reftex; mode: auto-fill; mode: flyspell; -*-
3 % Written by Francois Fleuret <francois@fleuret.org>
5 \documentclass[c,8pt]{beamer}
% Postfix transpose symbol for math mode, e.g. $K\transpose$.
% Starred \newcommand*: the body may not contain \par, which gives
% better error localization for short macros like this one.
\newcommand*{\transpose}{^{\top}}
% Upright "softmax" operator with correct operator spacing in math mode.
% \DeclareMathOperator (amsmath; loaded by beamer unless [noamsmath]) is
% preferred over a raw \def, which would silently overwrite any existing
% definition of \softmax.
\DeclareMathOperator{\softmax}{softmax}
11 \setbeamertemplate{navigation symbols}{}
15 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
17 \begin{frame}[fragile]
19 Given a query sequence $Q$, a key sequence $K$, and a value sequence
20 $V$, compute an attention matrix $A$ by matching $Q$s to $K$s, and
21 weight $V$ with it to get $Y$.
27 % A_{i,j} = \softmax \left( \frac{Q_i \cdot K_j}{\sqrt{d}} \right)
28 A_i = \softmax \left( \frac{Q_i \, K\transpose}{\sqrt{d}} \right)
40 \makebox[\textwidth][c]{
43 \node[xscale=0.5,yslant=0.5] (V) at (-2, 2.35) {
45 \draw[fill=green!20] (0, 0) rectangle (4, 1.4);
46 \uncover<3,5>{\draw[fill=yellow] (0, 0) rectangle (4, 1.4);}
47 \foreach \x in { 0.2, 0.4, ..., 3.8 } \draw (\x, 0) -- ++(0, 1.4);
48 %% \foreach \y in { 0.0, 0.2, ..., 1.4 } \draw (0, \y) -- ++(4, 0);
52 \node[yscale=0.5,xslant=0.5] (A) at (0.5, 1.6) {
54 \draw (0, 0) rectangle ++(3, 4);
55 %% \uncover<4->{\draw[fill=green!20] (0, 0) rectangle ++(0.2, 4);}
56 %% \uncover<6->{\draw[fill=green!20] (0.2, 0) rectangle ++(0.2, 4);}
61 \node[xscale=0.5,yslant=0.5] (a1) at (-0.9, 2.1) {
63 \draw[draw=none] (0, 0) rectangle (4, 1);
65 0.00/0.03, 0.20/0.04, 0.40/0.07, 0.60/0.35, 0.80/0.52,
66 1.00/1.00, 1.20/0.82, 1.40/0.25, 1.60/0.08, 1.80/0.03,
67 2.00/0.15, 2.20/0.24, 2.40/0.70, 2.60/0.05, 2.80/0.03,
68 3.00/0.03, 3.20/0.03, 3.40/0.00, 3.60/0.03, 3.80/0.00
70 \uncover<2>{\draw[black,fill=orange] (\x, 0) rectangle ++(0.2, \y);}
71 \uncover<3>{\draw[black,fill=yellow] (\x, 0) rectangle ++(0.2, \y);}
78 \node[xscale=0.5,yslant=0.5] (a2) at (-0.7, 2.1) {
80 \draw[draw=none] (0, 0) rectangle (4, 1);
82 0.00/0.03, 0.20/0.04, 0.40/0.07, 0.60/0.03, 0.80/0.03,
83 1.00/0.05, 1.20/0.02, 1.40/0.08, 1.60/0.35, 1.80/0.85,
84 2.00/0.05, 2.20/0.04, 2.40/0.03, 2.60/0.05, 2.80/0.03,
85 3.00/0.03, 3.20/0.03, 3.40/0.00, 3.60/0.03, 3.80/0.00
87 \uncover<4>{\draw[black,fill=orange] (\x, 0) rectangle ++(0.2, \y);}
88 \uncover<5>{\draw[black,fill=yellow] (\x, 0) rectangle ++(0.2, \y);}
94 \node (Q) at (-0.5, -0.05) {
96 \draw[fill=green!20] (0, 0) rectangle (3, 1.0);
97 \foreach \x in { 0.2, 0.4, ..., 2.8 } \draw (\x, 0) -- ++(0, 1.0);
98 \uncover<2>{\draw[fill=yellow] (0.0, 0) rectangle ++(0.2, 1);}
99 \uncover<4>{\draw[fill=yellow] (0.2, 0) rectangle ++(0.2, 1);}
100 %% \foreach \y in { 0.0, 0.2, ..., 1.0 } \draw (0, \y) -- ++(3, 0);
104 \node (Y) at (1.5, 3.45) {
106 \uncover<3>{\draw[fill=orange] (0.0, 0) rectangle ++(0.2, 1.4);}
107 \uncover<4->{\draw[fill=green!20] (0.0, 0) rectangle ++(0.2, 1.4);}
108 \uncover<6->{\draw[fill=green!20] (0.0, 0) rectangle ++(3, 1.4);}
109 \uncover<5>{\draw[fill=orange] (0.2, 0) rectangle ++(0.2, 1.4);}
110 \draw (0, 0) rectangle (3, 1.4);
111 \foreach \x in { 0.2, 0.4, ..., 2.8 } \draw (\x, 0) -- ++(0, 1.4);
112 %% \foreach \y in { 0.0, 0.2, ..., 1.4 } \draw (0, \y) -- ++(3, 0);
116 \node[xscale=0.5,yslant=0.5] (K) at (3, 1.1) {
118 \draw[fill=green!20] (0, 0) rectangle (4, 1);
119 \uncover<2,4>{\draw[fill=yellow] (0, 0) rectangle (4, 1);}
120 \foreach \x in { 0.2, 0.4, ..., 3.8 } \draw (\x, 0) -- ++(0, 1);
121 %% \foreach \y in { 0.0, 0.2, ..., 1.0 } \draw (0, \y) -- ++(4, 0);
125 \node[left of=V,xshift=0.5cm,yshift=0.7cm] (Vl) {$V$};
126 \node[left of=Q,xshift=-0.8cm] (Ql) {$Q$};
127 \node (Al) at (A) {$A$};
128 \node[right of=K,xshift=-0.6cm,yshift=-0.6cm] (Kl) {$K$};
129 \node[right of=Y,xshift=0.8cm] (Yl) {$Y$};
131 % \uncover<1>{\draw[<->] (2, 0) -- ++ (0, 1) node[midway,right]{$d$};}
138 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
140 \begin{frame}[fragile]
142 A standard attention layer takes as input two sequences $X$ and $X'$
Y & = \underbrace{\softmax_{\text{row}} \left( \frac{Q K\transpose}{\sqrt{d}} \right)}_{A} V
152 When $X = X'$, this is \textbf{self attention}, otherwise \textbf{cross
Several such processes can be combined, in which case $Y$ is the
160 concatenation of the separate results. This is \textbf{multi-head