[pypy-svn] extradoc extradoc: - improve fixed-width font

cfbolz commits-noreply@bitbucket.org
Thu Mar 24 15:19:38 CET 2011


Author: Carl Friedrich Bolz <cfbolz@gmx.de>
Branch: extradoc
Changeset: r3393:6b0d386fb06e
Date: 2011-03-24 14:55 +0100
http://bitbucket.org/pypy/extradoc/changeset/6b0d386fb06e/

Log:	- improve fixed-width font
	- turn some of the code into figures

diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex
--- a/talk/icooolps2011/paper.tex
+++ b/talk/icooolps2011/paper.tex
@@ -6,91 +6,10 @@
 \usepackage{color}
 \usepackage{ulem}
 \usepackage{xspace}
+\usepackage[scaled=0.8]{beramono}
 \usepackage[utf8]{inputenc}
 
-\makeatletter
-\def\PY@reset{\let\PY@it=\relax \let\PY@bf=\relax%
-    \let\PY@ul=\relax \let\PY@tc=\relax%
-    \let\PY@bc=\relax \let\PY@ff=\relax}
-\def\PY@tok#1{\csname PY@tok@#1\endcsname}
-\def\PY@toks#1+{\ifx\relax#1\empty\else%
-    \PY@tok{#1}\expandafter\PY@toks\fi}
-\def\PY@do#1{\PY@bc{\PY@tc{\PY@ul{%
-    \PY@it{\PY@bf{\PY@ff{#1}}}}}}}
-\def\PY#1#2{\PY@reset\PY@toks#1+\relax+\PY@do{#2}}
-
-\def\PY@tok@gd{\def\PY@bc##1{\fcolorbox[rgb]{0.80,0.00,0.00}{1.00,0.80,0.80}{##1}}}
-\def\PY@tok@gu{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}}
-\def\PY@tok@gt{\def\PY@tc##1{\textcolor[rgb]{0.60,0.80,0.40}{##1}}}
-\def\PY@tok@gs{\let\PY@bf=\textbf}
-\def\PY@tok@gr{\def\PY@tc##1{\textcolor[rgb]{1.00,0.00,0.00}{##1}}}
-\def\PY@tok@cm{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}}
-\def\PY@tok@vg{\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}}
-\def\PY@tok@m{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@mh{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@cs{\let\PY@bf=\textbf\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}}
-\def\PY@tok@ge{\let\PY@it=\textit}
-\def\PY@tok@vc{\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}}
-\def\PY@tok@il{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@go{\def\PY@tc##1{\textcolor[rgb]{0.67,0.67,0.67}{##1}}}
-\def\PY@tok@cp{\def\PY@tc##1{\textcolor[rgb]{0.00,0.60,0.60}{##1}}}
-\def\PY@tok@gi{\def\PY@bc##1{\fcolorbox[rgb]{0.00,0.80,0.00}{0.80,1.00,0.80}{##1}}}
-\def\PY@tok@gh{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}}
-\def\PY@tok@ni{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.60,0.60,0.60}{##1}}}
-\def\PY@tok@nl{\def\PY@tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}}
-\def\PY@tok@nn{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.80,1.00}{##1}}}
-\def\PY@tok@no{\def\PY@tc##1{\textcolor[rgb]{0.20,0.40,0.00}{##1}}}
-\def\PY@tok@na{\def\PY@tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}}
-\def\PY@tok@nb{\def\PY@tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}}
-\def\PY@tok@nc{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.67,0.53}{##1}}}
-\def\PY@tok@nd{\def\PY@tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}}
-\def\PY@tok@ne{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.80,0.00,0.00}{##1}}}
-\def\PY@tok@nf{\def\PY@tc##1{\textcolor[rgb]{0.80,0.00,1.00}{##1}}}
-\def\PY@tok@si{\def\PY@tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}}
-\def\PY@tok@s2{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@vi{\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}}
-\def\PY@tok@nt{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}}
-\def\PY@tok@nv{\def\PY@tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}}
-\def\PY@tok@s1{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@gp{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,0.60}{##1}}}
-\def\PY@tok@sh{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@ow{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.00,0.00}{##1}}}
-\def\PY@tok@sx{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@bp{\def\PY@tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}}
-\def\PY@tok@c1{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}}
-\def\PY@tok@kc{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@c{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}}
-\def\PY@tok@mf{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@err{\def\PY@tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}\def\PY@bc##1{\colorbox[rgb]{1.00,0.67,0.67}{##1}}}
-\def\PY@tok@kd{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@ss{\def\PY@tc##1{\textcolor[rgb]{1.00,0.80,0.20}{##1}}}
-\def\PY@tok@sr{\def\PY@tc##1{\textcolor[rgb]{0.20,0.67,0.67}{##1}}}
-\def\PY@tok@mo{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@mi{\def\PY@tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}}
-\def\PY@tok@kn{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@o{\def\PY@tc##1{\textcolor[rgb]{0.33,0.33,0.33}{##1}}}
-\def\PY@tok@kr{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@s{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@kp{\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@w{\def\PY@tc##1{\textcolor[rgb]{0.73,0.73,0.73}{##1}}}
-\def\PY@tok@kt{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.47,0.53}{##1}}}
-\def\PY@tok@sc{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@sb{\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@k{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}}
-\def\PY@tok@se{\let\PY@bf=\textbf\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-\def\PY@tok@sd{\let\PY@it=\textit\def\PY@tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}}
-
-\def\PYZbs{\char`\\}
-\def\PYZus{\char`\_}
-\def\PYZob{\char`\{}
-\def\PYZcb{\char`\}}
-\def\PYZca{\char`\^}
-% for compatibility with earlier versions
-\def\PYZat{@}
-\def\PYZlb{[}
-\def\PYZrb{]}
-\makeatother
-
+\input{code/style.tex}
 
 \ifthenelse{\isundefined{\hypersetup}}{
   \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref}
@@ -100,7 +19,7 @@
 }
 
 \newboolean{showcomments}
-\setboolean{showcomments}{false}
+\setboolean{showcomments}{true}
 \ifthenelse{\boolean{showcomments}}
   {\newcommand{\nb}[2]{
     \fbox{\bfseries\sffamily\scriptsize#1}
@@ -167,31 +86,32 @@
 \end{abstract}
 
 
+%___________________________________________________________________________
 \section{Introduction}
 
 
+%___________________________________________________________________________
 \section{The PyPy Project}
 \label{sect:pypy}
 
 XXX
+\cite{armin_rigo_pypys_2006}
 
 
+%___________________________________________________________________________
 \section{Tracing JIT Compilers}
 \label{sect:tracing}
 
 XXX
 
+%___________________________________________________________________________
 \section{Controlling The Extent of Tracing}
 
-The question I was asked most often during my recent \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{US trip} was how exactly
+XXX how exactly
 the hints work that interpreter authors can use to improve the execution speed
-of the programs running on their interpreters. Since those hints are not really
-documented all that well, I decided to write blog posts about them. This is the
-first one.
+of the programs running on their interpreters?
 
 
-%___________________________________________________________________________
-
 \subsection{Background}
 
 First, let's recap some basics: PyPy's approach to implementing dynamic
@@ -201,7 +121,7 @@
 large number of generated C functions and some data. Similarly, the user
 program consists of functions in the language the interpreter executes.
 
-As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a
+XXX As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a
 meta-tracer. Since we want to re-use our tracer for a variety of languages, we
 don't trace the execution of the user program, but instead trace the execution
 of the \emph{interpreter} that is running the program. This means that the traces
@@ -230,8 +150,6 @@
 in the user program.
 
 
-%___________________________________________________________________________
-
 \subsection{How Far Should Tracing Go}
 
 When the tracer encounters a function call at the interpreter level, e.g. the
@@ -282,8 +200,6 @@
 meaning that the trace with unrolling is not run to completion in most cases.
 
 
-%___________________________________________________________________________
-
 \subsection{Influencing the Default Behaviour}
 
 Sometimes the default behaviour is not actually what is wanted. This is
@@ -304,12 +220,10 @@
 
 If the interpreter author finds false negatives or false positives, she can fix
 that by applying a hint to the tracer. These hints take the form of function
-decorators (which both live in the \texttt{pypy.rlib.jit} module). In the next two
-subsections I will describe these two function decorators and their use.
+decorators (which both live in the \Verb|pypy.rlib.jit| module). In the next two
+subsections we describe these two function decorators and their use.
 
 
-%___________________________________________________________________________
-
 \subsubsection{Unrolling Functions With Loops}
 
 The first decorator, used to fix false negatives, is the \texttt{unroll\_safe}
@@ -345,15 +259,13 @@
 with the \texttt{unroll\_safe} decorator.
 
 
-%___________________________________________________________________________
-
 \subsubsection{Preventing the Tracing of Functions}
 
 The second decorator \texttt{dont\_look\_inside} is used to fix false positives. It
 tells the JIT to never trace into the decorated function and just always produce
 a residual call instead. This decorator is in many ways less important than the
-unrolling one (except for a special situation that I will describe in a
-follow-up post). It is used if tracing into a function is not expected to yield
+unrolling one (except for a special situation that is described in
+Section XXX). It is used if tracing into a function is not expected to yield
 any speed benefits, because the optimizer will not be able to improve it much.
 This is often the case if the called helper function does not contain any
 ``dynamic'' behaviour. In such a situation it is better to just leave the function
@@ -365,33 +277,29 @@
 \texttt{dont\_look\_inside}.
 
 
-%___________________________________________________________________________
-
 \subsection{Conclusion}
 
-In this post we discussed two hints that can be used to control precisely which
+In this section we discussed two hints that can be used to control precisely which
 parts of the interpreter should be meta-traced. If these hints are used
 carefully, this can go a long way to making the interpreter produce traces that
 contain exactly the interesting part of the execution, and will contain calls to
 the functions that can not be optimized by tracing techniques.
 
-In the next part of this series I will discuss a different set of hints that can
+In the next section we discuss a different set of hints that can
 be used to strongly optimize traces.
 
+%___________________________________________________________________________
 
-% Document title
-\section{Controlling the Tracing of an Interpreter With Hints, Part 2: Controlling Optimization}
+\section{Controlling Optimization}
 
-This is part 2 of a series on how to speed up an interpreter written with PyPy
-by adding JIT hints to the interpreter. Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control the
-extent of tracing}. In this post I will describe how to add hints that
+The last section described how to control the
+extent of tracing. In this section we will describe how to add hints that
 influence the optimizer.  If applied correctly these techniques can give
 really big speedups by pre-computing parts of what happens at runtime. On the other
 hand, if applied incorrectly they might lead to code bloat, thus making the
 resulting program actually slower.
 
 
-%___________________________________________________________________________
 
 \subsection{Background}
 
@@ -402,37 +310,27 @@
 JIT because it only has to deal with linear traces.  Among the techniques:
 %
 \begin{itemize}
-
-\item \href{http://en.wikipedia.org/wiki/Constant_folding}{constant folding}
-
-\item \href{http://en.wikipedia.org/wiki/Common_subexpression_elimination}{common subexpression elimination}
-
-\item allocation removal, as described in the paper that I recently \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{presented at
-PEPM}
-
-\item store/load propagation
-
-\item \href{http://morepypy.blogspot.com/2011/01/loop-invariant-code-motion.html}{loop invariant code motion}
-
+    \item constant folding
+    \item common subexpression elimination
+    \item allocation removal \cite{bolz_allocation_2011}
+    \item store/load propagation
+    \item loop invariant code motion
 \end{itemize}
 
 In some places it turns out that if the interpreter author rewrites some parts
 of the interpreter with these optimizations in mind the traces that are produced
 by the optimizer can be vastly improved.
 
-In this post I will describe two hints that allow the interpreter author to
+In this section we describe two hints that allow the interpreter author to
 increase the optimization opportunities for constant folding. For constant
 folding to work, two conditions need
 to be met:
 %
 \begin{itemize}
-
-\item the arguments of an operation actually need to all be constant,
-i.e. statically known by the optimizer
-
-\item the operation needs to be \emph{pure}, i.e. always yield the same result given
-the same arguments.
-
+    \item the arguments of an operation actually need to all be constant,
+    i.e. statically known by the optimizer
+    \item the operation needs to be \emph{pure}, i.e. always yield the same result given
+    the same arguments.
 \end{itemize}
 
 The PyPy JIT generator automatically detects the majority of these conditions.
@@ -444,8 +342,6 @@
 RPython source of the interpreter. Normal Python users will never see them.
 
 
-%___________________________________________________________________________
-
 \subsection{Where Do All the Constants Come From}
 
 It is worth clarifying what is a ``constant'' in this context.  A variable of
@@ -579,12 +475,10 @@
 program. An example would be the types of variables in a user function. Even
 though in principle the argument to a Python function could be any Python type,
 in practice the argument types tend to not vary often. Therefore it is possible to
-promote the types. In the next blog post I will give a complete example of how
+promote the types. The next section will present a complete example of how
 this works.
 
 
-%___________________________________________________________________________
-
 \subsection{Declaring New Pure Operations}
 
 In the last section we saw a way to turn arbitrary variables into constants. All
@@ -683,8 +577,6 @@
 annotation.
 
 
-%___________________________________________________________________________
-
 \subsubsection{Observably Pure Functions}
 
 Why can't we simply write an analysis to find out that the \texttt{x} fields of the
@@ -700,7 +592,6 @@
 of this function needs to be annotated.
 
 
-%___________________________________________________________________________
 
 \subsubsection{Immutable Fields}
 
@@ -711,23 +602,21 @@
 to using getters and annotating them with \texttt{purefunction}.
 
 
-%___________________________________________________________________________
 
 \subsection{Conclusion}
 
-In this blog post I explained two more hints that can be used in the source code
+In this section we presented two more hints that can be used in the source code
 of the interpreter. They are used to influence what the optimizer does with the
-trace. I realize the examples given here are a bit too small, in the next
-installment I will give a worked-out example that puts all the pieces together.
+trace. The examples given here are a bit too small, the next
+section gives a worked-out example that puts all the pieces together.
 
-\section{Controlling the Tracing of an Interpreter With Hints, Part 3: Putting Things Together}
+%___________________________________________________________________________
 
-This is part 3 of the series on how to speed up an interpreter written with
-PyPy by adding JIT hints to the interpreter. Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control
-the extent of tracing}. Part 2 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with_15.html}{influence the optimizer with
-promotion and pure functions}. In this post I describe a worked-out example of
+\section{Putting Things Together}
+
+In this section we describe a worked-out example of
 a small object model for a dynamic language and how to make it efficient using
-the hints described in the previous posts.
+the hints described in the previous sections.
 
 
 %___________________________________________________________________________
@@ -739,7 +628,7 @@
 dictionaries everywhere. Let's look at an example of how the JIT can be made to
 optimize such operations.
 
-For the purpose of this blog post we will use a very simple and bare-bones
+For the purpose of this section we will use a very simple and bare-bones
 object model that just supports very simple classes and instances, without any
 inheritance or any fancy features. The model has classes, which contain methods.
 Instances have a class. Instances have their own attributes. When looking up an
@@ -748,45 +637,13 @@
 
 To implement this object model, we could use the following RPython code as part
 of the interpreter source code:
-\begin{Verbatim}[commandchars=\\\{\}]
-\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
-    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
 
-    \PY{k}{def} \PY{n+nf}{instantiate}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:}
-        \PY{k}{return} \PY{n}{Instance}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}
+\begin{figure}
+\input{code/interpreter-slow.tex}
+\caption{Original Version of a Simple Object Model}
+\label{fig:interpreter-slow}
+\end{figure}
 
-    \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:}
-            \PY{k}{return} \PY{n}{result}
-        \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-
-    \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value}
-
-
-\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
-    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
-
-    \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:}
-            \PY{k}{return} \PY{n}{result}
-        \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-
-    \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value}
-
-    \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{k}{try}\PY{p}{:}
-            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:}
-            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-\end{Verbatim}
 
 In this straightforward implementation the methods and attributes are just
 stored in dictionaries on the classes/instances. While this object model is very
@@ -806,34 +663,12 @@
 \end{Verbatim}
 
 The trace could look like this:
-\begin{Verbatim}[commandchars=\\\{\}]
-\PY{c}{# inst.getattr("a")}
-\PY{n}{attributes1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes}
-\PY{n}{result1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)}
-\PY{n}{guard}\PY{p}{(}\PY{n}{result1} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)}
 
-\PY{c}{# inst.getattr("b")}
-\PY{n}{attributes2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes}
-\PY{n}{v1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)}
-\PY{n}{guard}\PY{p}{(}\PY{n}{v1} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)}
-\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls}
-\PY{n}{methods1} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods}
-\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)}
-\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)}
-\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2}
-
-\PY{c}{# inst.getattr("c")}
-\PY{n}{attributes3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes}
-\PY{n}{v3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)}
-\PY{n}{guard}\PY{p}{(}\PY{n}{v3} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)}
-\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls}
-\PY{n}{methods2} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods}
-\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)}
-\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)}
-
-\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3}
-\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)}
-\end{Verbatim}
+\begin{figure}
+\input{code/trace1.tex}
+\caption{Trace Through the Object Model}
+\label{fig:trace1}
+\end{figure}
 
 In this example, the attribute \texttt{a} is found on the instance, but the
 attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains
@@ -846,7 +681,7 @@
 
 The first step in making \texttt{getattr} faster in our object model is to optimize
 away the dictionary lookups on the instances. The hints we have looked at in the
-two earlier blog posts don't seem to help with the current object model. There is
+two previous sections don't seem to help with the current object model. There is
 no pure function to be seen, and the instance is not a candidate for promotion,
 because there tend to be many instances.
 
@@ -859,61 +694,16 @@
 
 Therefore it makes sense to factor the layout information out of the instance
 implementation into a shared object. This shared layout object is called a
-\emph{map}. Maps are an old idea that comes originally from the SELF language. They are
-also used by many JavaScript implementations such as V8. I've \href{http://morepypy.blogspot.com/2010/11/efficiently-implementing-python-objects.html}{written about maps
-before}, so I won't explain them fully again.
+\emph{map}. Maps are an old idea that comes originally from the SELF language \cite{XXX}. They are
+also used by many JavaScript implementations such as V8.
 
 The rewritten \texttt{Instance} class using maps looks like this:
-\begin{Verbatim}[commandchars=\\\{\}]
-\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
-    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
 
-    \PY{n+nd}{@purefunction}
-    \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)}
-
-    \PY{n+nd}{@purefunction}
-    \PY{k}{def} \PY{n+nf}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:}
-            \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)}
-            \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)}
-            \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)}
-            \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{newmap}
-        \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]}
-
-
-\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)}
-
-\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
-    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]}
-
-    \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)}
-        \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:}
-            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]}
-        \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-
-    \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:}
-        \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)}
-        \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:}
-            \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value}
-            \PY{k}{return}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)}
-
-    \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
-        \PY{k}{try}\PY{p}{:}
-            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-        \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:}
-            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)}
-\end{Verbatim}
+\begin{figure}
+\input{code/map.tex}
+\caption{Simple Object Model With Maps}
+\label{fig:maps}
+\end{figure}
 
 Instances no longer use dictionaries to store their fields. Instead, they have a
 reference to a map, which maps field names to indexes into a storage list. The
@@ -1175,13 +965,11 @@
 
 \subsection{Conclusion}
 
-In this post I showed how to use \texttt{purefunction} and \texttt{promote} to make a
+In this section we saw how to use \texttt{purefunction} and \texttt{promote} to make a
 small but still relevant dynamic object model no longer use any dictionary lookups
 after tracing. Instead a number of guards are inserted into the
 trace to check whether the assumptions about the objects are still true. This
-makes operations on objects seriously faster. I plan to write another small post
-that shows the speed benefits for PyPy's Python interpreter for exactly these
-operations.
+makes operations on objects seriously faster.
 
 \section{Evaluation}
 \label{sect:evaluation}

diff --git a/talk/icooolps2011/Makefile b/talk/icooolps2011/Makefile
--- a/talk/icooolps2011/Makefile
+++ b/talk/icooolps2011/Makefile
@@ -1,5 +1,5 @@
 
-jit-hints.pdf: paper.tex paper.bib
+jit-hints.pdf: paper.tex paper.bib code/interpreter-slow.tex code/map.tex
 	pdflatex paper
 	bibtex paper
 	pdflatex paper
@@ -11,3 +11,6 @@
 
 xpdf: jit-hints.pdf
 	xpdf jit-hints.pdf &
+
+%.tex: %.py
+	pygmentize -l python -o $@ $<

diff --git a/talk/icooolps2011/code/map.tex b/talk/icooolps2011/code/map.tex
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/code/map.tex
@@ -0,0 +1,49 @@
+\begin{Verbatim}[commandchars=\\\{\}]
+\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
+    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}}
+
+    \PY{n+nd}{@purefunction}
+    \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
+        \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)}
+
+    \PY{n+nd}{@purefunction}
+    \PY{k}{def} \PY{n+nf}{add\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
+        \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:}
+            \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)}
+            \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)}
+            \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)}
+            \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{newmap}
+        \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]}
+
+\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)}
+
+\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:}
+    \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]}
+
+    \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
+        \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)}
+        \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+        \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:}
+            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]}
+        \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+
+    \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:}
+        \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)}
+        \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+        \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:}
+            \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value}
+            \PY{k}{return}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{add\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+        \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)}
+
+    \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:}
+        \PY{k}{try}\PY{p}{:}
+            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+        \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:}
+            \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)}
+\end{Verbatim}

diff --git a/talk/icooolps2011/code/style.tex b/talk/icooolps2011/code/style.tex
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/code/style.tex
@@ -0,0 +1,57 @@
+
+\makeatletter
+\def\PY at reset{\let\PY at it=\relax \let\PY at bf=\relax%
+    \let\PY at ul=\relax \let\PY at tc=\relax%
+    \let\PY at bc=\relax \let\PY at ff=\relax}
+\def\PY at tok#1{\csname PY at tok@#1\endcsname}
+\def\PY at toks#1+{\ifx\relax#1\empty\else%
+    \PY at tok{#1}\expandafter\PY at toks\fi}
+\def\PY at do#1{\PY at bc{\PY at tc{\PY at ul{%
+    \PY at it{\PY at bf{\PY at ff{#1}}}}}}}
+\def\PY#1#2{\PY at reset\PY at toks#1+\relax+\PY at do{#2}}
+
+\def\PY at tok@gu{\let\PY at bf=\textbf}
+\def\PY at tok@gs{\let\PY at bf=\textbf}
+\def\PY at tok@cm{\let\PY at it=\textit}
+\def\PY at tok@gp{\let\PY at bf=\textbf}
+\def\PY at tok@ge{\let\PY at it=\textit}
+\def\PY at tok@cs{\let\PY at it=\textit}
+\def\PY at tok@gh{\let\PY at bf=\textbf}
+\def\PY at tok@ni{\let\PY at bf=\textbf}
+\def\PY at tok@nn{\let\PY at bf=\textbf}
+\def\PY at tok@s2{\let\PY at it=\textit}
+\def\PY at tok@s1{\let\PY at it=\textit}
+\def\PY at tok@nc{\let\PY at bf=\textbf}
+\def\PY at tok@ne{\let\PY at bf=\textbf}
+\def\PY at tok@si{\let\PY at bf=\textbf\let\PY at it=\textit}
+\def\PY at tok@nt{\let\PY at bf=\textbf}
+\def\PY at tok@ow{\let\PY at bf=\textbf}
+\def\PY at tok@c1{\let\PY at it=\textit}
+\def\PY at tok@kc{\let\PY at bf=\textbf}
+\def\PY at tok@c{\let\PY at it=\textit}
+\def\PY at tok@sx{\let\PY at it=\textit}
+\def\PY at tok@err{\def\PY at bc##1{\fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{##1}}}
+\def\PY at tok@kd{\let\PY at bf=\textbf}
+\def\PY at tok@ss{\let\PY at it=\textit}
+\def\PY at tok@sr{\let\PY at it=\textit}
+\def\PY at tok@k{\let\PY at bf=\textbf}
+\def\PY at tok@kn{\let\PY at bf=\textbf}
+\def\PY at tok@kr{\let\PY at bf=\textbf}
+\def\PY at tok@s{\let\PY at it=\textit}
+\def\PY at tok@sh{\let\PY at it=\textit}
+\def\PY at tok@sc{\let\PY at it=\textit}
+\def\PY at tok@sb{\let\PY at it=\textit}
+\def\PY at tok@se{\let\PY at bf=\textbf\let\PY at it=\textit}
+\def\PY at tok@sd{\let\PY at it=\textit}
+
+\def\PYZbs{\char`\\}
+\def\PYZus{\char`\_}
+\def\PYZob{\char`\{}
+\def\PYZcb{\char`\}}
+\def\PYZca{\char`\^}
+% for compatibility with earlier versions
+\def\PYZat{@}
+\def\PYZlb{[}
+\def\PYZrb{]}
+\makeatother
+

diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/paper.bib
@@ -0,0 +1,261 @@
+
+ at inproceedings{carl_friedrich_bolz_towards_????,
+	series = {{LNCS} 6037  to appear},
+	title = {Towards {Just-In-Time} Partial Evaluation of Prolog},
+	abstract = {We introduce a just-in-time specializer for Prolog. Just-in-time
+specialization attempts to unify the concepts and benefits of
+partial evaluation {(PE)} and just-in-time {(JIT)} compilation. It is a variant
+of {PE} that occurs purely at runtime, which lazily generates residual code
+and is constantly driven by runtime feedback.
+Our prototype is an on-line just-in-time partial evaluator. A major focus
+of our work is to remove the overhead incurred when executing an
+interpreter written in Prolog. It improves over classical offline {PE} by
+requiring almost no heuristics nor hints from the author of the interpreter;
+it also avoids most termination issues due to interleaving execution and
+specialization. We evaluate the performance of our prototype on a small
+number of benchmarks.},
+	booktitle = {Logic-based Program Synthesis and Transformation {(LOPSTR'2009)}},
+	publisher = {{Springer-Verlag}},
+	author = {Carl Friedrich Bolz and Michael Leuschel and Armin Rigo}
+},
+
+ at phdthesis{cuni_high_2010,
+	title = {High performance implementation of Python for {CLI/.NET} with {JIT} compiler generation for dynamic languages.},
+	school = {Dipartimento di Informatica e Scienze {dell'Informazione,} University of Genova},
+	author = {Antonio Cuni},
+	year = {2010},
+	note = {Technical Report {DISI-TH-2010-05}}
+},
+
+ at inproceedings{carl_friedrich_bolz_towards_2010,
+	address = {Hagenberg, Austria},
+	title = {Towards a Jitting {VM} for Prolog execution},
+	isbn = {978-1-4503-0132-9},
+	url = {http://portal.acm.org/citation.cfm?id=1836102},
+	doi = {10.1145/1836089.1836102},
+	abstract = {Most Prolog implementations are implemented in low-level languages such as C and are based on a variation of the {WAM} instruction set, which enhances their performance but makes them hard to write. In addition, many of the more dynamic features of Prolog (like assert), despite their popularity, are not well supported. We present a high-level continuation-based Prolog interpreter based on the {PyPy} project. The {PyPy} project makes it possible to easily and efficiently implement dynamic languages. It provides tools that automatically generate a just-in-time compiler for a given interpreter of the target language, by using partial evaluation techniques. The resulting Prolog implementation is surprisingly efficient: it clearly outperforms existing interpreters of Prolog in high-level languages such as Java. Moreover, on some benchmarks, our system outperforms state-of-the-art {WAM-based} Prolog implementations. Our paper aims to show that declarative languages such as Prolog can indeed benefit from having a just-in-time compiler and that {PyPy} can form the basis for implementing programming languages other than Python.},
+	booktitle = {Proceedings of the 12th international {ACM} {SIGPLAN} symposium on Principles and practice of declarative programming},
+	publisher = {{ACM}},
+	author = {Carl Friedrich Bolz and Michael Leuschel and David Schneider},
+	year = {2010},
+	keywords = {interpreters, jit, logic programming, partial evaluation},
+	pages = {99--108}
+},
+
+ at inproceedings{garg_compiling_2010,
+	address = {Pittsburgh, Pennsylvania},
+	title = {Compiling Python to a hybrid execution environment},
+	isbn = {978-1-60558-935-0},
+	url = {http://portal.acm.org/citation.cfm?id=1735695&dl=GUIDE&coll=GUIDE&CFID=108695705&CFTOKEN=81778166},
+	doi = {10.1145/1735688.1735695},
+	abstract = {A new compilation framework enables the execution of numerical-intensive applications, written in Python, on a hybrid execution environment formed by a {CPU} and a {GPU.} This compiler automatically computes the set of memory locations that need to be transferred to the {GPU,} and produces the correct mapping between the {CPU} and the {GPU} address spaces. Thus, the programming model implements a virtual shared address space. This framework is implemented as a combination of {unPython,} an ahead-of-time compiler from {Python/NumPy} to the C programming language, and {jit4GPU,} a just-in-time compiler from C to the {AMD} {CAL} interface. Experimental evaluation demonstrates that for some benchmarks the generated {GPU} code is 50 times faster than generated {OpenMP} code. The {GPU} performance also compares favorably with optimized {CPU} {BLAS} code for single-precision computations in most cases.},
+	booktitle = {Proceedings of the 3rd Workshop on {General-Purpose} Computation on Graphics Processing Units},
+	publisher = {{ACM}},
+	author = {Rahul Garg and Jos\'{e} Nelson Amaral},
+	year = {2010},
+	pages = {19--30}
+},
+
+ at inproceedings{bebenita_spur:_2010,
+	address = {{Reno/Tahoe,} Nevada, {USA}},
+	title = {{SPUR:} a trace-based {JIT} compiler for {CIL}},
+	isbn = {978-1-4503-0203-6},
+	shorttitle = {{SPUR}},
+	url = {http://portal.acm.org/citation.cfm?id=1869459.1869517&coll=GUIDE&dl=GUIDE&type=series&idx=SERIES318&part=series&WantType=Proceedings&title=OOPSLA%2FSPLASH&CFID=106280261&CFTOKEN=29377718},
+	doi = {10.1145/1869459.1869517},
+	abstract = {Tracing just-in-time compilers {(TJITs)} determine frequently executed traces (hot paths and loops) in running programs and focus their optimization effort by emitting optimized machine code specialized to these traces. Prior work has established this strategy to be especially beneficial for dynamic languages such as {JavaScript,} where the {TJIT} interfaces with the interpreter and produces machine code from the {JavaScript} trace.},
+	booktitle = {Proceedings of the {ACM} international conference on Object oriented programming systems languages and applications},
+	publisher = {{ACM}},
+	author = {Michael Bebenita and Florian Brandner and Manuel Fahndrich and Francesco Logozzo and Wolfram Schulte and Nikolai Tillmann and Herman Venter},
+	year = {2010},
+	keywords = {cil, dynamic compilation, javascript, just-in-time, tracing},
+	pages = {708--725}
+},
+
+ at article{bolz_allocation_2011,
+	series = {{PEPM} '11},
+	title = {Allocation removal by partial evaluation in a tracing {JIT}},
+	location = {Austin, Texas, {USA}},
+	doi = {10.1145/1929501.1929508},
+	abstract = {The performance of many dynamic language implementations suffers from high allocation rates and runtime type checks. This makes dynamic languages less applicable to purely algorithmic problems, despite their growing popularity. In this paper we present a simple compiler optimization based on online partial evaluation to remove object allocations and runtime type checks in the context of a tracing {JIT.} We evaluate the optimization using a Python {VM} and find that it gives good results for all our (real-life) benchmarks.},
+	journal = {Proceedings of the 20th {ACM} {SIGPLAN} workshop on Partial evaluation and program manipulation},
+	author = {Carl Friedrich Bolz and Antonio Cuni and Maciej Fija\l{}kowski and Michael Leuschel and Samuele Pedroni and Armin Rigo},
+	year = {2011},
+	note = {{ACM} {ID:} 1929508},
+	keywords = {code generation, experimentation, interpreters, languages, optimization, partial evaluation, performance, run-time environments, tracing jit},
+	pages = {43{\textendash}52}
+},
+
+ at inproceedings{chang_tracing_2009,
+	address = {Washington, {DC,} {USA}},
+	title = {Tracing for Web 3.0: Trace Compilation for the Next Generation Web Applications},
+	isbn = {978-1-60558-375-4},
+	shorttitle = {Tracing for web 3.0},
+	url = {http://portal.acm.org/citation.cfm?id=1508293.1508304},
+	doi = {10.1145/1508293.1508304},
+	abstract = {Today's web applications are pushing the limits of modern web browsers. The emergence of the browser as the platform of choice for rich client-side applications has shifted the use of in-browser {JavaScript} from small scripting programs to large computationally intensive application logic. For many web applications, {JavaScript} performance has become one of the bottlenecks preventing the development of even more interactive client side applications. While traditional just-in-time compilation is successful for statically typed virtual machine based languages like Java, compiling {JavaScript} turns out to be a challenging task. Many {JavaScript} programs and scripts are short-lived, and users expect a responsive browser during page loading. This leaves little time for compilation of {JavaScript} to generate machine code.},
+	booktitle = {Proceedings of the 2009 {ACM} {SIGPLAN/SIGOPS} International Conference on Virtual Execution Environments},
+	publisher = {{ACM}},
+	author = {Mason Chang and Edwin Smith and Rick Reitmaier and Michael Bebenita and Andreas Gal and Christian Wimmer and Brendan Eich and Michael Franz},
+	year = {2009},
+	keywords = {dynamically typed languages, forth, tamarin, trace trees, tracing, type specialization},
+	pages = {71--80}
+},
+
+ at phdthesis{carl_friedrich_bolz_automatic_2008,
+	type = {Master Thesis},
+	title = {Automatic {JIT} Compiler Generation with Runtime Partial Evaluation},
+	school = {{Heinrich-Heine-Universit\"{a}t} D\"{u}sseldorf},
+	author = {Carl Friedrich Bolz},
+	year = {2008}
+},
+
+ at inproceedings{davide_ancona_rpython:_2007,
+	address = {Montreal, Quebec, Canada},
+	title = {{RPython:} a step towards reconciling dynamically and statically typed {OO} languages},
+	isbn = {978-1-59593-868-8},
+	shorttitle = {{RPython}},
+	url = {http://portal.acm.org/citation.cfm?id=1297091},
+	doi = {10.1145/1297081.1297091},
+	abstract = {Although the C-based interpreter of Python is reasonably fast, implementations on the {CLI} or the {JVM} platforms offers some advantages in terms of robustness and interoperability. Unfortunately, because the {CLI} and {JVM} are primarily designed to execute statically typed, object-oriented languages, most dynamic language implementations cannot use the native bytecodes for common operations like method calls and exception handling; as a result, they are not able to take full advantage of the power offered by the {CLI} and {JVM.}},
+	booktitle = {Proceedings of the 2007 symposium on Dynamic languages},
+	publisher = {{ACM}},
+	author = {Davide Ancona and Massimo Ancona and Antonio Cuni and Nicholas D. Matsakis},
+	year = {2007},
+	keywords = {{JVM,} .net, Python},
+	pages = {53--64}
+},
+
+ at inproceedings{armin_rigo_pypys_2006,
+	address = {Portland, Oregon, {USA}},
+	title = {{PyPy's} approach to virtual machine construction},
+	isbn = {{1-59593-491-X}},
+	url = {http://portal.acm.org/citation.cfm?id=1176753},
+	doi = {10.1145/1176617.1176753},
+	abstract = {The {PyPy} project seeks to prove both on a research and a practical level the feasibility of constructing a virtual machine {(VM)} for a dynamic language in a dynamic language - in this case, Python. The aim is to translate (i.e. compile) the {VM} to arbitrary target environments, ranging in level from {C/Posix} to {Smalltalk/Squeak} via Java and {CLI/.NET,} while still being of reasonable efficiency within these {environments.A} key tool to achieve this goal is the systematic reuse of the Python language as a system programming language at various levels of our architecture and translation process. For each level, we design a corresponding type system and apply a generic type inference engine - for example, the garbage collector is written in a style that manipulates simulated pointer and address objects, and when translated to C these operations become C-level pointer and address instructions.},
+	booktitle = {Companion to the 21st {ACM} {SIGPLAN} conference on Object-oriented programming systems, languages, and applications},
+	publisher = {{ACM}},
+	author = {Armin Rigo and Samuele Pedroni},
+	year = {2006},
+	keywords = {metacircularity, Python, retargettable code generation, type inference, {VM}},
+	pages = {944--953}
+},
+
+ at article{georges_statistically_2007,
+	title = {Statistically rigorous java performance evaluation},
+	volume = {42},
+	url = {http://portal.acm.org/citation.cfm?id=1297105.1297033},
+	doi = {10.1145/1297105.1297033},
+	abstract = {Java performance is far from being trivial to benchmark because it is affected by various factors such as the Java application, its input, the virtual machine, the garbage collector, the heap size, etc. In addition, non-determinism at run-time causes the execution time of a Java program to differ from run to run. There are a number of sources of non-determinism such as {Just-In-Time} {(JIT)} compilation and optimization in the virtual machine {(VM)} driven by timer-based method sampling, thread scheduling, garbage collection, and various.},
+	number = {10},
+	journal = {{SIGPLAN} Not.},
+	author = {Andy Georges and Dries Buytaert and Lieven Eeckhout},
+	year = {2007},
+	keywords = {benchmarking, data analysis, methodology, statistics},
+	pages = {57--76},
+	annote = {{{\textless}p{\textgreater}The} paper evaluates the various ways in which a number of Java papers do their Java benchmarks. It then proposes a statistically correct way to do this and compares common approaches against the statistically correct way. Especially if the results of two alternatives are very close together, many common approaches can lead to systematic errors.{\textless}/p{\textgreater}}
+},
+
+ at inproceedings{andreas_gal_trace-based_2009,
+	title = {Trace-based {Just-in-Time} Type Specialization for Dynamic Languages},
+	booktitle = {{PLDI}},
+	author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and Blake Kaplan and Graydon Hoare and David Mandelin and Boris Zbarsky and Jason Orendorff and Michael Bebenita and Mason Chang and Michael Franz and Edwin Smith and Rick Reitmaier and Mohammad Haghighat},
+	year = {2009},
+	keywords = {toappear}
+},
+
+ at inproceedings{bolz_tracing_2009,
+	address = {Genova, Italy},
+	title = {Tracing the meta-level: {PyPy's} tracing {JIT} compiler},
+	isbn = {978-1-60558-541-3},
+	shorttitle = {Tracing the meta-level},
+	url = {http://portal.acm.org/citation.cfm?id=1565827},
+	doi = {10.1145/1565824.1565827},
+	abstract = {We attempt to apply the technique of Tracing {JIT} Compilers in the context of the {PyPy} project, i.e., to programs that are interpreters for some dynamic languages, including Python. Tracing {JIT} compilers can greatly speed up programs that spend most of their time in loops in which they take similar code paths. However, applying an unmodified tracing {JIT} to a program that is itself a bytecode interpreter results in very limited or no speedup. In this paper we show how to guide tracing {JIT} compilers to greatly improve the speed of bytecode interpreters. One crucial point is to unroll the bytecode dispatch loop, based on two kinds of hints provided by the implementer of the bytecode interpreter. We evaluate our technique by applying it to two {PyPy} interpreters: one is a small example, and the other one is the full Python interpreter.},
+	booktitle = {Proceedings of the 4th workshop on the Implementation, Compilation, Optimization of {Object-Oriented} Languages and Programming Systems},
+	publisher = {{ACM}},
+	author = {Carl Friedrich Bolz and Antonio Cuni and Maciej Fija\l{}kowski and Armin Rigo},
+	year = {2009},
+	pages = {18--25}
+},
+
+ at techreport{armin_rigo_jit_2007,
+	title = {{JIT} Compiler Architecture},
+	url = {http://codespeak.net/pypy/dist/pypy/doc/index-report.html},
+	abstract = {{PyPy{\textquoteright}s} translation tool-chain {\textendash} from the interpreter written in {RPython} to generated {VMs} for low-level platforms {\textendash} is now able to extend those {VMs} with an automatically generated dynamic compiler, derived from the interpreter. This is achieved by a pragmatic application of partial evaluation techniques guided by a few hints added to the source of the interpreter. Crucial for the effectiveness of dynamic compilation is the use of run-time information to improve compilation results: in our approach, a novel powerful primitive called {\textquotedblleft}promotion{\textquotedblright} that {\textquotedblleft}promotes{\textquotedblright} run-time values to compile-time is used to that effect. In this report, we describe it along with other novel techniques that allow the approach to scale to something as large as {PyPy{\textquoteright}s} Python interpreter.},
+	number = {D08.2},
+	institution = {{PyPy}},
+	author = {Armin Rigo and Samuele Pedroni},
+	month = may,
+	year = {2007}
+},
+
+ at article{bala_dynamo:_2000,
+	title = {Dynamo: a transparent dynamic optimization system},
+	volume = {35},
+	shorttitle = {Dynamo},
+	url = {http://citeseer.ist.psu.edu/bala00dynamo.html},
+	number = {5},
+	journal = {{ACM} {SIGPLAN} Notices},
+	author = {Vasanth Bala and Evelyn Duesterwald and Sanjeev Banerjia},
+	year = {2000},
+	keywords = {toread},
+	pages = {1--12}
+},
+
+ at inproceedings{gal_hotpathvm:_2006,
+	address = {Ottawa, Ontario, Canada},
+	title = {{HotpathVM:} an effective {JIT} compiler for resource-constrained devices},
+	isbn = {1-59593-332-6},
+	shorttitle = {{HotpathVM}},
+	url = {http://portal.acm.org/citation.cfm?doid=1134760.1134780},
+	doi = {10.1145/1134760.1134780},
+	abstract = {We present a just-in-time compiler for a Java {VM} that is small enough to fit on resource-constrained devices, yet is surprisingly effective. Our system dynamically identifies traces of frequently executed bytecode instructions (which may span several basic blocks across several methods) and compiles them via Static Single Assignment {(SSA)} construction. Our novel use of {SSA} form in this context allows to hoist instructions across trace side-exits without necessitating expensive compensation code in off-trace paths. The overall memory consumption (code and data) of our system is only 150 {kBytes,} yet benchmarks show a speedup that in some cases rivals heavy-weight just-in-time compilers.},
+	booktitle = {Proceedings of the 2nd international conference on Virtual execution environments},
+	publisher = {{ACM}},
+	author = {Andreas Gal and Christian W. Probst and Michael Franz},
+	year = {2006},
+	keywords = {dynamic compilation, embedded, software trace scheduling, {SSA,} {VM}},
+	pages = {144--153}
+},
+
+ at inproceedings{carl_friedrich_bolz_how_2007,
+	title = {How to not write a Virtual Machine},
+	abstract = {Typical modern dynamic languages have a growing number of implementations. We explore the reasons for this situation, and the limitations it imposes on open source or academic communities that lack the resources to fine-tune and maintain them all. It is sometimes proposed that implementing dynamic languages on top of a standardized general-purpose object-oriented virtual machine (like Java or {.NET)} would help reduce this burden. We propose a complementary alternative to writing custom virtual machine {(VMs)} by hand, validated by the {PyPy} project: flexibly generating {VMs} from a high-level "specification",
+inserting features and low-level details automatically {\textendash} including good just-in-time compilers tuned to the dynamic language at hand.
+We believe this to be ultimately a better investment of efforts than the development of more and more advanced general-purpose object
+oriented {VMs.} In this paper we compare these two approaches in detail.},
+	booktitle = {Proceedings of the 3rd Workshop on Dynamic Languages and Applications {(DYLA} 2007)},
+	author = {Carl Friedrich Bolz and Armin Rigo},
+	year = {2007}
+},
+
+ at inproceedings{rigo_representation-based_2004,
+	address = {Verona, Italy},
+	title = {Representation-based just-in-time specialization and the Psyco prototype for Python},
+	isbn = {1-58113-835-0},
+	url = {http://portal.acm.org/citation.cfm?id=1014010},
+	doi = {10.1145/1014007.1014010},
+	abstract = {A powerful application of specialization is to remove interpretative overhead: a language can be implemented with an interpreter, whose performance is then improved by specializing it for a given program source. This approach is only moderately successful with very high level languages, where the operation of each single step can be highly dependent on run-time data and context. In the present paper, the Psyco prototype for the Python language is presented. It introduces two novel techniques. The first is just-in-time specialization, or specialization by need, which introduces the "unlifting" ability for a value to be promoted from run-time to compile-time during specialization -- the inverse of the lift operator of partial evaluation. Its presence gives an unusual and powerful perspective on the specialization process. The second technique is representations, a theory of data-oriented specialization generalizing the traditional specialization domains (i.e. the compile-time/run-time dichotomy).},
+	booktitle = {Proceedings of the 2004 {ACM} {SIGPLAN} symposium on Partial evaluation and semantics-based program manipulation},
+	publisher = {{ACM}},
+	author = {Armin Rigo},
+	year = {2004},
+	keywords = {{JIT,} Python},
+	pages = {15--26}
+},
+
+ at incollection{carl_friedrich_bolz_back_2008,
+	title = {Back to the Future in One Week {\textemdash} Implementing a Smalltalk {VM} in {PyPy}},
+	url = {http://dx.doi.org/10.1007/978-3-540-89275-5_7},
+	abstract = {We report on our experiences with the Spy project, including implementation details and benchmark results. Spy is a re-implementation of the Squeak (i.e. Smalltalk-80) {VM} using the {PyPy} toolchain. The {PyPy} project allows code written in {RPython,} a subset of Python, to be translated
+to a multitude of different backends and architectures. During the translation, many aspects of the implementation can be
+independently tuned, such as the garbage collection algorithm or threading implementation. In this way, a whole host of interpreters
+can be derived from one abstract interpreter definition. Spy aims to bring these benefits to Squeak, allowing for greater portability and, eventually, improved performance. The current
+Spy codebase is able to run a small set of benchmarks that demonstrate performance superior to many similar Smalltalk {VMs,} but
+which still run slower than in Squeak itself. Spy was built from scratch over the course of a week during a joint {Squeak-PyPy} Sprint in Bern last autumn.},
+	booktitle = {{Self-Sustaining} Systems},
+	author = {Carl Friedrich Bolz and Adrian Kuhn and Adrian Lienhard and Nicholas Matsakis and Oscar Nierstrasz and Lukas Renggli and Armin Rigo and Toon Verwaest},
+	year = {2008},
+	pages = {123--139}
+}
\ No newline at end of file

diff --git a/talk/icooolps2011/code/map.py b/talk/icooolps2011/code/map.py
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/code/map.py
@@ -0,0 +1,47 @@
+class Map(object):
+    def __init__(self):
+        self.indexes = {}
+        self.other_maps = {}
+
+    @purefunction
+    def getindex(self, name):
+        return self.indexes.get(name, -1)
+
+    @purefunction
+    def add_attribute(self, name):
+        if name not in self.other_maps:
+            newmap = Map()
+            newmap.indexes.update(self.indexes)
+            newmap.indexes[name] = len(self.indexes)
+            self.other_maps[name] = newmap
+        return self.other_maps[name]
+
+EMPTY_MAP = Map()
+
+class Instance(object):
+    def __init__(self, cls):
+        self.cls = cls
+        self.map = EMPTY_MAP
+        self.storage = []
+
+    def getfield(self, name):
+        map = hint(self.map, promote=True)
+        index = map.getindex(name)
+        if index != -1:
+            return self.storage[index]
+        raise AttributeError(name)
+
+    def write_attribute(self, name, value):
+        map = hint(self.map, promote=True)
+        index = map.getindex(name)
+        if index != -1:
+            self.storage[index] = value
+            return
+        self.map = map.add_attribute(name)
+        self.storage.append(value)
+
+    def getattr(self, name):
+        try:
+            return self.getfield(name)
+        except AttributeError:
+            return self.cls.find_method(name)

diff --git a/talk/icooolps2011/code/interpreter-slow.py b/talk/icooolps2011/code/interpreter-slow.py
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/code/interpreter-slow.py
@@ -0,0 +1,37 @@
+class Class(object):
+    def __init__(self, name):
+        self.name = name
+        self.methods = {}
+
+    def instantiate(self):
+        return Instance(self)
+
+    def find_method(self, name):
+        result = self.methods.get(name)
+        if result is not None:
+            return result
+        raise AttributeError(name)
+
+    def change_method(self, name, value):
+        self.methods[name] = value
+
+
+class Instance(object):
+    def __init__(self, cls):
+        self.cls = cls
+        self.attributes = {}
+
+    def getfield(self, name):
+        result = self.attributes.get(name)
+        if result is not None:
+            return result
+        raise AttributeError(name)
+
+    def write_attribute(self, name, value):
+        self.attributes[name] = value
+
+    def getattr(self, name):
+        try:
+            return self.getfield(name)
+        except AttributeError:
+            return self.cls.find_method(name)

diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex
new file mode 100644
--- /dev/null
+++ b/talk/icooolps2011/code/trace1.tex
@@ -0,0 +1,28 @@
+\begin{Verbatim}
+# inst.getattr("a")
+attributes1 = inst.attributes
+result1 = dict.get(attributes1, "a")
+guard(result1 is not None)
+
+# inst.getattr("b")
+attributes2 = inst.attributes
+v1 = dict.get(attributes2, "b")
+guard(v1 is None)
+cls1 = inst.cls
+methods1 = cls1.methods
+result2 = dict.get(methods1, "b")
+guard(result2 is not None)
+v2 = result1 + result2
+
+# inst.getattr("c")
+attributes3 = inst.attributes
+v3 = dict.get(attributes3, "c")
+guard(v3 is None)
+cls1 = inst.cls
+methods2 = cls1.methods
+result3 = dict.get(methods2, "c")
+guard(result3 is not None)
+
+v4 = v2 + result3
+return(v4)
+\end{Verbatim}


More information about the Pypy-commit mailing list