Browse source

Worked on improving report.

Taddeus Kroes 13 years ago
parent
commit
9c605e96fc
2 changed files with 293 additions and 167 deletions

  1. +101 −40
      docs/data/diagrams.tex
  2. +192 −127
      docs/report.tex

+101 −40
docs/data/diagrams.tex

@@ -50,8 +50,7 @@
 }
 
 \newcommand{\basicdiagram}[1]{
-    \begin{figure}[H]
-        \label{fig:basicdiagram}
+    \begin{figure}[h]
         \center
         \architecture{
             \node[block, dashed, below of=driver] (arch) {Architecture}
@@ -60,12 +59,12 @@
                 edge[linefrom] node[right] {gestures} (arch);
         }
         \caption{#1}
+        \label{fig:basicdiagram}
     \end{figure}
 }
 
 \newcommand{\driverdiagram}[1]{
-    \begin{figure}[H]
-        \label{fig:driverdiagram}
+    \begin{figure}[h]
         \center
         \architecture{
             \node[block, below of=driver] (eventdriver) {Event driver}
@@ -79,77 +78,139 @@
             \group{eventdriver}{eventdriver}{dummy}{analysis}{Architecture}
         }
         \caption{#1}
+        \label{fig:driverdiagram}
     \end{figure}
 }
 
-\newcommand{\widgetdiagram}[1]{
-    \begin{figure}[H]
-        \label{fig:widgetdiagram}
+\newcommand{\areadiagram}[1]{
+    \begin{figure}[h]
         \center
         \architecture{
             \node[block, below of=driver] (eventdriver) {Event driver}
                 edge[linefrom] node[right, near end] {driver-specific messages} (driver);
-            \node[block, below of=eventdriver] (widget) {Widget tree}
+            \node[block, below of=eventdriver] (area) {Areas}
                 edge[linefrom] node[right] {events} (eventdriver);
-            \node[block, right of=widget, xshift=7em, dashed] (analysis) {Event analysis}
-                edge[linefrom, bend right=10] node[above] {events} (widget)
-                edge[lineto, bend left=10] node[] {gestures} (widget);
-            \node[block, below of=widget] {Application}
-                edge[linefrom] node[right, near start] {gestures} (widget);
+            \node[block, right of=area, xshift=7em, dashed] (analysis) {Gesture detection}
+                edge[linefrom, bend right=10] node[above] {events} (area)
+                edge[lineto, bend left=10] node[] {gestures} (area);
+            \node[block, below of=area] {Application}
+                edge[linefrom] node[right, near start] {gestures} (area);
 
-            \group{eventdriver}{eventdriver}{analysis}{widget}{Architecture}
+            \group{eventdriver}{eventdriver}{analysis}{area}{Architecture}
         }
         \caption{#1}
+        \label{fig:areadiagram}
     \end{figure}
 }
 
 \newcommand{\trackerdiagram}[1]{
-    \begin{figure}[H]
-        \label{fig:trackerdiagram}
+    \begin{figure}[h]
         \center
         \architecture{
             \node[block, below of=driver] (eventdriver) {Event driver}
                 edge[linefrom] node[right, near end] {driver-specific messages} (driver);
-            \node[block, below of=eventdriver] (widget) {Widget tree}
+            \node[block, below of=eventdriver] (area) {Area tree}
                 edge[linefrom] node[right] {events} (eventdriver);
-            \node[block, right of=widget, xshift=7em] (tracker) {Gesture trackers}
-                edge[linefrom, bend right=10] node[above] {events} (widget)
-                edge[lineto, bend left=10] node[] {gestures} (widget);
-            \node[block, below of=widget] {Application}
-                edge[linefrom] node[right, near start] {gestures} (widget);
+            \node[block, right of=area, xshift=7em] (tracker) {Gesture trackers}
+                edge[linefrom, bend right=10] node[above] {events} (area)
+                edge[lineto, bend left=10] node[] {gestures} (area);
+            \node[block, below of=area] {Application}
+                edge[linefrom] node[right, near start] {gestures} (area);
 
-            \group{eventdriver}{eventdriver}{tracker}{widget}{Architecture}
+            \group{eventdriver}{eventdriver}{tracker}{area}{Architecture}
         }
         \caption{#1}
+        \label{fig:trackerdiagram}
     \end{figure}
 }
 
 \newcommand{\examplediagram}[1]{
-    \begin{figure}[H]
+    \begin{figure}[h]
         \center
         \architecture{
             \node[block, below of=driver] (eventdriver) {Event driver}
                 edge[linefrom] node[right, near end] {driver-specific messages} (driver);
 
-            \node[block, below of=eventdriver] (rootwidget) {Root widget}
+            \node[block, below of=eventdriver] (rootarea) {Root area}
                 edge[linefrom] (eventdriver);
 
-            \node[block, below of=rootwidget] (subwidget) {Button widget}
-                edge[linefrom] (rootwidget)
-                edge[lineto, bend right=45] node[right=3] {event propagation} (rootwidget);
-            \node[block, right of=rootwidget, xshift=5em] {\emph{pinch} tracker}
-                edge[lineto, dotted, bend left=10] (rootwidget)
-                edge[linefrom, bend right=10] (rootwidget);
-
-            \node[block, right of=subwidget, xshift=5em] (tracker) {\emph{tap} tracker}
-                edge[lineto, dotted, bend left=10] (subwidget)
-                edge[linefrom, bend right=10] (subwidget);
-            \node[block, below of=subwidget, yshift=-.5em] {Application}
-                edge[linefrom, dotted, bend left=60] (rootwidget)
-                edge[linefrom, dotted] (subwidget);
-
-            \group{subwidget}{eventdriver}{tracker}{subwidget}{Architecture}
+            \node[block, below of=rootarea] (subarea) {Button area}
+                edge[linefrom] (rootarea)
+                edge[lineto, bend right=45] node[right=3] {event propagation} (rootarea);
+            \node[block, right of=rootarea, xshift=5em] {\emph{pinch} tracker}
+                edge[lineto, dotted, bend left=10] (rootarea)
+                edge[linefrom, bend right=10] (rootarea);
+
+            \node[block, right of=subarea, xshift=5em] (tracker) {\emph{tap} tracker}
+                edge[lineto, dotted, bend left=10] (subarea)
+                edge[linefrom, bend right=10] (subarea);
+            \node[block, below of=subarea, yshift=-.5em] {Application}
+                edge[linefrom, dotted, bend left=60] (rootarea)
+                edge[linefrom, dotted] (subarea);
+
+            \group{subarea}{eventdriver}{tracker}{subarea}{Architecture}
         }
         \caption{#1}
     \end{figure}
 }
+
+\newcommand{\examplefigureone}{
+    \begin{figure}[h]
+        \center
+        % TODO: draw finger touch points as circles with rotating arrow
+        \begin{tikzpicture}
+            \draw node[draw, black, minimum width=190, minimum height=140] at (0,0) {};
+            \draw node[fill=gray!50, draw=black!70, minimum height=40, minimum width=40] at (-1,-1) {};
+            \draw node[draw=black!80, diamond, minimum height=50, minimum width=50] at (1.2,1) {};
+        \end{tikzpicture}
+        \caption{Two squares on the screen both listen to rotation.  The user
+        should be able to ``grab'' each of the squares independently and rotate
+        them at the same time.}
+        \label{fig:ex1}
+    \end{figure}
+}
+
+\newcommand{\examplefiguretwo}{
+    \begin{figure}[h]
+        \center
+        \begin{tikzpicture}
+            \draw node[draw, black, minimum width=190, minimum height=140] at (0,0) {};
+            \draw node[draw=black!80, diamond, minimum height=50, minimum width=50] at (0.5, 0.3) {};
+
+            \draw node[draw=black, diamond, dotted, minimum height=53, minimum width=53] at (0.5, 0.3) {};
+            \draw node[draw=black, dotted, circle, minimum height=80, minimum width=80] at (0.5, 0.3) {};
+
+            \fill (-0.3, -0.4) circle (0.15)
+                    (-0.4, 0.8) circle (0.15)
+                    (-0.1, 1.1) circle (0.15)
+                    (1.3, 0.9) circle (0.15);
+
+            \draw (0.15, 0.55) circle (0.15) -- (-0.3, -0.4);
+            \draw (0.15, 0.55) -- (-0.4, 0.8);
+            \draw (0.15, 0.55) -- (-0.1, 1.1);
+            \draw (0.15, 0.55) -- (1.3, 0.9);
+        \end{tikzpicture}
+        \caption{A square on the screen listens to rotation. The user can grab
+        and rotate the square by positioning fingers around (but not in) it and
+        and performing a rotating motion. An example pose of four fingers is
+        shown by the filled black circles. While the events all occur in the
+        dotted \emph{area}, the centroid of the rotation gesture is located in
+        the square.}
+        \label{fig:ex2}
+    \end{figure}
+}
+
+\newcommand{\examplefigurethree}{
+    \begin{figure}[h]
+        \center
+        \begin{tikzpicture}
+            \draw node[draw=black, minimum width=190, minimum height=140] (screen) at (0,0) {};
+            \draw node[fill=gray!50, draw=black!70, minimum height=100, minimum width=100] (screen) at (-0.1,-0.1) {};
+            \draw node[fill=white, draw=black!80, diamond, minimum height=50, minimum width=50] (screen) at (0.3,0.4) {};
+        \end{tikzpicture}
+        \caption{Two overlapping squares that listen to rotation. When the
+        white square is rotated, the gray square should keep its current
+        orientation and vice versa.}
+        \label{fig:ex3}
+    \end{figure}
+}

+192 −127
docs/report.tex

@@ -2,7 +2,7 @@
 
 \usepackage[english]{babel}
 \usepackage[utf8]{inputenc}
-\usepackage{hyperref,graphicx,float,tikz}
+\usepackage{hyperref,graphicx,tikz,subfigure}
 
 % Link colors
 \hypersetup{colorlinks=true,linkcolor=black,urlcolor=blue,citecolor=DarkGreen}
@@ -29,19 +29,19 @@
 \tableofcontents
 
 \chapter{Introduction}
+\label{chapter:introduction}
 
 Surface-touch devices have evolved from pen-based tablets to single-touch
 trackpads, to multi-touch devices like smartphones and tablets. Multi-touch
 devices enable a user to interact with software using hand gestures, making the
 interaction more expressive and intuitive. These gestures are more complex than
 primitive ``click'' or ``tap'' events that are used by single-touch devices.
-Some examples of more complex gestures are so-called ``pinch''\footnote{A
-``pinch'' gesture is formed by performing a pinching movement with multiple
-fingers on a multi-touch surface. Pinch gestures are often used to zoom in or
-out on an object.} and ``flick''\footnote{A ``flick'' gesture is the act of
-grabbing an object and throwing it in a direction on a touch surface, giving
-it momentum to move for some time after the hand releases the surface.}
-gestures.
+Some examples of more complex gestures are ``pinch''\footnote{A ``pinch''
+gesture is formed by performing a pinching movement with multiple fingers on a
+multi-touch surface. Pinch gestures are often used to zoom in or out on an
+object.} and ``flick''\footnote{A ``flick'' gesture is the act of grabbing an
+object and throwing it in a direction on a touch surface, giving it momentum to
+move for some time after the hand releases the surface.} gestures.
 
 The complexity of gestures is not limited to navigation in smartphones. Some
 multi-touch devices are already capable of recognizing objects touching the
@@ -60,32 +60,34 @@ gestures in an application.
 
 The main question in this research project is whether a generic architecture
 for the detection of complex interaction gestures can be designed, with the
-capability of managing the complexity of gesture detection logic.
+capability of managing the complexity of gesture detection logic. The ultimate
+goal would be to create an implementation of this architecture that can be
+extended to support a wide range of complex gestures. With such an
+implementation available, application developers would not need to reinvent
+gesture detection for every new gesture-based application.
 
 Application frameworks for surface-touch devices, such as Nokia's Qt \cite{qt},
-include the detection of commonly used gestures like \emph{pinch} gestures.
-However, this detection logic is dependent on the application framework.
-Consequently, an application developer who wants to use multi-touch interaction
-in an application is forced to choose an application framework that includes
-support for multi-touch gestures. Therefore, a requirement of the generic
-architecture is that it must not be bound to a specific application framework.
-Moreover, the set of supported gestures is limited by the application framework
-of choice. To incorporate a custom event in an application, the application
-developer needs to extend the framework. This requires extensive knowledge of
-the framework's architecture. Also, if the same gesture is used in another
-application that is based on another framework, the detection logic has to be
-translated for use in that framework. Nevertheless, application frameworks are
-a necessity when it comes to fast, cross-platform development. Therefore, the
-architecture design should aim to be compatible with existing frameworks, but
-provide a way to detect and extend gestures independent of the framework.
-
-An application framework is written in a specific programming language. A
-generic architecture should not limited to a single programming language. The
-ultimate goal of this thesis is to provide support for complex gesture
-interaction in any application. Thus, applications should be able to address
-the architecture using a language-independent method of communication. This
-intention leads towards the concept of a dedicated gesture detection
-application that serves gestures to multiple programs at the same time.
+already include the detection of commonly used gestures like \emph{pinch}
+gestures. However, this detection logic is dependent on the application
+framework. Consequently, an application developer who wants to use multi-touch
+interaction in an application is forced to use an application framework that
+includes support for multi-touch gestures. Moreover, the set of supported
+gestures is limited by the application framework of choice. To incorporate a
+custom event in an application, the application developer needs to extend the
+framework. This requires extensive knowledge of the framework's architecture.
+Also, if the same gesture is needed in another application that is based on
+another framework, the detection logic has to be translated for use in that
+framework. Nevertheless, application frameworks are a necessity when it comes
+to fast, cross-platform development. A generic architecture design should aim
+to be compatible with existing frameworks, and provide a way to detect and
+extend gestures independent of the framework.
+
+Application frameworks are written in a specific programming language. To
+support multiple frameworks and programming languages, the architecture should
+be accessible to applications through a language-independent method of
+communication. This intention leads towards the concept of a dedicated gesture
+detection application that serves gestures to multiple applications at the same
+time.
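
As an illustration of this idea, a minimal sketch of such language-independent
communication, assuming a hypothetical gesture daemon that pushes
newline-delimited JSON over a local socket (the port number, message fields and
subscription protocol are illustrative assumptions, not part of the thesis):

    import json
    import socket

    # Connect to a hypothetical gesture daemon on a local port; any language
    # with sockets and JSON can do the same, independent of the framework.
    sock = socket.create_connection(("localhost", 7777))

    # Subscribe to a gesture type; the daemon pushes matching gestures back.
    sock.sendall((json.dumps({"subscribe": "pinch"}) + "\n").encode())

    for line in sock.makefile():
        gesture = json.loads(line)  # e.g. {"type": "pinch", "scale": 1.4}
        print(gesture["type"], gesture)
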
 
 The scope of this thesis is limited to the detection of gestures on multi-touch
 surface devices. It presents a design for a generic gesture detection
@@ -93,10 +95,6 @@ architecture for use in multi-touch based applications. A reference
 implementation of this design is used in some test case applications, whose
 goal is to test the effectiveness of the design and detect its shortcomings.
 
-% FIXME: Should this still go in the introduction?
-% How can the input of the architecture be normalized? This is needed, because
-% multi-touch drivers use their own specific message format.
-
     \section{Structure of this document}
 
     % TODO: only once the thesis is finished
@@ -137,9 +135,7 @@ goal is to test the effectiveness of the design and detect its shortcomings.
 
     An important observation in this application is that different gestures are
     detected by different gesture trackers, thus separating gesture detection
-    code into maintainable parts. The architecture has adopted this design
-    feature by also using different gesture trackers to track different gesture
-    types.
+    code into maintainable parts.
 
     % TODO: This is not really 'related', move it to somewhere else
     \section{Processing implementation of simple gestures in Android}
@@ -213,13 +209,13 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     After all, the gesture detection logic in a ``generic'' architecture should
     not be implemented based on driver-specific messages.  The event types in
     this format should be chosen so that multiple drivers can trigger the same
-    events. If each supported driver adds its own set of event types to the
-    common format, it the purpose of being ``common'' would be defeated.
+    events. If each supported driver were to add its own set of event types
+    to the common format, the purpose of being ``common'' would be defeated.
 
-    A reasonable expectation for a touch device driver is that it detects
-    simple touch points, with a ``point'' being an object at an $(x, y)$
-    position on the touch surface. This yields a basic set of events:
-    $\{point\_down, point\_move, point\_up\}$.
+    A minimal expectation for a touch device driver is that it detects simple
+    touch points, with a ``point'' being an object at an $(x, y)$ position on
+    the touch surface. This yields a basic set of events: $\{point\_down,
+    point\_move, point\_up\}$.
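
To make the basic event set concrete, a sketch of what the common event format
could look like in Python (the field names are illustrative assumptions):

    from dataclasses import dataclass

    # The three basic event types that every supported driver can produce.
    POINT_DOWN, POINT_MOVE, POINT_UP = range(3)

    @dataclass
    class Event:
        type: int      # one of POINT_DOWN, POINT_MOVE, POINT_UP
        x: float       # position on the touch surface
        y: float
        touch_id: int  # distinguishes simultaneously active touch objects
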
 
     The TUIO protocol supports fiducials\footnote{A fiducial is a pattern used
     by some touch devices to identify objects.}, which also have a rotational
@@ -236,9 +232,10 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     components in the architecture for translation to gestures. This
     communication flow is illustrated in figure \ref{fig:driverdiagram}.
 
-    A touch device driver can be supported by adding an event driver
-    implementation for it.  The event driver implementation that is used in an
-    application is dependent of the support of the touch device.
+    Support for a touch device driver can be added by writing an event driver
+    implementation for it. Which event driver implementation an application
+    uses depends on which driver the touch device at hand supports.
 
     \driverdiagram{Extension of the diagram from figure \ref{fig:basicdiagram},
     showing the position of the event driver in the architecture. The event
@@ -246,7 +243,8 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     delegated to analysis components that will interpret them as more complex
     gestures.}
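
A sketch of what an event driver could look like, reusing the Event format
sketched above (the class names and the TUIO callback signature are
hypothetical):

    class EventDriver:
        """Base class: translates driver-specific messages to common events."""

        def __init__(self, delegate):
            # 'delegate' receives Event objects in the common format.
            self.delegate = delegate

    class TUIOEventDriver(EventDriver):
        """Sketch of an event driver for the TUIO protocol."""

        def on_cursor_down(self, session_id, x, y):
            # Map a TUIO cursor message onto the common event set.
            self.delegate(Event(POINT_DOWN, x, y, session_id))
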
 
-    \section{Restricting gestures to a screen area}
+    \section{Restricting events to a screen area}
+    \label{sec:restricting-gestures}
 
     % TODO: in introduction: gestures are composed of multiple primitives
     Touch input devices are unaware of the graphical input widgets rendered on
@@ -255,29 +253,119 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     gesture to a particular widget on screen, an application programmer must
     restrict the occurrence of a gesture to the area of the screen covered by
     that widget. An important question is whether the architecture should offer a
-    solution to this problem, or leave it to the programmer to assign gestures
-    to a widget.
-
-    % TODO: first: leave it to the developer, referring to the previous
-    % diagram. then consider the following example: ... two squares that both
-    % listen to rotation (figure for illustration): if you rotate them at the
-    % same time, only one global event occurs. so: do not restrict gestures to
-    % an area, but events. then you can attach separate detection logic to
-    % each square, with the events at that location as input. in other words:
-    % you cannot leave this to the developer, because the input of the
-    % detection logic has to change (which the developer has no influence on).
-    % conclusion: it must be possible to restrict events to an "area" of the
-    % screen. at this point the diagram can thus already be extended
-
-    % then: simplest approach is a list of areas; if an event fits inside
+    solution to this problem, or leave it to the application developer to
+    assign gestures to a widget.
+
+    The latter case poses a problem when a gesture must be able to occur at
+    different screen positions at the same time. Consider the example in
+    figure \ref{fig:ex1}, where two squares must be rotatable independently
+    and simultaneously. If the developer is left with the task of assigning a
+    gesture to one of the squares, the event analysis component in figure
+    \ref{fig:driverdiagram} receives all events that occur on the screen.
+    Assuming that the rotation detection logic detects a single rotation
+    gesture based on all of its input events, without detecting clusters of
+    input events, only one rotation gesture can be triggered at the same time.
+    When a user attempts to ``grab'' one square with each hand, the events
+    triggered by all fingers are combined to form a single rotation gesture
+    instead of two separate gestures.
+
+    \examplefigureone
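
The limitation can be made concrete with a sketch of such naive detection
logic, which derives a single rotation from all touch points on the screen
(hypothetical code, using plain (x, y) tuples):

    import math

    def rotation_delta(old_points, new_points):
        """Naive detection: one rotation angle from ALL current touch points.

        Fingers on two different squares are merged around one global
        centroid here, so only one rotation gesture can ever be triggered."""
        cx = sum(x for x, y in new_points) / len(new_points)
        cy = sum(y for x, y in new_points) / len(new_points)
        old_angles = [math.atan2(y - cy, x - cx) for x, y in old_points]
        new_angles = [math.atan2(y - cy, x - cx) for x, y in new_points]
        deltas = [n - o for n, o in zip(new_angles, old_angles)]
        return sum(deltas) / len(deltas)
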
+
+    To overcome this problem, groups of events must be separated by the event
+    analysis component before any detection logic is executed. An obvious
+    solution for the given example is to incorporate this separation in the
+    rotation detection logic itself, using a distance threshold that decides if
+    an event should be added to an existing rotation gesture. However, leaving
+    the task of separating groups of events to detection logic leads to
+    duplication of code. For instance, if the rotation gesture is replaced by
+    a \emph{pinch} gesture that enlarges a rectangle, the detection logic that
+    detects the pinch gesture would have to contain the same code that
+    separates groups of events for different gestures. Also, a pinch gesture
+    can be performed using the fingers of multiple hands, in which case the
+    use of a simple distance threshold is insufficient. These examples show
+    that gesture detection logic is hard to implement without knowledge about
+    (the position of) the widget\footnote{``Widget'' is a name commonly used
+    to identify an element of a graphical user interface (GUI).} that is
+    receiving the gesture.
+
+    Therefore, a better solution for the assignment of events to gesture
+    detection is to make the gesture detection component aware of the locations
+    of application widgets on the screen. To accomplish this, the architecture
+    must contain a representation of the screen area covered by a widget. This
+    leads to the concept of an \emph{area}, which represents an area on the
+    touch surface in which events should be grouped before being delegated to a
+    form of gesture detection.  Examples of simple area implementations are
+    rectangles and circles.  However, areas could be made to represent more
+    complex shapes.
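
A sketch of how simple area implementations might look, assuming the Event
format from the earlier sketch (class names are illustrative):

    class Area:
        """A region of the touch surface in which events are grouped."""

        def contains(self, event):
            raise NotImplementedError

    class RectangleArea(Area):
        def __init__(self, x, y, width, height):
            self.x, self.y, self.width, self.height = x, y, width, height

        def contains(self, event):
            return (self.x <= event.x <= self.x + self.width
                    and self.y <= event.y <= self.y + self.height)

    class CircleArea(Area):
        def __init__(self, cx, cy, radius):
            self.cx, self.cy, self.radius = cx, cy, radius

        def contains(self, event):
            return ((event.x - self.cx) ** 2 + (event.y - self.cy) ** 2
                    <= self.radius ** 2)
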
+
+    An area groups events and assigns them to some piece of gesture detection
+    logic. This possibly triggers a gesture, which must be handled by the
+    client application. A common way to handle framework events in an
+    application is a ``callback'' mechanism: the application developer binds a
+    function to an event; the framework calls this function when the event
+    occurs. Because developers are familiar with this concept, the
+    architecture uses a callback mechanism to handle gestures in an
+    application. Since an area controls the grouping of events and thus the
+    occurrence of gestures within it, gesture handlers for a specific gesture
+    type are bound to an area. Figure \ref{fig:areadiagram} shows the position
+    of areas in the architecture.
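
Continuing the area sketch, the callback mechanism could be modelled as a
mapping from gesture types to handler functions (the on_gesture and trigger
names are hypothetical):

    class Area:
        """Extends the earlier Area sketch with gesture handler bindings."""

        def __init__(self):
            self.handlers = {}

        def on_gesture(self, gesture_type, handler):
            # Bind a callback to a gesture type occurring in this area.
            self.handlers[gesture_type] = handler

        def trigger(self, gesture):
            # Called by gesture detection; dispatches to the bound handler.
            handler = self.handlers.get(gesture.type)
            if handler is not None:
                handler(gesture)

    # Hypothetical usage: rotate an application widget when a rotation
    # gesture occurs in its area:
    #   area.on_gesture("rotate", lambda g: widget.rotate(g.angle))
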
+
+    \areadiagram{Extension of the diagram from figure \ref{fig:driverdiagram},
+    showing the position of areas in the architecture. An area delegates
+    events to a gesture detection component that triggers gestures. The area
+    then calls the handler that the application has bound to the gesture
+    type.}
+
+    Note that the boundaries of an area are only used to group events, not
+    gestures. A gesture could occur outside the area that contains its
+    originating events, as illustrated by the example in figure \ref{fig:ex2}.
+
+    \examplefiguretwo
+
+    A remark must be made about the use of areas to assign events to the
+    detection of some gesture. The concept of an ``area'' is based on the
+    assumption that the set of originating events that form a particular
+    gesture can be determined based exclusively on the location of the events.
+    This is a reasonable assumption for simple touch objects whose only
+    parameter is a position, such as a pen or a human finger. However, more
+    complex touch objects can have additional parameters, such as rotational
+    orientation or color. An even more generic concept is the \emph{event
+    filter}, which decides whether an event should be assigned to a particular
+    piece of gesture detection based on all available parameters. This level
+    of abstraction allows for constraints like ``Use all blue objects within a
+    widget for rotation, and green objects for tapping''. As mentioned in the
+    introduction (chapter \ref{chapter:introduction}), the scope of this
+    thesis is limited to multi-touch surface based devices, for which the
+    \emph{area} concept suffices. Section \ref{sec:eventfilter} explores the
+    possibility of replacing areas with event filters.
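
A sketch of the more generic concept: an event filter as an arbitrary
predicate over all event parameters (the predicate-based formulation is an
assumption made for illustration):

    class EventFilter:
        """Generalizes an area: any predicate over event parameters."""

        def __init__(self, predicate):
            self.predicate = predicate

        def matches(self, event):
            return self.predicate(event)

    # "Use all blue objects within a widget for rotation": combines location
    # with a hypothetical 'color' event parameter:
    #   rotation_filter = EventFilter(
    #       lambda e: widget_area.contains(e) and e.color == "blue")
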
+
+    \subsection*{Reserving an event for a gesture}
+
+    The simplest implementation of areas in the architecture is a list of
+    areas. When the event driver delegates an event, it is delegated to gesture
+    detection by each area that contains the event coordinates. A problem
+    occurs when areas overlap, as shown by figure \ref{fig:ex3}. When the
+    white square is rotated, the gray square should keep its current
+    orientation. This means that events that are used for rotation of the white
+    square should not be used for rotation of the gray square. To achieve
+    this, there must be some communication between the rotation detection
+    components of the two squares.
+
+    \examplefigurethree
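
A sketch of this naive list-based delegation, which makes the overlap problem
visible: every area containing the event receives it (handle() is a
hypothetical entry point to an area's gesture detection):

    areas = []  # populated by the application, e.g. the two squares above

    def delegate(event):
        # Overlapping areas, such as the white and gray squares, both
        # receive the same events and thus both detect a rotation.
        for area in areas:
            if area.contains(event):
                area.handle(event)
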
+
+    % simplest approach is a list of areas; if an event fits inside
     % one, delegate it. problem (illustrate with the example of nested widgets
     % that both listen to tap): when areas overlap, you want to reserve
     % certain events for certain pieces of detection logic
+
     % solution: store areas in a tree structure and use event propagation
     % -> an area inside a parent area can propagate events to that parent,
     % and detection logic can stop the propagation. to propagate up the
     % tree, the event must first arrive at the leaf, so first delegate down
     % to the lowest leaf node that contains the event.
+
     % special case: overlapping areas in the same layer of the tree. in that
     % case, the area that was added later (the right sibling) is assumed to
     % lie on top of its left sibling and thus receives the event first.
@@ -288,37 +376,16 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     % which uses a tree structure for widgets -> create an area for every
     % widget that has touch events
 
-    Gestures are composed of primitive events using detection logic. If a
-    particular gesture should only occur within some area of the screen, it
-    should be composed of only events that occur within that area Events that
-    occur outside the area are not likely to be relevant to the . In other
-    words, the gesture detection logic is affected by the area in which the
-    gestures should be detected. Since the detection logic is part of the
-    architecture, the architecture must be able to restrict the set of events
-    to that are delegated to the particular piece of detection logic for the
-    gesture being detected in the area.
-
-    For example, a button tap\footnote{A ``tap'' gesture is triggered when a
-    touch object releases a touch surface within a certain time and distance
-    from the point where it initially touched the surface.} should only occur
-    on the button itself, and not in any other area of the screen. A solution
-    to this problem is the use of \emph{widgets}. The button from the example
-    can be represented as a rectangular widget with a position and size. The
-    position and size are compared with event coordinates to determine whether
-    an event should occur within the button.
-
-    \subsection*{Callbacks}
-    \label{sec:callbacks}
-
-        When an event is propagated by a widget, it is first used for event
-        analysis on that widget. The event analysis can then trigger a gesture
-        in the widget, which has to be handled by the application. To handle a
-        gesture, the widget should provide a callback mechanism: the
-        application binds a handler for a specific type of gesture to a widget.
-        When a gesture of that type is triggered after event analysis, the
-        widget triggers the callback.
-
-    \subsection*{Widget tree}
+    %For example, a button tap\footnote{A ``tap'' gesture is triggered when a
+    %touch object releases a touch surface within a certain time and distance
+    %from the point where it initially touched the surface.} should only occur
+    %on the button itself, and not in any other area of the screen. A solution
+    %to this problem is the use of \emph{widgets}. The button from the example
+    %can be represented as a rectangular widget with a position and size. The
+    %position and size are compared with event coordinates to determine whether
+    %an event should occur within the button.
+
+    \subsection*{Area tree}
 
         A problem occurs when widgets overlap. If a button is placed over a
         container and an event occurs inside the button, should the
@@ -347,34 +414,25 @@ goal is to test the effectiveness of the design and detect its shortcomings.
         \texttt{GtkTouchWidget} that synchronises the position of a touch
         widget with that of a GTK widget, using GTK signals.
 
-    \subsection*{Position of widget tree in architecture}
-
-        \widgetdiagram{Extension of the diagram from figure
-        \ref{fig:driverdiagram}, showing the position of widgets in the
-        architecture.}
+    \section{Detecting gestures from events}
+    \label{sec:gesture-detection}
 
-    \section{Event analysis}
-    \label{sec:event-analysis}
+    The events that are grouped by areas must be translated to complex gestures
+    in some way. This analysis is specific to the type of gesture being
+    detected.  For example, the detection of a ``tap'' gesture is very
+    different from the detection of a ``rotate'' gesture. The architecture has
+    adopted the \emph{gesture tracker}-based design described in
+    \cite{win7touch}, which separates the detection of different gestures into
+    different \emph{gesture trackers}. This keeps the different pieces of
+    gesture detection code manageable and extendable. A single gesture tracker
+    detects a specific set of gesture types, given a set of primitive events.
+    An example of a possible gesture tracker implementation is a
+    ``transformation tracker'' that detects rotation, scaling and translation
+    gestures.
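
A sketch of the gesture tracker concept, reusing the earlier Event and Area
sketches (the Gesture type, the tracker API and the threshold value are
illustrative assumptions):

    import time
    from dataclasses import dataclass

    @dataclass
    class Gesture:
        type: str
        x: float
        y: float

    class GestureTracker:
        """Detects a specific set of gesture types from primitive events."""

        def __init__(self, area):
            self.area = area  # the area whose grouped events this tracker gets

        def on_event(self, event):
            raise NotImplementedError

    class TapTracker(GestureTracker):
        """Sketch: a tap is a release shortly after the initial touch."""

        MAX_DURATION = 0.3  # seconds; illustrative threshold

        def __init__(self, area):
            super().__init__(area)
            self.down_since = {}

        def on_event(self, event):
            if event.type == POINT_DOWN:
                self.down_since[event.touch_id] = time.time()
            elif event.type == POINT_UP:
                started = self.down_since.pop(event.touch_id, None)
                if started and time.time() - started < self.MAX_DURATION:
                    # Delivered via the callback mechanism sketched earlier.
                    self.area.trigger(Gesture("tap", event.x, event.y))
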
 
-    % TODO: the essence must be that gesture trackers split detection logic
-    % into manageable chunks and are assigned to a single area, so that
-    % multiple trackers can run at the same time on different parts of the
-    % screen. a formal definition of gestures would perhaps be better, but
+    % TODO: a formal definition of gestures would perhaps be better, but
     % is not given in this thesis (it is discussed in future work)
 
-    The events that are delegated to widgets must be analyzed in some way to
-    gestures. This analysis is specific to the type of gesture being detected.
-    E.g. the detection of a ``tap'' gesture is very different from detection of
-    a ``rotate'' gesture. The implementation described in \cite{win7touch}
-    separates the detection of different gestures into different \emph{gesture
-    trackers}. This keeps the different pieces of detection code managable and
-    extandable. Therefore, the architecture also uses gesture trackers to
-    separate the analysis of events. A single gesture tracker detects a
-    specific set of gesture types, given a sequence of events. An example of a
-    possible gesture tracker implementation is a ``transformation tracker''
-    that detects rotation, scaling and translation gestures.
-
-    \subsection*{Assignment of a gesture tracker to a widget}
+    \subsection*{Assignment of a gesture tracker to an area}
 
         As explained in section \ref{sec:callbacks}, events are delegated from
         a widget to some event analysis. The analysis component of a widget
@@ -404,7 +462,8 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     The button is located inside an application window, which can be resized
     using pinch gestures.
 
-    % TODO: remove comments, write this out in pseudocode
+    % TODO: remove comments, write this out in pseudocode, extend with a
+    % draggable circle and an illustrative figure
     \begin{verbatim}
     initialize GUI, creating a window
 
@@ -442,6 +501,13 @@ goal is to test the effectiveness of the design and detect its shortcomings.
 
 \chapter{Test applications}
 
+\section{Reference implementation in Python}
+\label{sec:implementation}
+
+% TODO
+% only window.contains on point down, not on move/up
+% a few simple windows and trackers
+
 To test multi-touch interaction properly, a multi-touch device is required. The
 University of Amsterdam (UvA) has provided access to a multi-touch table from
 PQlabs. The table uses the TUIO protocol \cite{TUIO} to communicate touch
@@ -449,6 +515,8 @@ events. See appendix \ref{app:tuio} for details regarding the TUIO protocol.
 The reference implementation is a Proof of Concept that translates TUIO
 messages to some simple touch gestures (see appendix \ref{app:implementation}
 for details).
+% since we only have access to this table, we can only test the event driver
+% concept with the TUIO protocol, and cannot compare it with other drivers
 
 % TODO
 % test programs with PyGame/Cairo
@@ -461,6 +529,10 @@ for details).
 %   e.g. a state machine
 % - next step: create a library that contains multiple drivers and complex
 %   gestures
+% - "event filter" instead of "area"
+
+\section{A generic way for grouping events}
+\label{sec:eventfilter}
 
 \bibliographystyle{plain}
 \bibliography{report}{}
@@ -567,11 +639,4 @@ algorithms based on its test program.
 Also, the different detection algorithms are all implemented in the same file,
 making it complex to read or debug, and difficult to extend.
 
-\chapter{Reference implementation in Python}
-\label{app:implementation}
-
-% TODO
-% only window.contains on point down, not on move/up
-% a few simple windows and trackers
-
 \end{document}