
Restructured section about event areas.

Taddeus Kroes 13 years ago
parent
commit
f39c98e40c
3 changed files with 173 additions and 153 deletions
  1. docs/data/diagrams.tex (+19 −20)
  2. docs/report.bib (+7 −0)
  3. docs/report.tex (+147 −133)

+ 19 - 20
docs/data/diagrams.tex

@@ -121,21 +121,20 @@
         \architecture{
             \node[block, below of=driver] (eventdriver) {Event driver}
                 edge[linefrom] node[right, near end] {driver-specific messages} (driver);
-            \node[block, below of=eventdriver] (area) {Areas}
+            \node[block, below of=eventdriver] (area) {Event areas}
                 edge[linefrom] node[right] {events} (eventdriver);
             \node[block, right of=area, xshift=7em, dashed] (analysis) {Gesture detection}
                 edge[linefrom, bend right=10] node[above] {events} (area)
                 edge[lineto, bend left=10] node[] {gestures} (area);
             \node[block, below of=area] {Application}
-                edge[linefrom] node[right, near start] {gestures} (area);
+                edge[linefrom] node[right, near start] {gestures through callback function} (area);
 
             \group{eventdriver}{eventdriver}{analysis}{area}{Architecture}
         }
         \caption{Extension of the diagram from figure \ref{fig:driverdiagram},
-        showing the position of areas in the architecture. An area delegate
-        events to a gesture detection component that trigger gestures. The area
-        then calls the handler that is bound to the gesture type by the
-        application.}
+        with event areas. An event area delegates events to a gesture detection
+        component that triggers a gesture. The event area then calls the
+        handlers that are bound to the gesture type by the application.}
         \label{fig:areadiagram}
     \end{figure}
 }
@@ -210,7 +209,7 @@
 \def\righthand{\reflectbox{\includegraphics[width=50pt, angle=-45]{data/hand.png}}}
 
 \def\examplefigureone{
-    \begin{figure}[h]
+    \begin{figure}[h!]
         \center
         % TODO: draw finger touch points as circles with rotating arrow
         \begin{tikzpicture}
@@ -259,11 +258,11 @@
 \def\eventpropagationfigure{
     \begin{figure}[h!]
         \center
-        \vspace{-2em}
         \subfigure[An event is triggered in the white area. The event is first
        delegated to the white area from the gray area (2). After gesture
         detection, it is propagated back to the gray area (6) \emph{unless}
-        propagation has been stopped in the gesture tracker between (3) and (4).]{
+        propagation has been stopped in the rotation detection component
+        between (3) and (4).]{
             \begin{tikzpicture}[node distance=5.5em]
                 \draw node[draw=black, minimum width=190, minimum height=140] (screen) at (0,0) {};
                 \draw node[fill=gray!50, draw=black!70, minimum height=100, minimum width=100] (screen) at (-0.1,-0.1) {};
@@ -271,15 +270,15 @@
                 \fill (0.4, 0.6) circle (0.15);
 
                 \draw node[block, yshift=-10em, xshift=-3em] (driver) {Event driver};
-                \draw node[block, below of=driver] (gray) {Gray area}
+                \draw node[block, below of=driver] (gray) {Gray event area}
                     edge[linefrom] node[left] {1} (driver);
-                \draw node[block, below of=gray] (white) {White area}
+                \draw node[block, below of=gray] (white) {White event area}
                     edge[linefrom, bend left=15] node[left] {2} (gray)
                     edge[lineto, bend right=15] node[right] {6} (gray);
-                \draw node[block, right of=white, xshift=4em] {\emph{rotation} tracker}
+                \draw node[block, right of=white, xshift=4em] {rotation detection}
                     edge[linefrom, bend right=15] node[above] {3} (white)
                     edge[lineto, dotted, bend left=15] node[below] {4} (white);
-                \draw node[block, right of=gray, xshift=4em] {\emph{rotation} tracker}
+                \draw node[block, right of=gray, xshift=4em] {rotation detection}
                     edge[linefrom, bend right=15] node[above] {7} (gray)
                     edge[lineto, dotted, bend left=15] node[below] {8} (gray);
                 \draw node[block, below of=white] {Application}
@@ -288,8 +287,8 @@
             \end{tikzpicture}
         }
         \quad
-        \subfigure[An event is triggered in the gray area, it does not even
-        reach the white area.]{
+        \subfigure[An event is triggered in the gray event area; it does not
+        even reach the white event area.]{
             \begin{tikzpicture}[node distance=5.5em]
                 \draw node[draw=black, minimum width=190, minimum height=140] (screen) at (0,0) {};
                 \draw node[fill=gray!50, draw=black!70, minimum height=100, minimum width=100] (screen) at (-0.1,-0.1) {};
@@ -297,11 +296,11 @@
                 \fill (-0.5, -0.7) circle (0.15);
 
                 \draw node[block, yshift=-10em, xshift=-3em] (driver) {Event driver};
-                \draw node[block, below of=driver] (gray) {Gray area}
+                \draw node[block, below of=driver] (gray) {Gray event area}
                     edge[linefrom] node[left] {1} (driver);
-                \draw node[block, below of=gray] (white) {White area};
-                \draw node[block, right of=white, xshift=4em] {\emph{rotation} tracker};
-                \draw node[block, right of=gray, xshift=4em] {\emph{rotation} tracker}
+                \draw node[block, below of=gray] (white) {White event area};
+                \draw node[block, right of=white, xshift=4em] {rotation detection};
+                \draw node[block, right of=gray, xshift=4em] {rotation detection}
                     edge[linefrom, bend right=15] node[above] {2} (gray)
                     edge[lineto, dotted, bend left=15] node[below] {3} (gray);
                 \draw node[block, below of=white] {Application}
@@ -310,7 +309,7 @@
         }
         \caption{Two nested squares both listen to rotation gestures. The two
         figures both show a touch object triggering an event, which is
-        delegated through the area tree in the order indicated by the numbered
+        delegated through the event area tree in the order indicated by the numbered
         arrow labels. Normal arrows represent events, dotted arrows represent
         gestures. Note that the dotted arrows only represent the path a gesture
         would travel in the tree \emph{if triggered}, not an actual triggered

+ 7 - 0
docs/report.bib

@@ -178,3 +178,10 @@
 	year = "2007"
 }
 
+@misc{gtkeventpropagation,
+	author = "Reis, Christian",
+	note = "\url{faq.pygtk.org/index.py?file=faq03.011.htp\&req=show}",
+	title = "{How do signals and events propagate in GTK+?}",
+	year = "2002"
+}
+

+ 147 - 133
docs/report.tex

@@ -243,116 +243,168 @@ goal is to test the effectiveness of the design and detect its shortcomings.
    % TODO: in introduction: gestures are composed of multiple primitives
     Touch input devices are unaware of the graphical input
     widgets\footnote{``Widget'' is a name commonly used to identify an element
-    of a graphical user interface (GUI).} rendered on screen and therefore
+    of a graphical user interface (GUI).} of an application, and therefore
     generate events that simply identify the screen location at which an event
-    takes place. In order to be able to direct a gesture to a particular widget
-    on screen, an application programmer must restrict a gesture to the area of
-    the screen covered by that widget. An important question is if the
-    architecture should offer a solution to this problem, or leave it to the
-    application developer.
-
-    The latter case generates a problem when a gesture must be able to occur at
-    different screen positions at the same time. Consider the example in figure
-    \ref{fig:ex1}, where two squares can be rotated independently at the same
-    time. If the developer is left the task to assign a gesture to one of the
-    squares, the event analysis component in figure \ref{fig:driverdiagram}
-    receives all events that occur on the screen.  Assuming that the rotation
-    detection logic detects a single rotation gesture based on all of its input
-    events, without detecting clusters of input events, only one rotation
-    gesture can be triggered at the same time.  When a user attempts to
-    ``grab'' one rectangle with each hand, the events triggered by all fingers
-    are combined to form a single rotation gesture instead of two separate
-    gestures.
+    takes place. User interfaces of applications that do not run in
+    full-screen mode are contained in a window. Events that occur outside the
+    application window should, in most cases, not be handled by the program.
+    Moreover, widgets within the application window itself should be able to
+    respond to different gestures. For example, a button widget may respond
+    to a ``tap'' gesture to be activated, whereas the application window
+    responds to a ``pinch'' gesture to be resized. In order to be able to
+    direct a gesture to a particular widget in an application, a gesture must
+    be restricted to the area of the screen covered by that widget. An
+    important question is whether the architecture should offer a solution to
+    this problem, or leave the task of assigning gestures to application
+    widgets to the application developer.
+
+    If the architecture does not provide a solution, the ``Event analysis''
+    component in figure \ref{fig:multipledrivers} receives all events that
+    occur on the screen surface. The gesture detection logic thus uses all
+    events as input to detect a gesture. This leaves no possibility for a
+    gesture to occur at multiple screen positions at the same time, unless the
+    gesture detection logic incorporates event cluster detection. The problem
+    is illustrated in figure \ref{fig:ex1}, where two widgets on the screen can
+    be rotated independently. The rotation detection component that detects
+    rotation gestures receives the events of all four fingers as input. If
+    the two groups of finger events are not separated by cluster detection,
+    only a single rotation gesture will be detected.
 
     \examplefigureone
 
-    To overcome this problem, groups of events must be clustered by the event
-    analysis component before any detection logic is executed. An obvious
-    solution for the given example is to incorporate this separation in the
-    rotation detection logic itself, using a distance threshold that decides if
-    an event should be added to an existing rotation gesture. Leaving the task
-    of separating groups of events to detection logic leads to duplication of
-    code. For instance, if the rotation gesture is replaced by a \emph{pinch}
-    gesture that enlarges a rectangle, the detection logic that detects the
-    pinch gesture would have to contain the same code that separates groups of
-    events for different gestures. Also, a pinch gesture can be performed using
-    fingers multiple hands as well, in which case the use of a simple distance
-    threshold is insufficient. These examples show that gesture detection logic
-    is hard to implement without knowledge about (the position of) the
-    widget that is receiving the gesture.
-
-    A better solution for the assignment of events to gesture detection is to
-    make the gesture detection component aware of the locations of application
-    widgets on the screen. To accomplish this, the architecture must contain a
-    representation of the screen area covered by a widget. This leads to the
-    concept of an \emph{area}, which represents an area on the touch surface in
-    which events should be grouped before being delegated to a form of gesture
-    detection.  Examples of simple area implementations are rectangles and
-    circles.  However, area's could also be made to represent more complex
-    shapes.
-
-    An area groups events and assigns them to gesture detection logic. This
-    possibly triggers a gesture, which must be handled by the client
-    application. A common way to handle events in an application is a
-    ``callback'' mechanism: the application developer binds a function to an
-    event, that is called when the event occurs. Because of the familiarity of
-    this concept with developers, the architecture uses a callback mechanism to
-    handle gestures in an application. Since an area controls the grouping of
-    events and thus the occurrence of gestures in an area, gesture handlers for
-    a specific gesture type are bound to an area. Figure \ref{fig:areadiagram}
-    shows the position of areas in the architecture.
+    A gesture detection component could perform heuristic cluster detection
+    based on the distance between events. However, this method cannot
+    guarantee that a cluster of events corresponds to a particular
+    application widget. In short, gesture detection is difficult to implement
+    without awareness of the location of application widgets. Moreover, the
+    application developer still needs to direct gestures to a particular widget
+    manually.  This requires geometric calculations in the application logic,
+    which is a tedious and error-prone task for the developer.
+
+    A better solution is to group events that occur inside the area covered by
+    a widget, before passing them on to a gesture detection component.
+    Different gesture detection components can then detect gestures
+    simultaneously, based on different sets of input events. An area of the
+    screen surface will be represented by an \emph{event area}. An event area
+    filters input events based on their location, and then delegates events to
+    gesture detection components that are assigned to the event area. Events
+    which are located outside the event area are not delegated to its gesture
+    detection components.
+
+    In the example of figure \ref{fig:ex1}, the two rotatable widgets can be
+    represented by two event areas, each having a different rotation detection
+    component.
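+
+    To make the concept more concrete, a minimal sketch of an event area in
+    Python follows. All names are illustrative, not necessarily those of the
+    reference implementation.
+
+\begin{verbatim}
+from collections import namedtuple
+
+# A minimal event: only a location on the touch surface.
+Event = namedtuple('Event', ['x', 'y'])
+
+class EventArea(object):
+    """Rectangular area of the screen that filters events by location."""
+
+    def __init__(self, x, y, width, height):
+        self.x, self.y = x, y
+        self.width, self.height = width, height
+        self.detectors = []  # assigned gesture detection components
+
+    def contains(self, event):
+        return (self.x <= event.x < self.x + self.width and
+                self.y <= event.y < self.y + self.height)
+
+    def delegate(self, event):
+        # Only events inside the area reach its detection components.
+        if self.contains(event):
+            for detector in self.detectors:
+                detector.handle_event(event)
+\end{verbatim}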
+
+    \subsection*{Callback mechanism}
+
+    When a gesture is detected by a gesture detection component, it must be
+    handled by the client application. A common way to handle events in an
+    application is a ``callback'' mechanism: the application developer binds
+    a function to an event, and that function is called when the event
+    occurs. Because developers are familiar with this concept, the
+    architecture uses a callback mechanism to handle gestures in an
+    application. Since an event area controls the grouping of events and thus
+    the occurrence of gestures within its boundaries, gesture handlers for a
+    specific gesture type are bound to an event area. Figure
+    \ref{fig:areadiagram} shows the position of event areas in the
+    architecture.
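+
+    As an illustration, binding a handler could look as follows. The
+    \texttt{bind} method is hypothetical, not a fixed part of the
+    architecture.
+
+\begin{verbatim}
+def on_rotate(gesture):
+    # Called whenever a rotation gesture occurs in the area.
+    print('rotated by %.1f degrees' % gesture.angle)
+
+# Handlers are bound to a gesture type on a specific event area.
+area.bind('rotate', on_rotate)
+\end{verbatim}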
 
     \areadiagram
 
-    An area can be seen as an independent subset of a touch surface. Therefore,
-    the parameters (coordinates) of events and gestures within an area should
-    be relative to the area.
-
-    Note that the boundaries of an area are only used to group events, not
-    gestures. A gesture could occur outside the area that contains its
-    originating events, as illustrated by the example in figure \ref{fig:ex2}.
-
-    \examplefiguretwo
-
-    A remark must be made about the use of areas to assign events the detection
-    of some gesture. The concept of an ``area'' is based on the assumption that
-    the set or originating events that form a particular gesture, can be
-    determined based exclusively on the location of the events. This is a
-    reasonable assumption for simple touch objects whose only parameter is a
-    position, such as a pen or a human finger. However, more complex touch
-    objects can have additional parameters, such as rotational orientation or
-    color. An even more generic concept is the \emph{event filter}, which
-    detects whether an event should be assigned to a particular piece of
-    gesture detection based on all available parameters. This level of
+    %Note that the boundaries of an area are only used to group events, not
+    %gestures. A gesture could occur outside the area that contains its
+    %originating events, as illustrated by the example in figure \ref{fig:ex2}.
+
+    %\examplefiguretwo
+
+    A remark must be made about the use of event areas to assign events to
+    gesture detection. The concept of an event area is based on the
+    assumption that the set of originating events that form a particular
+    gesture can be determined based exclusively on the location of the events.
+    This is a reasonable assumption for simple touch objects whose only
+    parameter is a position, such as a pen or a human finger. However, more
+    complex touch objects can have additional parameters, such as rotational
+    orientation or color. An even more generic concept is the \emph{event
+    filter}, which detects whether an event should be assigned to a particular
+    gesture detection component based on all available parameters. This level of
     abstraction allows for constraints like ``Use all blue objects within a
-    widget for rotation, and green objects for tapping.''. As mentioned in the
+    widget for rotation, and green objects for dragging''. As mentioned in the
     introduction chapter [\ref{chapter:introduction}], the scope of this thesis
-    is limited to multi-touch surface based devices, for which the \emph{area}
-    concept suffices. Section \ref{sec:eventfilter} explores the possibility of
-    areas to be replaced with event filters.
+    is limited to multi-touch surface based devices, for which the \emph{event
+    area} concept suffices. Section \ref{sec:eventfilter} explores the
+    possibility of event areas to be replaced with event filters.
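+
+    A sketch of such an event filter, assuming a hypothetical \texttt{color}
+    event parameter:
+
+\begin{verbatim}
+class EventFilter(object):
+    """Generalization of an event area: accepts events based on an
+    arbitrary predicate rather than on location alone."""
+
+    def __init__(self, predicate):
+        self.predicate = predicate
+        self.detectors = []
+
+    def delegate(self, event):
+        if self.predicate(event):
+            for detector in self.detectors:
+                detector.handle_event(event)
+
+# Only blue objects within the widget area are used for rotation.
+rotation_filter = EventFilter(
+    lambda e: area.contains(e) and e.color == 'blue')
+\end{verbatim}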
 
     \subsection{Area tree}
     \label{sec:tree}
 
-    The most simple implementation of areas in the architecture is a list of
-    areas. When the event driver delegates an event, it is delegated to gesture
-    detection by each area that contains the event coordinates.
+    The simplest usage of event areas in the architecture would be a list of
+    event areas. When the event driver delegates an event, it is accepted by
+    each event area that contains the event coordinates.
 
     If the architecture were to be used in combination with an application
-    framework like GTK \cite{GTK}, each GTK widget that must receive gestures
-    should have a mirroring area that synchronizes its position with that of
-    the widget.  Consider a panel with five buttons that all listen to a
-    ``tap'' event. If the panel is moved as a result of movement of the
-    application window, the position of each button has to be updated.
-
-    This process is simplified by the arrangement of areas in a tree structure.
-    A root area represents the panel, containing five subareas which are
-    positioned relative to the root area. The relative positions do not need to
-    be updated when the panel area changes its position. GUI frameworks, like
-    GTK, use this kind of tree structure to manage widgets. A recommended first
-    step when developing an application is to create some subclass of the area
-    that synchronizes with the position of a widget from the GUI framework
-    automatically.
+    framework like GTK \cite{GTK}, each GTK widget that responds to gestures
+    should have a mirroring event area that synchronizes its location with that
+    of the widget. Consider a panel with five buttons that all listen to a
+    ``tap'' gesture. If the location of the panel changes as a result of movement
+    of the application window, the positions of all buttons have to be updated
+    too.
+
+    This process is simplified by the arrangement of event areas in a tree
+    structure.  A root event area represents the panel, containing five other
+    event areas which are positioned relative to the root area. The relative
+    positions do not need to be updated when the panel area changes its
+    position. GUI frameworks, like GTK, use this kind of tree structure to
+    manage graphical widgets.
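+
+    The benefit of relative positioning can be sketched by extending the
+    earlier \texttt{EventArea} sketch with a parent pointer (again an
+    illustration, not the reference implementation):
+
+\begin{verbatim}
+class TreeEventArea(EventArea):
+    def __init__(self, x, y, width, height, parent=None):
+        # (x, y) is relative to the parent area, if any.
+        EventArea.__init__(self, x, y, width, height)
+        self.parent = parent
+        self.children = []
+
+    def absolute_position(self):
+        # Walk up the tree; children need no update when the
+        # parent (e.g. the panel) moves.
+        if self.parent is None:
+            return (self.x, self.y)
+        px, py = self.parent.absolute_position()
+        return (px + self.x, py + self.y)
+\end{verbatim}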
+
+    If the GUI toolkit provides an API for requesting the position and size of
+    a widget, a recommended first step when developing an application is to
+    create a subclass of the event area that automatically synchronizes with
+    the position of a widget from the GUI framework.
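+
+    With PyGTK, for example, such a subclass might synchronize on the
+    \texttt{size-allocate} signal (a sketch that ignores the mapping from
+    window coordinates to screen coordinates):
+
+\begin{verbatim}
+class GtkWidgetArea(EventArea):
+    """Event area mirroring the position and size of a GTK widget."""
+
+    def __init__(self, widget):
+        alloc = widget.get_allocation()
+        EventArea.__init__(self, alloc.x, alloc.y,
+                           alloc.width, alloc.height)
+        # Keep the area in sync when the widget moves or resizes.
+        widget.connect('size-allocate', self.sync)
+
+    def sync(self, widget, alloc):
+        self.x, self.y = alloc.x, alloc.y
+        self.width, self.height = alloc.width, alloc.height
+\end{verbatim}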
+
+    \subsection{Event propagation}
+    \label{sec:eventpropagation}
+
+    A problem occurs when event areas overlap, as shown in figure
+    \ref{fig:eventpropagation}. When the white square is rotated, the gray
+    square should keep its current orientation. This means that events used
+    for rotation of the white square should not be used for rotation of
+    the gray square. The use of event areas alone does not provide a solution
+    here, since both the gray and the white event area accept an event that
+    occurs within the white square.
+
+    The problem described above is common in GUI applications, and there is a
+    standard solution (used by GTK \cite{gtkeventpropagation}, among
+    others). An event is passed to an ``event handler''. If the handler returns
+    \texttt{true}, the event is considered ``handled'' and is not
+    ``propagated'' to other widgets.
+
+    Applied to the example of the rotating squares, the rotation detection
+    component of the white square should stop the propagation of events to the
+    event area of the gray square. This is illustrated in figure
+    \ref{fig:eventpropagation}.
+
+    In the example, rotation of the white square has priority over rotation of
+    the gray square because the white event area is the one actually being
+    touched on the screen surface. In general, events should be delegated to event
+    areas according to the order in which the event areas are positioned over
+    each other. The tree structure in which event areas are arranged is an
+    ideal tool to determine the order in which an event is delegated. Event
+    areas in deeper layers of the tree are positioned on top of their parent.
+    An object touching the screen is essentially touching the deepest event
+    area in the tree that contains the triggered event. That event area should
+    be the first to delegate the event to its gesture detection components, and
+    then propagate the event up in the tree to its ancestors. A gesture
+    detection component can stop the propagation of the event.
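+
+    The delegation order can be sketched as a recursive walk over the event
+    area tree (illustrative; the \texttt{propagation\_stopped} flag is
+    assumed to be set by a gesture detection component):
+
+\begin{verbatim}
+def delegate_event(area, event):
+    # Recurse into the deepest child that contains the event, so
+    # that its detection components see the event first.
+    for child in area.children:
+        if child.contains(event):
+            delegate_event(child, event)
+            break
+
+    # On the way back up the tree: offer the event to this area's
+    # detection components, unless a deeper component stopped the
+    # propagation.
+    if not event.propagation_stopped:
+        for detector in area.detectors:
+            detector.handle_event(event)
+\end{verbatim}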
+
+    An additional type of event propagation is ``immediate propagation'', which
+    indicates propagation of an event from one gesture detection component to
+    another. This is applicable when an event area uses more than one gesture
+    detection component. One of the components can stop the immediate
+    propagation of an event, so that the event is not passed to the next
+    gesture detection component, nor to the ancestors of the event area.
+    When only regular propagation is stopped, the event is still passed to
+    the remaining gesture detection components of the same event area before
+    propagation to the ancestor areas stops.
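+
+    A sketch of how the two propagation types interact within a single event
+    area (\texttt{stop\_propagation} and
+    \texttt{stop\_immediate\_propagation} are assumed to set the
+    corresponding flags on the event):
+
+\begin{verbatim}
+def handle_in_area(area, event):
+    for detector in area.detectors:
+        detector.handle_event(event)
+        # Stopping immediate propagation skips the remaining
+        # components of this area as well as the ancestor areas.
+        if event.immediate_propagation_stopped:
+            break
+    # Stopping regular propagation only skips the ancestor areas;
+    # the loop above has already served this area's components.
+\end{verbatim}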
+
+    \eventpropagationfigure
+    \newpage
 
     \section{Detecting gestures from events}
     \label{sec:gesture-detection}
@@ -410,39 +462,6 @@ goal is to test the effectiveness of the design and detect its shortcomings.
     ``transformation tracker'' that detects rotation, scaling and translation
     gestures.
 
-    \section{Reserving an event for a gesture}
-    \label{sec:reserve-event}
-
-    A problem occurs when areas overlap, as shown by figure
-    \ref{fig:eventpropagation}. When the white square is rotated, the gray
-    square should keep its current orientation. This means that events that are
-    used for rotation of the white square, should not be used for rotation of
-    the gray square. To achieve this, there must be some communication between
-    the gesture trackers of the two squares. When an event in the white square
-    is used for rotation, that event should not be used for rotation in the
-    gray square. In other words, the event must be \emph{reserved} for the
-    rotation gesture in the white square. In order to reserve an event, the
-    event needs to be handled by the rotation tracker of the white before the
-    rotation tracker of the grey square receives it. Otherwise, the gray square
-    has already triggered a rotation gesture and it will be too late to reserve
-    the event for rotation of the white square.
-
-    When an object touches the touch surface, the event that is triggered
-    should be delegated according to the order in which its corresponding areas
-    are positioned over each other. The tree structure in which areas are
-    arranged (see section \ref{sec:tree}), is an ideal tool to determine the
-    order in which an event is delegated to different areas.  Areas in the tree
-    are positioned on top of their parent. An object touching the screen is
-    essentially touching the deepest area in the tree that contains the
-    triggered event. That area should be the first to delegate the event to its
-    gesture trackers, and then move the event up in the tree to its ancestors.
-    The movement of an event up in the area tree will be called \emph{event
-    propagation}. To reserve an event for a particular gesture, a gesture
-    tracker can stop its propagation. When propagation of an event is stopped,
-    it will not be passed on the ancestor areas, thus reserving the event.
-    The diagram in appendix \ref{app:eventpropagation} illustrates the use of
-    event propagation, applied to the example of the white and gray squares.
-
     \section{Serving multiple applications}
     \label{sec:daemon}
 
@@ -787,11 +806,6 @@ client application, as stated by the online specification
     values back to the actual screen dimension.
 \end{quote}
 
-\chapter{Diagram demonstrating event propagation}
-\label{app:eventpropagation}
-
-\eventpropagationfigure
-
 \chapter{Gesture detection in the reference implementation}
 \label{app:implementation-details}