Commit 9c605e96 authored by Taddeüs Kroes's avatar Taddeüs Kroes

Worked on improving report.

parent 346cf020
...@@ -50,8 +50,7 @@ ...@@ -50,8 +50,7 @@
} }
\newcommand{\basicdiagram}[1]{ \newcommand{\basicdiagram}[1]{
\begin{figure}[H] \begin{figure}[h]
\label{fig:basicdiagram}
\center \center
\architecture{ \architecture{
\node[block, dashed, below of=driver] (arch) {Architecture} \node[block, dashed, below of=driver] (arch) {Architecture}
...@@ -60,12 +59,12 @@ ...@@ -60,12 +59,12 @@
edge[linefrom] node[right] {gestures} (arch); edge[linefrom] node[right] {gestures} (arch);
} }
\caption{#1} \caption{#1}
\label{fig:basicdiagram}
\end{figure} \end{figure}
} }
\newcommand{\driverdiagram}[1]{ \newcommand{\driverdiagram}[1]{
\begin{figure}[H] \begin{figure}[h]
\label{fig:driverdiagram}
\center \center
\architecture{ \architecture{
\node[block, below of=driver] (eventdriver) {Event driver} \node[block, below of=driver] (eventdriver) {Event driver}
...@@ -79,77 +78,139 @@ ...@@ -79,77 +78,139 @@
\group{eventdriver}{eventdriver}{dummy}{analysis}{Architecture} \group{eventdriver}{eventdriver}{dummy}{analysis}{Architecture}
} }
\caption{#1} \caption{#1}
\label{fig:driverdiagram}
\end{figure} \end{figure}
} }
\newcommand{\widgetdiagram}[1]{ \newcommand{\areadiagram}[1]{
\begin{figure}[H] \begin{figure}[h]
\label{fig:widgetdiagram}
\center \center
\architecture{ \architecture{
\node[block, below of=driver] (eventdriver) {Event driver} \node[block, below of=driver] (eventdriver) {Event driver}
edge[linefrom] node[right, near end] {driver-specific messages} (driver); edge[linefrom] node[right, near end] {driver-specific messages} (driver);
\node[block, below of=eventdriver] (widget) {Widget tree} \node[block, below of=eventdriver] (area) {Areas}
edge[linefrom] node[right] {events} (eventdriver); edge[linefrom] node[right] {events} (eventdriver);
\node[block, right of=widget, xshift=7em, dashed] (analysis) {Event analysis} \node[block, right of=area, xshift=7em, dashed] (analysis) {Gesture detection}
edge[linefrom, bend right=10] node[above] {events} (widget) edge[linefrom, bend right=10] node[above] {events} (area)
edge[lineto, bend left=10] node[] {gestures} (widget); edge[lineto, bend left=10] node[] {gestures} (area);
\node[block, below of=widget] {Application} \node[block, below of=area] {Application}
edge[linefrom] node[right, near start] {gestures} (widget); edge[linefrom] node[right, near start] {gestures} (area);
\group{eventdriver}{eventdriver}{analysis}{widget}{Architecture} \group{eventdriver}{eventdriver}{analysis}{area}{Architecture}
} }
\caption{#1} \caption{#1}
\label{fig:areadiagram}
\end{figure} \end{figure}
} }
\newcommand{\trackerdiagram}[1]{ \newcommand{\trackerdiagram}[1]{
\begin{figure}[H] \begin{figure}[h]
\label{fig:trackerdiagram}
\center \center
\architecture{ \architecture{
\node[block, below of=driver] (eventdriver) {Event driver} \node[block, below of=driver] (eventdriver) {Event driver}
edge[linefrom] node[right, near end] {driver-specific messages} (driver); edge[linefrom] node[right, near end] {driver-specific messages} (driver);
\node[block, below of=eventdriver] (widget) {Widget tree} \node[block, below of=eventdriver] (area) {Area tree}
edge[linefrom] node[right] {events} (eventdriver); edge[linefrom] node[right] {events} (eventdriver);
\node[block, right of=widget, xshift=7em] (tracker) {Gesture trackers} \node[block, right of=area, xshift=7em] (tracker) {Gesture trackers}
edge[linefrom, bend right=10] node[above] {events} (widget) edge[linefrom, bend right=10] node[above] {events} (area)
edge[lineto, bend left=10] node[] {gestures} (widget); edge[lineto, bend left=10] node[] {gestures} (area);
\node[block, below of=widget] {Application} \node[block, below of=area] {Application}
edge[linefrom] node[right, near start] {gestures} (widget); edge[linefrom] node[right, near start] {gestures} (area);
\group{eventdriver}{eventdriver}{tracker}{widget}{Architecture} \group{eventdriver}{eventdriver}{tracker}{area}{Architecture}
} }
\caption{#1} \caption{#1}
\label{fig:trackerdiagram}
\end{figure} \end{figure}
} }
\newcommand{\examplediagram}[1]{ \newcommand{\examplediagram}[1]{
\begin{figure}[H] \begin{figure}[h]
\center \center
\architecture{ \architecture{
\node[block, below of=driver] (eventdriver) {Event driver} \node[block, below of=driver] (eventdriver) {Event driver}
edge[linefrom] node[right, near end] {driver-specific messages} (driver); edge[linefrom] node[right, near end] {driver-specific messages} (driver);
\node[block, below of=eventdriver] (rootwidget) {Root widget} \node[block, below of=eventdriver] (rootarea) {Root area}
edge[linefrom] (eventdriver); edge[linefrom] (eventdriver);
\node[block, below of=rootwidget] (subwidget) {Button widget} \node[block, below of=rootarea] (subarea) {Button area}
edge[linefrom] (rootwidget) edge[linefrom] (rootarea)
edge[lineto, bend right=45] node[right=3] {event propagation} (rootwidget); edge[lineto, bend right=45] node[right=3] {event propagation} (rootarea);
\node[block, right of=rootwidget, xshift=5em] {\emph{pinch} tracker} \node[block, right of=rootarea, xshift=5em] {\emph{pinch} tracker}
edge[lineto, dotted, bend left=10] (rootwidget) edge[lineto, dotted, bend left=10] (rootarea)
edge[linefrom, bend right=10] (rootwidget); edge[linefrom, bend right=10] (rootarea);
\node[block, right of=subwidget, xshift=5em] (tracker) {\emph{tap} tracker} \node[block, right of=subarea, xshift=5em] (tracker) {\emph{tap} tracker}
edge[lineto, dotted, bend left=10] (subwidget) edge[lineto, dotted, bend left=10] (subarea)
edge[linefrom, bend right=10] (subwidget); edge[linefrom, bend right=10] (subarea);
\node[block, below of=subwidget, yshift=-.5em] {Application} \node[block, below of=subarea, yshift=-.5em] {Application}
edge[linefrom, dotted, bend left=60] (rootwidget) edge[linefrom, dotted, bend left=60] (rootarea)
edge[linefrom, dotted] (subwidget); edge[linefrom, dotted] (subarea);
\group{subwidget}{eventdriver}{tracker}{subwidget}{Architecture} \group{subarea}{eventdriver}{tracker}{subarea}{Architecture}
} }
\caption{#1} \caption{#1}
\end{figure} \end{figure}
} }
\newcommand{\examplefigureone}{
    % Figure for the independent-rotation example (fig:ex1): two shapes
    % that should be rotatable at the same time by different hands.
    \begin{figure}[h]
        \centering % was \center, which misuses the center environment's start code
        % TODO: draw finger touch points as circles with rotating arrow
        \begin{tikzpicture}
            % Screen border.
            \draw node[draw, black, minimum width=190, minimum height=140] at (0,0) {};
            % NOTE(review): the caption speaks of two squares, but the
            % second shape below is a diamond -- confirm wording or drawing.
            \draw node[fill=gray!50, draw=black!70, minimum height=40, minimum width=40] at (-1,-1) {};
            \draw node[draw=black!80, diamond, minimum height=50, minimum width=50] at (1.2,1) {};
        \end{tikzpicture}
        \caption{Two squares on the screen both listen to rotation. The user
        should be able to ``grab'' each of the squares independently and rotate
        them at the same time.}
        \label{fig:ex1}
    \end{figure}
}
\newcommand{\examplefiguretwo}{
    % Figure for the centroid example (fig:ex2): a diamond with dotted
    % event areas, four touch points around it, and lines from the
    % gesture centroid to each touch point.
    \begin{figure}[h]
        \centering % was \center, which misuses the center environment's start code
        \begin{tikzpicture}
            % Screen border.
            \draw node[draw, black, minimum width=190, minimum height=140] at (0,0) {};
            % The rotatable shape and its dotted surrounding event areas.
            % NOTE(review): the caption speaks of a square, but the shape
            % drawn is a diamond -- confirm wording or drawing.
            \draw node[draw=black!80, diamond, minimum height=50, minimum width=50] at (0.5, 0.3) {};
            \draw node[draw=black, diamond, dotted, minimum height=53, minimum width=53] at (0.5, 0.3) {};
            \draw node[draw=black, dotted, circle, minimum height=80, minimum width=80] at (0.5, 0.3) {};
            % Finger touch points (filled black circles).
            \fill (-0.3, -0.4) circle (0.15)
              (-0.4, 0.8) circle (0.15)
              (-0.1, 1.1) circle (0.15)
              (1.3, 0.9) circle (0.15);
            % Gesture centroid, with a line to each touch point.
            \draw (0.15, 0.55) circle (0.15) -- (-0.3, -0.4);
            \draw (0.15, 0.55) -- (-0.4, 0.8);
            \draw (0.15, 0.55) -- (-0.1, 1.1);
            \draw (0.15, 0.55) -- (1.3, 0.9);
        \end{tikzpicture}
        % Fixed duplicated word ``and and'' in the caption below.
        \caption{A square on the screen listens to rotation. The user can grab
        and rotate the square by positioning fingers around (but not in) it
        and performing a rotating motion. An example pose of four fingers is
        shown by the filled black circles. While the events all occur in the
        dotted \emph{area}, the centroid of the rotation gesture is located in
        the square.}
        \label{fig:ex2}
    \end{figure}
}
\newcommand{\examplefigurethree}{
    % Figure for the overlap example (fig:ex3): a white diamond drawn on
    % top of a gray square; rotating one must not affect the other.
    \begin{figure}[h]
        \centering % was \center, which misuses the center environment's start code
        \begin{tikzpicture}
            % Screen border. The original gave all three nodes the same
            % name ``screen'', which silently overwrites the earlier ones;
            % none of the names are referenced, so only the border keeps it.
            \draw node[draw=black, minimum width=190, minimum height=140] (screen) at (0,0) {};
            % Gray square, partially covered by the white diamond on top.
            \draw node[fill=gray!50, draw=black!70, minimum height=100, minimum width=100] at (-0.1,-0.1) {};
            \draw node[fill=white, draw=black!80, diamond, minimum height=50, minimum width=50] at (0.3,0.4) {};
        \end{tikzpicture}
        \caption{Two overlapping squares that listen to rotation. When the
        white square is rotated, the gray square should keep its current
        orientation and vice-versa.}
        \label{fig:ex3}
    \end{figure}
}
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
\usepackage[english]{babel} \usepackage[english]{babel}
\usepackage[utf8]{inputenc} \usepackage[utf8]{inputenc}
\usepackage{hyperref,graphicx,float,tikz} \usepackage{hyperref,graphicx,tikz,subfigure}
% Link colors % Link colors
\hypersetup{colorlinks=true,linkcolor=black,urlcolor=blue,citecolor=DarkGreen} \hypersetup{colorlinks=true,linkcolor=black,urlcolor=blue,citecolor=DarkGreen}
...@@ -29,19 +29,19 @@ ...@@ -29,19 +29,19 @@
\tableofcontents \tableofcontents
\chapter{Introduction} \chapter{Introduction}
\label{chapter:introduction}
Surface-touch devices have evolved from pen-based tablets to single-touch Surface-touch devices have evolved from pen-based tablets to single-touch
trackpads, to multi-touch devices like smartphones and tablets. Multi-touch trackpads, to multi-touch devices like smartphones and tablets. Multi-touch
devices enable a user to interact with software using hand gestures, making the devices enable a user to interact with software using hand gestures, making the
interaction more expressive and intuitive. These gestures are more complex than interaction more expressive and intuitive. These gestures are more complex than
primitive ``click'' or ``tap'' events that are used by single-touch devices. primitive ``click'' or ``tap'' events that are used by single-touch devices.
Some examples of more complex gestures are so-called ``pinch''\footnote{A Some examples of more complex gestures are ``pinch''\footnote{A ``pinch''
``pinch'' gesture is formed by performing a pinching movement with multiple gesture is formed by performing a pinching movement with multiple fingers on a
fingers on a multi-touch surface. Pinch gestures are often used to zoom in or multi-touch surface. Pinch gestures are often used to zoom in or out on an
out on an object.} and ``flick''\footnote{A ``flick'' gesture is the act of object.} and ``flick''\footnote{A ``flick'' gesture is the act of grabbing an
grabbing an object and throwing it in a direction on a touch surface, giving object and throwing it in a direction on a touch surface, giving it momentum to
it momentum to move for some time after the hand releases the surface.} move for some time after the hand releases the surface.} gestures.
gestures.
The complexity of gestures is not limited to navigation in smartphones. Some The complexity of gestures is not limited to navigation in smartphones. Some
multi-touch devices are already capable of recognizing objects touching the multi-touch devices are already capable of recognizing objects touching the
...@@ -60,32 +60,34 @@ gestures in an application. ...@@ -60,32 +60,34 @@ gestures in an application.
The main question in this research project is whether a generic architecture The main question in this research project is whether a generic architecture
for the detection of complex interaction gestures can be designed, with the for the detection of complex interaction gestures can be designed, with the
capability of managing the complexity of gesture detection logic. capability of managing the complexity of gesture detection logic. The ultimate
goal would be to create an implementation of this architecture that can be
extended to support a wide range of complex gestures. With the existence of
such an implementation, application developers do not need to reinvent gesture
detection for every new gesture-based application.
Application frameworks for surface-touch devices, such as Nokia's Qt \cite{qt}, Application frameworks for surface-touch devices, such as Nokia's Qt \cite{qt},
include the detection of commonly used gestures like \emph{pinch} gestures. do already include the detection of commonly used gestures like \emph{pinch}
However, this detection logic is dependent on the application framework. gestures. However, this detection logic is dependent on the application
Consequently, an application developer who wants to use multi-touch interaction framework. Consequently, an application developer who wants to use multi-touch
in an application is forced to choose an application framework that includes interaction in an application is forced to use an application framework that
support for multi-touch gestures. Therefore, a requirement of the generic includes support for multi-touch gestures. Moreover, the set of supported
architecture is that it must not be bound to a specific application framework. gestures is limited by the application framework of choice. To incorporate a
Moreover, the set of supported gestures is limited by the application framework custom event in an application, the application developer needs to extend the
of choice. To incorporate a custom event in an application, the application framework. This requires extensive knowledge of the framework's architecture.
developer needs to extend the framework. This requires extensive knowledge of Also, if the same gesture is needed in another application that is based on
the framework's architecture. Also, if the same gesture is used in another another framework, the detection logic has to be translated for use in that
application that is based on another framework, the detection logic has to be framework. Nevertheless, application frameworks are a necessity when it comes
translated for use in that framework. Nevertheless, application frameworks are to fast, cross-platform development. A generic architecture design should aim
a necessity when it comes to fast, cross-platform development. Therefore, the to be compatible with existing frameworks, and provide a way to detect and
architecture design should aim to be compatible with existing frameworks, but extend gestures independent of the framework.
provide a way to detect and extend gestures independent of the framework.
Application frameworks are written in a specific programming language. To
An application framework is written in a specific programming language. A support multiple frameworks and programming languages, the architecture should
generic architecture should not limited to a single programming language. The be accessible for applications using a language-independent method of
ultimate goal of this thesis is to provide support for complex gesture communication. This intention leads towards the concept of a dedicated gesture
interaction in any application. Thus, applications should be able to address detection application that serves gestures to multiple applications at the same
the architecture using a language-independent method of communication. This time.
intention leads towards the concept of a dedicated gesture detection
application that serves gestures to multiple programs at the same time.
The scope of this thesis is limited to the detection of gestures on multi-touch The scope of this thesis is limited to the detection of gestures on multi-touch
surface devices. It presents a design for a generic gesture detection surface devices. It presents a design for a generic gesture detection
...@@ -93,10 +95,6 @@ architecture for use in multi-touch based applications. A reference ...@@ -93,10 +95,6 @@ architecture for use in multi-touch based applications. A reference
implementation of this design is used in some test case applications, whose implementation of this design is used in some test case applications, whose
goal is to test the effectiveness of the design and detect its shortcomings. goal is to test the effectiveness of the design and detect its shortcomings.
% FIXME: Moet deze nog in de introductie?
% How can the input of the architecture be normalized? This is needed, because
% multi-touch drivers use their own specific message format.
\section{Structure of this document} \section{Structure of this document}
% TODO: pas als thesis af is % TODO: pas als thesis af is
...@@ -137,9 +135,7 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -137,9 +135,7 @@ goal is to test the effectiveness of the design and detect its shortcomings.
An important observation in this application is that different gestures are An important observation in this application is that different gestures are
detected by different gesture trackers, thus separating gesture detection detected by different gesture trackers, thus separating gesture detection
code into maintainable parts. The architecture has adopted this design code into maintainable parts.
feature by also using different gesture trackers to track different gesture
types.
% TODO: This is not really 'related', move it to somewhere else % TODO: This is not really 'related', move it to somewhere else
\section{Processing implementation of simple gestures in Android} \section{Processing implementation of simple gestures in Android}
...@@ -213,13 +209,13 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -213,13 +209,13 @@ goal is to test the effectiveness of the design and detect its shortcomings.
After all, the gesture detection logic in a ``generic'' architecture should After all, the gesture detection logic in a ``generic'' architecture should
not be implemented based on driver-specific messages. The event types in not be implemented based on driver-specific messages. The event types in
this format should be chosen so that multiple drivers can trigger the same this format should be chosen so that multiple drivers can trigger the same
events. If each supported driver adds its own set of event types to the events. If each supported driver would add its own set of event types to
common format, the purpose of being ``common'' would be defeated. the common format, the purpose of being ``common'' would be defeated.
A reasonable expectation for a touch device driver is that it detects A minimal expectation for a touch device driver is that it detects simple
simple touch points, with a ``point'' being an object at an $(x, y)$ touch points, with a ``point'' being an object at an $(x, y)$ position on
position on the touch surface. This yields a basic set of events: the touch surface. This yields a basic set of events: $\{point\_down,
$\{point\_down, point\_move, point\_up\}$. point\_move, point\_up\}$.
The TUIO protocol supports fiducials\footnote{A fiducial is a pattern used The TUIO protocol supports fiducials\footnote{A fiducial is a pattern used
by some touch devices to identify objects.}, which also have a rotational by some touch devices to identify objects.}, which also have a rotational
...@@ -236,9 +232,10 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -236,9 +232,10 @@ goal is to test the effectiveness of the design and detect its shortcomings.
components in the architecture for translation to gestures. This components in the architecture for translation to gestures. This
communication flow is illustrated in figure \ref{fig:driverdiagram}. communication flow is illustrated in figure \ref{fig:driverdiagram}.
A touch device driver can be supported by adding an event driver Support for a touch device driver can be added by adding an event driver
implementation for it. The event driver implementation that is used in an implementation. The choice of event driver implementation that is used in an
application is dependent of the support of the touch device. application is dependent on the driver support of the touch device being
used.
\driverdiagram{Extension of the diagram from figure \ref{fig:basicdiagram}, \driverdiagram{Extension of the diagram from figure \ref{fig:basicdiagram},
showing the position of the event driver in the architecture. The event showing the position of the event driver in the architecture. The event
...@@ -246,7 +243,8 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -246,7 +243,8 @@ goal is to test the effectiveness of the design and detect its shortcomings.
delegated to analysis components that will interpret them as more complex delegated to analysis components that will interpret them as more complex
gestures.} gestures.}
\section{Restricting gestures to a screen area} \section{Restricting events to a screen area}
\label{sec:restricting-gestures}
% TODO: in introduction: gestures zijn opgebouwd uit meerdere primitieven % TODO: in introduction: gestures zijn opgebouwd uit meerdere primitieven
Touch input devices are unaware of the graphical input widgets rendered on Touch input devices are unaware of the graphical input widgets rendered on
...@@ -255,29 +253,119 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -255,29 +253,119 @@ goal is to test the effectiveness of the design and detect its shortcomings.
gesture to a particular widget on screen, an application programmer must gesture to a particular widget on screen, an application programmer must
restrict the occurrence of a gesture to the area of the screen covered by restrict the occurrence of a gesture to the area of the screen covered by
that widget. An important question is if the architecture should offer a that widget. An important question is if the architecture should offer a
solution to this problem, or leave it to the programmer to assign gestures solution to this problem, or leave it to the application developer to
to a widget. assign gestures to a widget.
% TODO: eerst: aan developer overlaten, verwijzen naar vorige diagram dan: The latter case generates a problem when a gesture must be able to occur at
% consider the following example: ... twee vierkantjes die allebei naar different screen positions at the same time. Consider the example in figure
% rotatie luisteren (figuur ter illustratie): als je ze tegelijk roteert \ref{fig:ex1}, where two squares must be able to be rotated independently
% treedt er maar één globaal event op. Dus: niet gestures beperken tot een at the same time. If the developer is left the task to assign a gesture to
% area, maar events. dan kun je op elk vierkant een aparte detection logic one of the squares, the event analysis component in figure
% zetten met als input de events op die locatie oftewel: je kan het niet \ref{fig:driverdiagram} receives all events that occur on the screen.
% aan de developer overlaten omdat de input van de detection logic moet Assuming that the rotation detection logic detects a single rotation
% veranderen (heeft developer geen invloed op) dus conclusie: Je moet gesture based on all of its input events, without detecting clusters of
% events kunnen beperken tot een "area" van het scherm. op dit moment kan input events, only one rotation gesture can be triggered at the same time.
% de diagram dus al worden uitgebreid When a user attempts to ``grab'' one rectangle with each hand, the events
triggered by all fingers are combined to form a single rotation gesture
% dan: simpelste aanpak is een lijst van area's, als event erin past dan instead of two separate gestures.
\examplefigureone
To overcome this problem, groups of events must be separated by the event
analysis component before any detection logic is executed. An obvious
solution for the given example is to incorporate this separation in the
rotation detection logic itself, using a distance threshold that decides if
an event should be added to an existing rotation gesture. Leaving the task
of separating groups of events to detection logic leads to duplication of
code. For instance, if the rotation gesture is replaced by a \emph{pinch}
gesture that enlarges a rectangle, the detection logic that detects the
pinch gesture would have to contain the same code that separates groups of
events for different gestures. Also, a pinch gesture can be performed using
fingers of multiple hands as well, in which case the use of a simple distance
threshold is insufficient. These examples show that gesture detection logic
is hard to implement without knowledge about (the position of) the
widget\footnote{``Widget'' is a name commonly used to identify an element
of a graphical user interface (GUI).} that is receiving the gesture.
Therefore, a better solution for the assignment of events to gesture
detection is to make the gesture detection component aware of the locations
of application widgets on the screen. To accomplish this, the architecture
must contain a representation of the screen area covered by a widget. This
leads to the concept of an \emph{area}, which represents an area on the
touch surface in which events should be grouped before being delegated to a
form of gesture detection. Examples of simple area implementations are
rectangles and circles. However, areas could be made to represent more
complex shapes.
An area groups events and assigns them to some piece of gesture detection
logic. This possibly triggers a gesture, which must be handled by the
client application. A common way to handle framework events in an
application is a ``callback'' mechanism: the application developer binds a
function to an event, that is called by the framework when the event
occurs. Because of the familiarity of this concept with developers, the
architecture uses a callback mechanism to handle gestures in an
application. Since an area controls the grouping of events and thus the
occurrence of gestures in an area, gesture handlers for a specific gesture
type are bound to an area. Figure \ref{fig:areadiagram} shows the position
of areas in the architecture.
\areadiagram{Extension of the diagram from figure \ref{fig:driverdiagram},
showing the position of areas in the architecture. An area delegates events
to a gesture detection component that triggers gestures. The area then calls
the handler that is bound to the gesture type by the application.}
Note that the boundaries of an area are only used to group events, not
gestures. A gesture could occur outside the area that contains its
originating events, as illustrated by the example in figure \ref{fig:ex2}.
\examplefiguretwo
A remark must be made about the use of areas to assign events to the
detection of some gesture. The concept of an ``area'' is based on the
assumption that the set of originating events that form a particular
gesture can be determined based exclusively on the location of the events.
This is a
reasonable assumption for simple touch objects whose only parameter is a
position, such as a pen or a human finger. However, more complex touch
objects can have additional parameters, such as rotational orientation or
color. An even more generic concept is the \emph{event filter}, which
detects whether an event should be assigned to a particular piece of
gesture detection based on all available parameters. This level of
abstraction allows for constraints like ``Use all blue objects within a
widget for rotation, and green objects for tapping''. As mentioned in the
introduction chapter [\ref{chapter:introduction}], the scope of this thesis
is limited to multi-touch surface based devices, for which the \emph{area}
concept suffices. Section \ref{sec:eventfilter} explores the possibility of
areas to be replaced with event filters.
\subsection*{Reserving an event for a gesture}
The most simple implementation of areas in the architecture is a list of
areas. When the event driver delegates an event, it is delegated to gesture
detection by each area that contains the event coordinates. A problem
occurs when areas overlap, as shown by figure \ref{fig:ex3}. When the
white square is rotated, the gray square should keep its current
orientation. This means that events that are used for rotation of the white
square, should not be used for rotation of the gray square. To achieve
this, there must be some communication between the rotation detection
components of the two squares.
\examplefigurethree
% --------
% simpelste aanpak is een lijst van area's, als event erin past dan
% delegeren. probleem (aangeven met voorbeeld van geneste widgets die % delegeren. probleem (aangeven met voorbeeld van geneste widgets die
% allebei naar tap luisteren): als area's overlappen wil je bepaalde events % allebei naar tap luisteren): als area's overlappen wil je bepaalde events
% reserveren voor bepaalde stukjes detection logic % reserveren voor bepaalde stukjes detection logic
% oplossing: area'a opslaan in boomstructuur en event propagatie gebruiken % oplossing: area'a opslaan in boomstructuur en event propagatie gebruiken
% -> area binnenin een parent area kan events propageren naar die parent, % -> area binnenin een parent area kan events propageren naar die parent,
% detection logic kan propagatie tegenhouden. om omhoog in de boom te % detection logic kan propagatie tegenhouden. om omhoog in de boom te
% propageren moet het event eerst bij de leaf aankomen, dus eerst delegatie % propageren moet het event eerst bij de leaf aankomen, dus eerst delegatie
% tot laagste leaf node die het event bevat. % tot laagste leaf node die het event bevat.
% speciaal geval: overlappende area's in dezelfde laag v/d boom. in dat % speciaal geval: overlappende area's in dezelfde laag v/d boom. in dat
% geval: area die later is toegevoegd (rechter sibling) wordt aangenomen % geval: area die later is toegevoegd (rechter sibling) wordt aangenomen
% bovenop de sibling links ervan te liggen en krijgt dus eerst het event. % bovenop de sibling links ervan te liggen en krijgt dus eerst het event.
...@@ -288,37 +376,16 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -288,37 +376,16 @@ goal is to test the effectiveness of the design and detect its shortcomings.
% die voor widgets een boomstructuur gebruikt -> voor elke widget die touch % die voor widgets een boomstructuur gebruikt -> voor elke widget die touch
% events heeft een area aanmaken % events heeft een area aanmaken
Gestures are composed of primitive events using detection logic. If a %For example, a button tap\footnote{A ``tap'' gesture is triggered when a
particular gesture should only occur within some area of the screen, it %touch object releases a touch surface within a certain time and distance
should be composed of only events that occur within that area Events that %from the point where it initially touched the surface.} should only occur
occur outside the area are not likely to be relevant to the . In other %on the button itself, and not in any other area of the screen. A solution
words, the gesture detection logic is affected by the area in which the %to this problem is the use of \emph{widgets}. The button from the example
gestures should be detected. Since the detection logic is part of the %can be represented as a rectangular widget with a position and size. The
architecture, the architecture must be able to restrict the set of events %position and size are compared with event coordinates to determine whether
to that are delegated to the particular piece of detection logic for the %an event should occur within the button.
gesture being detected in the area.
\subsection*{Area tree}
For example, a button tap\footnote{A ``tap'' gesture is triggered when a
touch object releases a touch surface within a certain time and distance
from the point where it initially touched the surface.} should only occur
on the button itself, and not in any other area of the screen. A solution
to this problem is the use of \emph{widgets}. The button from the example
can be represented as a rectangular widget with a position and size. The
position and size are compared with event coordinates to determine whether
an event should occur within the button.
\subsection*{Callbacks}
\label{sec:callbacks}
When an event is propagated by a widget, it is first used for event
analysis on that widget. The event analysis can then trigger a gesture
in the widget, which has to be handled by the application. To handle a
gesture, the widget should provide a callback mechanism: the
application binds a handler for a specific type of gesture to a widget.
When a gesture of that type is triggered after event analysis, the
widget triggers the callback.
\subsection*{Widget tree}
A problem occurs when widgets overlap. If a button is placed over a A problem occurs when widgets overlap. If a button is placed over a
container and an event occurs inside the button, should the container and an event occurs inside the button, should the
...@@ -347,34 +414,25 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -347,34 +414,25 @@ goal is to test the effectiveness of the design and detect its shortcomings.
\texttt{GtkTouchWidget} that synchronises the position of a touch \texttt{GtkTouchWidget} that synchronises the position of a touch
widget with that of a GTK widget, using GTK signals. widget with that of a GTK widget, using GTK signals.
\subsection*{Position of widget tree in architecture} \section{Detecting gestures from events}
\label{sec:gesture-detection}
\widgetdiagram{Extension of the diagram from figure
\ref{fig:driverdiagram}, showing the position of widgets in the
architecture.}
\section{Event analysis} The events that are grouped by areas must be translated to complex gestures
\label{sec:event-analysis} in some way. This analysis is specific to the type of gesture being
detected. E.g. the detection of a ``tap'' gesture is very different from
detection of a ``rotate'' gesture. The architecture has adopted the
\emph{gesture tracker}-based design described by \cite{win7touch}, which
separates the detection of different gestures into different \emph{gesture
trackers}. This keeps the different pieces of gesture detection code
manageable and extendable. A single gesture tracker detects a specific set
of gesture types, given a set of primitive events. An example of a possible
gesture tracker implementation is a ``transformation tracker'' that detects
rotation, scaling and translation gestures.
% TODO: essentie moet zijn dat gesture trackers detection logic opdelen in % TODO: een formele definitie van gestures zou wellicht beter zijn, maar
% behapbare stukken, en worden toegewezen aan een enkele area waardoor er
% meerdere trackers tegelijk kunnen draaien op verschillende delen v/h
% scherm. een formele definitie van gestures zou wellicht beter zijn, maar
% wordt niet gegeven in deze thesis (wel besproken in future work) % wordt niet gegeven in deze thesis (wel besproken in future work)
The events that are delegated to widgets must be analyzed in some way to \subsection*{Assignment of a gesture tracker to an area}
gestures. This analysis is specific to the type of gesture being detected.
E.g. the detection of a ``tap'' gesture is very different from detection of
a ``rotate'' gesture. The implementation described in \cite{win7touch}
separates the detection of different gestures into different \emph{gesture
trackers}. This keeps the different pieces of detection code manageable and
extendable. Therefore, the architecture also uses gesture trackers to
separate the analysis of events. A single gesture tracker detects a
specific set of gesture types, given a sequence of events. An example of a
possible gesture tracker implementation is a ``transformation tracker''
that detects rotation, scaling and translation gestures.
\subsection*{Assignment of a gesture tracker to a widget}
As explained in section \ref{sec:callbacks}, events are delegated from As explained in section \ref{sec:callbacks}, events are delegated from
a widget to some event analysis. The analysis component of a widget a widget to some event analysis. The analysis component of a widget
...@@ -404,7 +462,8 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -404,7 +462,8 @@ goal is to test the effectiveness of the design and detect its shortcomings.
The button is located inside an application window, which can be resized The button is located inside an application window, which can be resized
using pinch gestures. using pinch gestures.
% TODO: comments weg, in pseudocode opschrijven % TODO: comments weg, in pseudocode opschrijven, uitbreiden met draggable
% circle en illustrerende figuur
\begin{verbatim} \begin{verbatim}
initialize GUI, creating a window initialize GUI, creating a window
...@@ -442,6 +501,13 @@ goal is to test the effectiveness of the design and detect its shortcomings. ...@@ -442,6 +501,13 @@ goal is to test the effectiveness of the design and detect its shortcomings.
\chapter{Test applications} \chapter{Test applications}
\section{Reference implementation in Python}
\label{sec:implementation}
% TODO
% alleen window.contains op point down, niet move/up
% een paar simpele windows en trackers
To test multi-touch interaction properly, a multi-touch device is required. The To test multi-touch interaction properly, a multi-touch device is required. The
University of Amsterdam (UvA) has provided access to a multi-touch table from University of Amsterdam (UvA) has provided access to a multi-touch table from
PQlabs. The table uses the TUIO protocol \cite{TUIO} to communicate touch PQlabs. The table uses the TUIO protocol \cite{TUIO} to communicate touch
...@@ -449,6 +515,8 @@ events. See appendix \ref{app:tuio} for details regarding the TUIO protocol. ...@@ -449,6 +515,8 @@ events. See appendix \ref{app:tuio} for details regarding the TUIO protocol.
The reference implementation is a Proof of Concept that translates TUIO The reference implementation is a Proof of Concept that translates TUIO
messages to some simple touch gestures (see appendix \ref{app:implementation} messages to some simple touch gestures (see appendix \ref{app:implementation}
for details). for details).
% omdat we alleen deze tafel hebben kunnen we het concept van de event driver
% alleen met het TUIO protocol testen, en niet vergelijken met andere drivers
% TODO % TODO
% testprogramma's met PyGame/Cairo % testprogramma's met PyGame/Cairo
...@@ -461,6 +529,10 @@ for details). ...@@ -461,6 +529,10 @@ for details).
% bijv. een state machine % bijv. een state machine
% - volgende stap: maken van een library die meerdere drivers en complexe % - volgende stap: maken van een library die meerdere drivers en complexe
% gestures bevat % gestures bevat
% - "event filter" ipv "area"
\section{A generic way for grouping events}
\label{sec:eventfilter}
\bibliographystyle{plain} \bibliographystyle{plain}
\bibliography{report}{} \bibliography{report}{}
...@@ -567,11 +639,4 @@ algorithms based on its test program. ...@@ -567,11 +639,4 @@ algorithms based on its test program.
Also, the different detection algorithms are all implemented in the same file, Also, the different detection algorithms are all implemented in the same file,
making it complex to read or debug, and difficult to extend. making it complex to read or debug, and difficult to extend.
\chapter{Reference implementation in Python}
\label{app:implementation}
% TODO
% alleen window.contains op point down, niet move/up
% een paar simpele windows en trackers
\end{document} \end{document}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment