[PATCH weston 1/6] input: Add helpers for all keyboard/pointer/touch events

2016-08-12 Thread Quentin Glidic
From: Quentin Glidic 

These are useful to implement grabs.

Signed-off-by: Quentin Glidic 
Reviewed-by: Jonas Ådahl 
Acked-by: Giulio Camuffo 

Differential Revision: https://phabricator.freedesktop.org/D1245
---
 libweston/compositor.h |  33 +
 libweston/input.c  | 374 +
 2 files changed, 316 insertions(+), 91 deletions(-)

diff --git a/libweston/compositor.h b/libweston/compositor.h
index 0133084..fdf4592 100644
--- a/libweston/compositor.h
+++ b/libweston/compositor.h
@@ -424,6 +424,14 @@ weston_pointer_create(struct weston_seat *seat);
 void
 weston_pointer_destroy(struct weston_pointer *pointer);
 void
+weston_pointer_send_motion(struct weston_pointer *pointer, uint32_t time,
+  struct weston_pointer_motion_event *event);
+bool
+weston_pointer_has_focus_resource(struct weston_pointer *pointer);
+void
+weston_pointer_send_button(struct weston_pointer *pointer,
+  uint32_t time, uint32_t button, uint32_t state_w);
+void
 weston_pointer_send_axis(struct weston_pointer *pointer,
 uint32_t time,
 struct weston_pointer_axis_event *event);
@@ -477,6 +485,18 @@ int
 weston_keyboard_set_locks(struct weston_keyboard *keyboard,
  uint32_t mask, uint32_t value);
 
+bool
+weston_keyboard_has_focus_resource(struct weston_keyboard *keyboard);
+void
+weston_keyboard_send_key(struct weston_keyboard *keyboard,
+uint32_t time, uint32_t key,
+enum wl_keyboard_key_state state);
+void
+weston_keyboard_send_modifiers(struct weston_keyboard *keyboard,
+  uint32_t serial, uint32_t mods_depressed,
+  uint32_t mods_latched,
+  uint32_t mods_locked, uint32_t group);
+
 struct weston_touch *
 weston_touch_create(void);
 void
@@ -490,6 +510,19 @@ weston_touch_start_grab(struct weston_touch *device,
 void
 weston_touch_end_grab(struct weston_touch *touch);
 
+bool
+weston_touch_has_focus_resource(struct weston_touch *touch);
+void
+weston_touch_send_down(struct weston_touch *touch, uint32_t time,
+  int touch_id, wl_fixed_t x, wl_fixed_t y);
+void
+weston_touch_send_up(struct weston_touch *touch, uint32_t time, int touch_id);
+void
+weston_touch_send_motion(struct weston_touch *touch, uint32_t time,
+int touch_id, wl_fixed_t x, wl_fixed_t y);
+void
+weston_touch_send_frame(struct weston_touch *touch);
+
 void
 wl_data_device_set_keyboard_focus(struct weston_seat *seat);
 
diff --git a/libweston/input.c b/libweston/input.c
index 6e931bd..4ed08fd 100644
--- a/libweston/input.c
+++ b/libweston/input.c
@@ -340,9 +340,9 @@ default_grab_pointer_focus(struct weston_pointer_grab *grab)
 }
 
 static void
-weston_pointer_send_relative_motion(struct weston_pointer *pointer,
-   uint32_t time,
-   struct weston_pointer_motion_event *event)
+pointer_send_relative_motion(struct weston_pointer *pointer,
+uint32_t time,
+struct weston_pointer_motion_event *event)
 {
uint64_t time_usec;
double dx, dy, dx_unaccel, dy_unaccel;
@@ -379,8 +379,8 @@ weston_pointer_send_relative_motion(struct weston_pointer 
*pointer,
 }
 
 static void
-weston_pointer_send_motion(struct weston_pointer *pointer, uint32_t time,
-  wl_fixed_t sx, wl_fixed_t sy)
+pointer_send_motion(struct weston_pointer *pointer, uint32_t time,
+   wl_fixed_t sx, wl_fixed_t sy)
 {
struct wl_list *resource_list;
struct wl_resource *resource;
@@ -393,11 +393,10 @@ weston_pointer_send_motion(struct weston_pointer 
*pointer, uint32_t time,
wl_pointer_send_motion(resource, time, sx, sy);
 }
 
-static void
-default_grab_pointer_motion(struct weston_pointer_grab *grab, uint32_t time,
-   struct weston_pointer_motion_event *event)
+WL_EXPORT void
+weston_pointer_send_motion(struct weston_pointer *pointer, uint32_t time,
+  struct weston_pointer_motion_event *event)
 {
-   struct weston_pointer *pointer = grab->pointer;
wl_fixed_t x, y;
wl_fixed_t old_sx = pointer->sx;
wl_fixed_t old_sy = pointer->sy;
@@ -411,50 +410,78 @@ default_grab_pointer_motion(struct weston_pointer_grab 
*grab, uint32_t time,
weston_pointer_move(pointer, event);
 
if (old_sx != pointer->sx || old_sy != pointer->sy) {
-   weston_pointer_send_motion(pointer, time,
-  pointer->sx, pointer->sy);
+   pointer_send_motion(pointer, time,
+   pointer->sx, pointer->sy);
}
 
-   weston_pointer_send_relative_motion(pointer, time, event);
+   pointer_send_relative_motion(pointer, time, ev

Re: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and wayland video sink

2016-07-13 Thread Vikas Patil
Dear All,

Looking at the wayland log below, without the above workaround the focus being
delivered to the surface is 0. I think it means the surface is failing to grab
the touch focus. What could be the reasons for focus not getting set up?

I think this [3687363.407] ivi_input@19.input_focus(90, 4, 0) need to
be [3687363.407] ivi_input@19.input_focus(90, 4, 1) for touch to work.
any ideas here?



[3687298.588]  -> wl_surface@9.commit()
[3687314.601] wl_buffer@30.release()
[3687314.718] wl_callb...@29.done(299298)
[3687314.793]  -> wl_surface@12.frame(new id wl_callback@29)
[3687314.910]  -> wl_surface@12.attach(wl_buffer@28, 0, 0)
[3687315.022]  -> wl_surface@12.damage(0, 0, 800, 480)
[3687315.140]  -> wl_surface@12.commit()
[3687340.439] wl_display@1.delete_id(14)
[3687340.620] wl_display@1.delete_id(26)
[3687340.690] wl_display@1.delete_id(29)
[3687340.811] wl_callb...@26.done(299368)
[3687341.024]  -> wl_surface@12.frame(new id wl_callback@26)
[3687363.407] ivi_input@19.input_focus(90, 4, 0)
[3687366.986]  -> wl_compositor@3.create_region(new id wl_region@14)
[3687367.165]  -> wl_reg...@14.add(0, 0, 800, 480)
[3687367.286]  -> wl_surface@12.set_opaque_region(wl_region@14)
[3687367.347]  -> wl_region@14.destroy()
[3687367.394]  -> wl_surface@9.commit()
[3687376.888] wl_buffer@27.release()
[3687376.987] wl_callb...@29.done(299368)
[3687377.077]  -> wl_surface@12.frame(new id wl_callback@29)
[3687377.161]  -> wl_surface@12.attach(wl_buffer@30, 0, 0)


Thanks & Regards,
Vikash

On Tue, Dec 15, 2015 at 8:24 PM, Vikas Patil  wrote:
> If I force to pass the if condition as follows using the surface id
> even though surfaces doesn't match, it is working. Does this might be
> due to the way I created the surface 90? Surface 90 also has
> subsurface, does it anyway related to subsurface?
>
> Also, "grab->touch->focus->surface" — what is this surface and who creates it?
>
> surfID = interface->get_id_of_surface(surf_ctx->layout_surface);
>
> /* Touches set touch focus */
>
> if (grab->touch->num_tp == 1) {
>
> if (surf == grab->touch->focus->surface ||  surfID == 90) {
>
> surf_ctx->focus |= ILM_INPUT_DEVICE_TOUCH;
>
> send_input_focus(seat->input_ctx,
>
>
> interface->get_id_of_surface(surf_ctx->layout_surface),
>
>  ILM_INPUT_DEVICE_TOUCH, ILM_TRUE);
>
> }
>
>
> Thanks & Regards,
> Vikash
>
> On Mon, Dec 14, 2015 at 8:53 PM, Vikas Patil  wrote:
>> I am hitting else part of below code for this issue. Any ideas?
>>
>> touch_grab_down() from ivi-input-controller.c
>>
>>  /* Touches set touch focus */
>> if (grab->touch->num_tp == 1) {
>> if (surf == grab->touch->focus->surface) {
>> surf_ctx->focus |= ILM_INPUT_DEVICE_TOUCH;
>> send_input_focus(seat->input_ctx,
>>
>> interface->get_id_of_surface(surf_ctx->layout_surface),
>>  ILM_INPUT_DEVICE_TOUCH, ILM_TRUE);
>> } else {
>> surf_ctx->focus &= ~ILM_INPUT_DEVICE_TOUCH;
>> <--
>> send_input_focus(seat->input_ctx,
>>
>> interface->get_id_of_surface(surf_ctx->layout_surface),
>>  ILM_INPUT_DEVICE_TOUCH, ILM_FALSE);
>> }
>> }
>>
>> /* This code below is slightly redundant, since we have already
>>  * decided only one surface has touch focus */
>> if (!(surf_ctx->focus & ILM_INPUT_DEVICE_TOUCH))
>> continue;
>> <-
>>
>>
>> Thanks & Regards,
>> Vikash
>>
>> On Mon, Dec 14, 2015 at 3:25 PM, Vikas Patil  wrote:
>>> Sorry. Forgot to attach the log file. Attached here.
>>>
>>> Thanks & Regards,
>>> Vikas
>>>
>>> On Mon, Dec 14, 2015 at 3:24 PM, Vikas Patil  wrote:
>>>> Hi Eugen Friedrich
>>>>
>>>> Thanks a lot for your quick reply.
>>>>
>>>> Attached here the file with WAYLAND_DEBUG=1 log when the gstreamer
>>>> plug-in is in use. I can see "ivi_input@18.input_focus(90, 4, 0)" for
>>>> the surface from plug-in but no touch events.
>>>>
>>>> Here is the output of APIs
>>>>
>>>> root@linux-9939-a1:~# LayerManagerControl get input device default 
>>>> capabilities
>>>> failed to get surface con

Re: [libinput] How I get extra touch events using libinput?

2016-01-06 Thread Andreas Pokorny
Hi,


On Wed, Jan 6, 2016 at 8:30 AM, Peter Hutterer 
wrote:

> CC-ing Andreas this time, forgot about it in my original reply, sorry about
> that.
>
> On Wed, Jan 06, 2016 at 03:35:49PM +0900, 강정현 wrote:
> > In Andreas Pokorny patch, only support part of standard linux MT
> protocols
> > except ABS_MT_DISTANCE, ABS_MT_TOOL_TYPE, etc..  Do you have any plan to
> > support whole MT protocol events? Or These events are meaningless to
> > support via libinput?
>

This was driven by the axes provided by a few android phones and tablets
ubuntu touch was ported to. Which I believe resulted into a common set of
properties, shared by a lot of touch screens.


> > 3.
> > How can we deal with the additional MT protocol events in libinput, if
> our
> > kernel support more MT protocol events than the events listed in input.h
> > in libinput ?
> > Can they be supported ? Do you have any ideas?
>

The problem I think is that most of us have no influence both ends at the
same time: applications or toolkits and the actual hardware. If the end
result is a nice appliance I doubt anyone will object to extend the
intermediate components. But there also has to be some sane fallback or
default for when the information is not available.

I will get back to that topic soon.. I still need to add a calibration
option(s) to get the contact sizes properly scaled...

regards
Andreas
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [libinput] How I get extra touch events using libinput?

2016-01-05 Thread Peter Hutterer
CC-ing Andreas this time, forgot about it in my original reply, sorry about
that.

On Wed, Jan 06, 2016 at 03:35:49PM +0900, 강정현 wrote:
> Dear Peter and guys,
> 
> thank you for your information about Andreas Pokorny's patches.
> As I checked them out, most of them seem to make sense to me. :)
>  
> I have more queries. Would you please kindly answer to me?
> 
> 1.
> How can we send the touch pressure/area events via wayland protocol?
> I know this question is something irrelative to libinput. I'd like to get
> your opinion. :)

> Suppose now we have get touch pressure and area events as libinput events
> and in our wayland server is ready to dealing with them.
> When it comes to sending those events (pressure, area) via wayland
> protocol, as we know, we can't send them because there are no arguments
> exist in the current latest version of wl_touch_interface.
> 
> There can be two or more ways to send the events to wayland client(s).
>   - One is extending wayland protocol to have the more arguments.
>   - The other is implementing our own wayland extension protocol for our 
> wayland server and clients.
> Can you tell me which one is more acceptable to you ? :)

I'd go with adding an experimental protocol to wayland-protocols first and
figuring out the details and interactions. Once it looks good enough it can
be merged back into wl_touch if needed.

> 2.
> In Andreas Pokorny patch, only support part of standard linux MT protocols
> except ABS_MT_DISTANCE, ABS_MT_TOOL_TYPE, etc..  Do you have any plan to
> support whole MT protocol events? Or These events are meaningless to
> support via libinput?

do you have hardware that provides these and software that requires them?
git grep shows one device that uses ABS_MT_TOOL_TYPE (pretty sure the wacom
w8001 is a false positive and should be removed). the same device is also
the only one that uses ABS_MT_DISTANCE.

anyway: tool type won't be added as external option. the pen tool would be a
tablet device, and the palm tool probably just discarded.

distance we could talk about. anything else you'll have to bring up and
discuss one-by-one. I don't want to add features just because we can, one of
the things libinput should do is to sanitize the axes and make them sensible
and easy to use from the callers. without knowing what an axis is and how
the devices provide information we can't do that.

> 3.
> How can we deal with the additional MT protocol events in libinput, if our
> kernel support more MT protocol events than the events listed in input.h
> in libinput ?
> Can they be supported ? Do you have any ideas?

you'll need to add them to the kernel first. what types of events are we
talking about here?

Cheers,
   Peter

> > -Original Message-
> > From: Peter Hutterer [mailto:peter.hutte...@who-t.net]
> > Sent: Tuesday, January 05, 2016 5:46 PM
> > Cc: wayland-devel@lists.freedesktop.org; sj76.p...@samsung.com;
> > duna...@samsung.com
> > Subject: Re: [libinput] How I get extra touch events using libinput?
> > 
> > On Tue, Jan 05, 2016 at 05:01:33PM +0900,wrote:
> > > Hello, guys.
> > > I have a query regarding linux MT protocol event handling in libinput
> > side.
> > > I wanted to get the extra events such as ABS_MT_TOUCH_MAJOR,
> > > ABS_MT_PRESSURE as an event of libinput, but I found that libinput
> > > doesn't handle those events.
> > >
> > > Thus I'd like to know whether there is any plan regarding ABS_MT_XXX
> > > event handling in libinput side.
> > > If there are no plan to support those events, how can I handling those
> > > events?
> > 
> > correct, there is currently no handling of these events, but Andreas
> > Pokorny has a number of patches to add extra axes for touch events. I
> > think this one was the last version:
> > http://lists.freedesktop.org/archives/wayland-devel/2015-
> > November/025415.html
> > and the comments:
> > http://lists.freedesktop.org/archives/wayland-devel/2015-
> > November/025687.html
> > 
> > Cheers,
> >Peter
> 
> 
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


RE: [libinput] How I get extra touch events using libinput?

2016-01-05 Thread 강정현
Dear Peter and guys,

thank you for your information about Andreas Pokorny's patches.
As I checked them out, most of them seem to make sense to me. :)
 
I have more queries. Would you please kindly answer to me?

1.
How can we send the touch pressure/area events via wayland protocol?
I know this question is somewhat unrelated to libinput. I'd like to get your 
opinion. :)

Suppose now we get touch pressure and area events as libinput events and 
our wayland server is ready to deal with them.
When it comes to sending those events (pressure, area) via wayland protocol, as 
we know, we can't send them because no such arguments exist in the current 
latest version of wl_touch_interface.

There can be two or more ways to send the events to wayland client(s).
  - One is extending wayland protocol to have the more arguments.
  - The other is implementing our own wayland extension protocol for our 
wayland server and clients.
Can you tell me which one is more acceptable to you ? :)


2.
In Andreas Pokorny patch, only support part of standard linux MT protocols 
except ABS_MT_DISTANCE, ABS_MT_TOOL_TYPE, etc..
Do you have any plan to support whole MT protocol events? Or These events are 
meaningless to support via libinput?


3.
How can we deal with the additional MT protocol events in libinput, if our 
kernel support more MT protocol events than the events listed in input.h in 
libinput ?
Can they be supported ? Do you have any ideas?


Thank you in advance.
JengHyun Kang.

> -Original Message-
> From: Peter Hutterer [mailto:peter.hutte...@who-t.net]
> Sent: Tuesday, January 05, 2016 5:46 PM
> Cc: wayland-devel@lists.freedesktop.org; sj76.p...@samsung.com;
> duna...@samsung.com
> Subject: Re: [libinput] How I get extra touch events using libinput?
> 
> On Tue, Jan 05, 2016 at 05:01:33PM +0900,wrote:
> > Hello, guys.
> > I have a query regarding linux MT protocol event handling in libinput
> side.
> > I wanted to get the extra events such as ABS_MT_TOUCH_MAJOR,
> > ABS_MT_PRESSURE as an event of libinput, but I found that libinput
> > doesn't handle those events.
> >
> > Thus I'd like to know whether there is any plan regarding ABS_MT_XXX
> > event handling in libinput side.
> > If there are no plan to support those events, how can I handling those
> > events?
> 
> correct, there is currently no handling of these events, but Andreas
> Pokorny has a number of patches to add extra axes for touch events. I
> think this one was the last version:
> http://lists.freedesktop.org/archives/wayland-devel/2015-
> November/025415.html
> and the comments:
> http://lists.freedesktop.org/archives/wayland-devel/2015-
> November/025687.html
> 
> Cheers,
>Peter

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [libinput] How I get extra touch events using libinput?

2016-01-05 Thread Peter Hutterer
On Tue, Jan 05, 2016 at 05:01:33PM +0900, 강정현 wrote:
> Hello, guys.
> I have a query regarding linux MT protocol event handling in libinput side.
> I wanted to get the extra events such as ABS_MT_TOUCH_MAJOR,
> ABS_MT_PRESSURE as an event of libinput,
> but I found that libinput doesn't handle those events.
> 
> Thus I'd like to know whether there is any plan regarding ABS_MT_XXX event
> handling in libinput side.
> If there are no plan to support those events, how can I handling those
> events?

correct, there is currently no handling of these events, but Andreas Pokorny
has a number of patches to add extra axes for touch events. I think this one
was the last version:
http://lists.freedesktop.org/archives/wayland-devel/2015-November/025415.html
and the comments:
http://lists.freedesktop.org/archives/wayland-devel/2015-November/025687.html

Cheers,
   Peter
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[libinput] How I get extra touch events using libinput?

2016-01-05 Thread 강정현
Hello, guys.
I have a query regarding linux MT protocol event handling in libinput side.
I wanted to get the extra events such as ABS_MT_TOUCH_MAJOR,
ABS_MT_PRESSURE as an event of libinput,
but I found that libinput doesn't handle those events.

Thus I'd like to know whether there is any plan regarding ABS_MT_XXX event
handling in libinput side.
If there is no plan to support those events, how can I handle those
events?

Please kindly answer for my query.

Thanks and regards,
JengHyun Kang.

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and wayland video sink

2015-12-15 Thread Vikas Patil
If I force to pass the if condition as follows using the surface id
even though surfaces doesn't match, it is working. Does this might be
due to the way I created the surface 90? Surface 90 also has
subsurface, does it anyway related to subsurface?

Also, "grab->touch->focus->surface" — what is this surface and who creates it?

surfID = interface->get_id_of_surface(surf_ctx->layout_surface);

/* Touches set touch focus */

if (grab->touch->num_tp == 1) {

if (surf == grab->touch->focus->surface ||  surfID == 90) {

surf_ctx->focus |= ILM_INPUT_DEVICE_TOUCH;

send_input_focus(seat->input_ctx,


interface->get_id_of_surface(surf_ctx->layout_surface),

 ILM_INPUT_DEVICE_TOUCH, ILM_TRUE);

}


Thanks & Regards,
Vikash

On Mon, Dec 14, 2015 at 8:53 PM, Vikas Patil  wrote:
> I am hitting else part of below code for this issue. Any ideas?
>
> touch_grab_down() from ivi-input-controller.c
>
>  /* Touches set touch focus */
> if (grab->touch->num_tp == 1) {
> if (surf == grab->touch->focus->surface) {
> surf_ctx->focus |= ILM_INPUT_DEVICE_TOUCH;
> send_input_focus(seat->input_ctx,
>
> interface->get_id_of_surface(surf_ctx->layout_surface),
>  ILM_INPUT_DEVICE_TOUCH, ILM_TRUE);
> } else {
> surf_ctx->focus &= ~ILM_INPUT_DEVICE_TOUCH;
> <--
> send_input_focus(seat->input_ctx,
>
> interface->get_id_of_surface(surf_ctx->layout_surface),
>  ILM_INPUT_DEVICE_TOUCH, ILM_FALSE);
> }
> }
>
> /* This code below is slightly redundant, since we have already
>  * decided only one surface has touch focus */
> if (!(surf_ctx->focus & ILM_INPUT_DEVICE_TOUCH))
> continue;
> <-
>
>
> Thanks & Regards,
> Vikash
>
> On Mon, Dec 14, 2015 at 3:25 PM, Vikas Patil  wrote:
>> Sorry. Forgot to attach the log file. Attached here.
>>
>> Thanks & Regards,
>> Vikas
>>
>> On Mon, Dec 14, 2015 at 3:24 PM, Vikas Patil  wrote:
>>> Hi Eugen Friedrich
>>>
>>> Thanks a lot for your quick reply.
>>>
>>> Attached here the file with WAYLAND_DEBUG=1 log when the gstreamer
>>> plug-in is in use. I can see "ivi_input@18.input_focus(90, 4, 0)" for
>>> the surface from plug-in but no touch events.
>>>
>>> Here is the output of APIs
>>>
>>> root@linux-9939-a1:~# LayerManagerControl get input device default 
>>> capabilities
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> pointer
>>> keyboard
>>> touch
>>>
>>> root@linux-9939-a1:~# LayerManagerControl get surface 90 acceptance
>>> Interpreter error: 'acceptance' not recognized.
>>>
>>> root@orinoco-9939-a1:~# LayerManagerControl get input devices with all
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> default
>>>
>>> root@linux-9939-a1:~# LayerManagerControl get input focus
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> failed to get surface context in ilmControl
>>> surface 90:
>>> surface 63:
>>> surface 62:
>>> surface 61:
>>> surface 60: pointer keyboard
>>>
>>>
>>> Thanks & Regards,
>>> Vikas
>>>
>>> On Mon, Dec 14, 2015 at 3:01 PM, Friedrich, Eugen (ADITG/SW1)
>>>  wrote:
>>>> Hello Vikas,
>>>>
>>>> Could you please add the WAYLAND_DEBUG=1 traces from you application, to 
>>>> see if the input events are reaching the client.
>>>>
>>>> Also the output of the following API would be helpful:
>>>> ilm_getInputFocus,
>>>> i

Re: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and wayland video sink

2015-12-14 Thread Vikas Patil
I am hitting else part of below code for this issue. Any ideas?

touch_grab_down() from ivi-input-controller.c

 /* Touches set touch focus */
if (grab->touch->num_tp == 1) {
if (surf == grab->touch->focus->surface) {
surf_ctx->focus |= ILM_INPUT_DEVICE_TOUCH;
send_input_focus(seat->input_ctx,

interface->get_id_of_surface(surf_ctx->layout_surface),
 ILM_INPUT_DEVICE_TOUCH, ILM_TRUE);
} else {
surf_ctx->focus &= ~ILM_INPUT_DEVICE_TOUCH;
<--
send_input_focus(seat->input_ctx,

interface->get_id_of_surface(surf_ctx->layout_surface),
 ILM_INPUT_DEVICE_TOUCH, ILM_FALSE);
}
}

/* This code below is slightly redundant, since we have already
 * decided only one surface has touch focus */
if (!(surf_ctx->focus & ILM_INPUT_DEVICE_TOUCH))
continue;
<-


Thanks & Regards,
Vikash

On Mon, Dec 14, 2015 at 3:25 PM, Vikas Patil  wrote:
> Sorry. Forgot to attach the log file. Attached here.
>
> Thanks & Regards,
> Vikas
>
> On Mon, Dec 14, 2015 at 3:24 PM, Vikas Patil  wrote:
>> Hi Eugen Friedrich
>>
>> Thanks a lot for your quick reply.
>>
>> Attached here the file with WAYLAND_DEBUG=1 log when the gstreamer
>> plug-in is in use. I can see "ivi_input@18.input_focus(90, 4, 0)" for
>> the surface from plug-in but no touch events.
>>
>> Here is the output of APIs
>>
>> root@linux-9939-a1:~# LayerManagerControl get input device default 
>> capabilities
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> pointer
>> keyboard
>> touch
>>
>> root@linux-9939-a1:~# LayerManagerControl get surface 90 acceptance
>> Interpreter error: 'acceptance' not recognized.
>>
>> root@orinoco-9939-a1:~# LayerManagerControl get input devices with all
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> default
>>
>> root@linux-9939-a1:~# LayerManagerControl get input focus
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> failed to get surface context in ilmControl
>> surface 90:
>> surface 63:
>> surface 62:
>> surface 61:
>> surface 60: pointer keyboard
>>
>>
>> Thanks & Regards,
>> Vikas
>>
>> On Mon, Dec 14, 2015 at 3:01 PM, Friedrich, Eugen (ADITG/SW1)
>>  wrote:
>>> Hello Vikas,
>>>
>>> Could you please add the WAYLAND_DEBUG=1 traces from you application, to 
>>> see if the input events are reaching the client.
>>>
>>> Also the output of the following API would be helpful:
>>> ilm_getInputFocus,
>>> ilm_getInputDevices,
>>> ilm_getInputAcceptanceOn(with you surface id)
>>>
>>> the API's will return a list of devices or surfaces, please print the 
>>> complete list.
>>>
>>>
>>> Best regards
>>>
>>> Eugen Friedrich
>>> Software Group I (ADITG/SW1)
>>>
>>> Tel. +49 5121 49 6921
>>>
>>>> -Original Message-
>>>> From: genivi-ivi-layer-management-boun...@lists.genivi.org [mailto:genivi-
>>>> ivi-layer-management-boun...@lists.genivi.org] On Behalf Of Vikas Patil
>>>> Sent: Samstag, 12. Dezember 2015 12:57
>>>> To: genivi-ivi-layer-managem...@lists.genivi.org; meta-
>>>> freesc...@yoctoproject.org; Tanibata, Nobuhiko (ADITJ/SWG); Carlos Rafael
>>>> Giani; wayland mailing list
>>>> Subject: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and
>>>> wayland video sink
>>>>
>>>> Dear All,
>>>>
>>>> I am using wayland video sink (i.e. imxeglvivsink) from 
>>>> gstreamer1.0-plugins-
>>>> imx [1] to play the video along with weston 1.8.0 and 
>>>> wayland-ivi-extenstion
>>>> 1.4.0. I have modified “im

RE: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and wayland video sink

2015-12-14 Thread Friedrich, Eugen (ADITG/SW1)
Hello Vikas,

Could you please add the WAYLAND_DEBUG=1 traces from you application, to see if 
the input events are reaching the client.

Also the output of the following API would be helpful:
ilm_getInputFocus,
ilm_getInputDevices,
ilm_getInputAcceptanceOn(with you surface id)

the API's will return a list of devices or surfaces, please print the complete 
list.


Best regards

Eugen Friedrich
Software Group I (ADITG/SW1)

Tel. +49 5121 49 6921

> -Original Message-
> From: genivi-ivi-layer-management-boun...@lists.genivi.org [mailto:genivi-
> ivi-layer-management-boun...@lists.genivi.org] On Behalf Of Vikas Patil
> Sent: Samstag, 12. Dezember 2015 12:57
> To: genivi-ivi-layer-managem...@lists.genivi.org; meta-
> freesc...@yoctoproject.org; Tanibata, Nobuhiko (ADITJ/SWG); Carlos Rafael
> Giani; wayland mailing list
> Subject: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and
> wayland video sink
> 
> Dear All,
> 
> I am using wayland video sink (i.e. imxeglvivsink) from gstreamer1.0-plugins-
> imx [1] to play the video along with weston 1.8.0 and wayland-ivi-extenstion
> 1.4.0. I have modified “imxeglvivsink” to have the ilm and touch input
> support [2]. Basically I am posting touch events on GST bus and application
> want to have the touch can read the messages and process the touch. This is
> working fine if I don’t load the “ivi-input-controller.so”. If I load the 
> “ivi-input-
> controller.so”
> I am not able to get the touch event inside this plug-in. I have tried setting
> touch input focus to the surface from this wayland video plug-in using
> “LayermanagerControl” and ilm_setInputFocus” [3] but no luck.
> 
> 
> Also touch works fine even if I load “ivi-input-controller.so” with other
> applications. So I suspect some modification are required to ”imxeglvivsink”
> [2] or “wayland-ivi-extension/weston”.
> 
> Do you know what might be going wrong? Could anyone here give some
> suggestions/ideas to tryout and fix this?
> 
> Also “LayerManagerControl get surface 90 acceptance” doesn’t seem to
> work for me. Any inputs for this?
> 
> I have tried modifying “gst_imx_egl_viv_sink_egl_platform_mainloop”
> function in various ways but no luck and I think implementation is correct (as
> it works well without ivi-input-controller)
> 
> Following is the platform setup and weston configuration.
> 
> i.MX6 Duallite
> Linux 3.14.28
> Weston 1.8.0 with (ivi-shell.so with fbdev backend and gal2d renderer)
> Wayland-ivi-extension 1.4.0 (using ivi-controller.so, ivi-input-controller.so
> gstreamer-imx plugin QTwayland 5.4.2/Qt 5.4.2
> 
> Weston.ini contains:
> 
> [core]
> shell=ivi-shell.so
> 
> [ivi-shell]
> ivi-module=ivi-controller.so,ivi-input-controller.so
> ivi-shell-user-interface=/usr/lib/weston/weston-ivi-shell-user-interface
> 
> 
> [1] https://github.com/Freescale/gstreamer-imx/tree/master/src/eglvivsink
> [2]See attached modified file “egl_platform_wayland.c” from imxeglvivsink
> [3]
> http://wiki.projects.genivi.org/index.php/Getting_Started_with_new_Input
> _Handling_APIs
> 
> 
> 
> Thanks & Regards,
> Vikash
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and wayland video sink

2015-12-14 Thread Vikas Patil
Hi Eugen Friedrich

Thanks a lot for your quick reply.

Attached here the file with WAYLAND_DEBUG=1 log when the gstreamer
plug-in is in use. I can see "ivi_input@18.input_focus(90, 4, 0)" for
the surface from plug-in but no touch events.

Here is the output of APIs

root@linux-9939-a1:~# LayerManagerControl get input device default capabilities
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
pointer
keyboard
touch

root@linux-9939-a1:~# LayerManagerControl get surface 90 acceptance
Interpreter error: 'acceptance' not recognized.

root@orinoco-9939-a1:~# LayerManagerControl get input devices with all
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
default

root@linux-9939-a1:~# LayerManagerControl get input focus
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
failed to get surface context in ilmControl
surface 90:
surface 63:
surface 62:
surface 61:
surface 60: pointer keyboard


Thanks & Regards,
Vikas

On Mon, Dec 14, 2015 at 3:01 PM, Friedrich, Eugen (ADITG/SW1)
 wrote:
> Hello Vikas,
>
> Could you please add the WAYLAND_DEBUG=1 traces from you application, to see 
> if the input events are reaching the client.
>
> Also the output of the following API would be helpful:
> ilm_getInputFocus,
> ilm_getInputDevices,
> ilm_getInputAcceptanceOn(with you surface id)
>
> the API's will return a list of devices or surfaces, please print the 
> complete list.
>
>
> Best regards
>
> Eugen Friedrich
> Software Group I (ADITG/SW1)
>
> Tel. +49 5121 49 6921
>
>> -Original Message-
>> From: genivi-ivi-layer-management-boun...@lists.genivi.org [mailto:genivi-
>> ivi-layer-management-boun...@lists.genivi.org] On Behalf Of Vikas Patil
>> Sent: Samstag, 12. Dezember 2015 12:57
>> To: genivi-ivi-layer-managem...@lists.genivi.org; meta-
>> freesc...@yoctoproject.org; Tanibata, Nobuhiko (ADITJ/SWG); Carlos Rafael
>> Giani; wayland mailing list
>> Subject: Touch events not reviving with wayland-ivi-extenssion 1.4.0 and
>> wayland video sink
>>
>> Dear All,
>>
>> I am using wayland video sink (i.e. imxeglvivsink) from gstreamer1.0-plugins-
>> imx [1] to play the video along with weston 1.8.0 and wayland-ivi-extenstion
>> 1.4.0. I have modified “imxeglvivsink” to have the ilm and touch input
>> support [2]. Basically I am posting touch events on GST bus and application
>> want to have the touch can read the messages and process the touch. This is
>> working fine if I don’t load the “ivi-input-controller.so”. If I load the 
>> “ivi-input-
>> controller.so”
>> I am not able to get the touch event inside this plug-in. I have tried 
>> setting
>> touch input focus to the surface from this wayland video plug-in using
>> “LayermanagerControl” and ilm_setInputFocus” [3] but no luck.
>>
>>
>> Also touch works fine even if I load “ivi-input-controller.so” with other
>> applications. So I suspect some modification are required to ”imxeglvivsink”
>> [2] or “wayland-ivi-extension/weston”.
>>
>> Do you know what might be going wrong? Could anyone here give some
>> suggestions/ideas to tryout and fix this?
>>
>> Also “LayerManagerControl get surface 90 acceptance” doesn’t seem to
>> work for me. Any inputs for this?
>>
>> I have tried modifying “gst_imx_egl_viv_sink_egl_platform_mainloop”
>> function in various ways but no luck and I think implementation is correct 
>> (as
>> it works well without ivi-input-controller)
>>
>> Following is the platform setup and weston configuration.
>>
>> i.MX6 Duallite
>> Linux 3.14.28
>> Weston 1.8.0 with (ivi-shell.so with fbdev backend and gal2d renderer)
>> Wayland-ivi-extension 1.4.0 (using ivi-controller.so, ivi-input-controller.so
>> gstreamer-imx plugin QTwayland 5.4.2/Qt 5.4.2
>>
>> Weston.ini contains:
>>
>> [core]
>> shell=ivi-shell.so
>>
>> [ivi-shell]
>> ivi-module=ivi-controller.so,ivi-input-controller.so
>> ivi-shell-user-interface=/usr/lib/weston/weston-ivi-shell-user-interface
>>
>>
>> [1] https://github.com/Freescale/gstreamer-imx/tree/master/src/eglvivsink
>> [2]See attached modified file “egl_platform_wayland.c” from imxeglvivsink
>> [3]
>> http://wiki.projects.genivi.org/index.php/Getting_Started_with_new_Input
>> _Handling_APIs
>>
>>
>>
>> Thanks & Regards,
>> Vikash
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Touch events not arriving with wayland-ivi-extension 1.4.0 and wayland video sink

2015-12-12 Thread Vikas Patil
Dear All,

I am using wayland video sink (i.e. imxeglvivsink) from
gstreamer1.0-plugins-imx [1] to play the video along with weston 1.8.0
and wayland-ivi-extenstion 1.4.0. I have modified “imxeglvivsink” to
have the ilm and touch input support [2]. Basically I am posting touch
events on GST bus and application want to have the touch can read the
messages and process the touch. This is working fine if I don’t load
the “ivi-input-controller.so”. If I load the “ivi-input-controller.so”
I am not able to get the touch event inside this plug-in. I have tried
setting touch input focus to the surface from this wayland video
plug-in using “LayermanagerControl” and ilm_setInputFocus” [3] but no
luck.


Also touch works fine even if I load “ivi-input-controller.so” with
other applications. So I suspect some modification are required to
”imxeglvivsink” [2] or “wayland-ivi-extension/weston”.

Do you know what might be going wrong? Could anyone here give some
suggestions/ideas to tryout and fix this?

Also “LayerManagerControl get surface 90 acceptance” doesn’t seem to
work for me. Any inputs for this?

I have tried modifying “gst_imx_egl_viv_sink_egl_platform_mainloop”
function in various ways but no luck and I think implementation is
correct (as it works well without ivi-input-controller)

Following is the platform setup and weston configuration.

i.MX6 Duallite
Linux 3.14.28
Weston 1.8.0 with (ivi-shell.so with fbdev backend and gal2d renderer)
Wayland-ivi-extension 1.4.0 (using ivi-controller.so, ivi-input-controller.so
gstreamer-imx plugin
QTwayland 5.4.2/Qt 5.4.2

Weston.ini contains:

[core]
shell=ivi-shell.so

[ivi-shell]
ivi-module=ivi-controller.so,ivi-input-controller.so
ivi-shell-user-interface=/usr/lib/weston/weston-ivi-shell-user-interface


[1] https://github.com/Freescale/gstreamer-imx/tree/master/src/eglvivsink
[2]See attached modified file “egl_platform_wayland.c” from imxeglvivsink
[3] 
http://wiki.projects.genivi.org/index.php/Getting_Started_with_new_Input_Handling_APIs



Thanks & Regards,
Vikash
#include 
#include 
#include 
#include 
#include 

#include 

#include "egl_platform.h"
#include "egl_misc.h"
#include "gl_headers.h"

#define ILM_SUPPORT 1



#ifdef ILM_SUPPORT
#include "ilm/ilm_control.h"
#include "ilm/ilm_client.h"
#include "ilm/ilm_input.h"

/* GENIVI ILM layer and surface IDs used for this sink's video surface.
 * NOTE(review): hard-coded and given external linkage — presumably only
 * used within this file; consider making them static and configurable. */
//int   width = 800;
//int   height = 480;
t_ilm_layer   ilmLayerId = 1000;
t_ilm_surface ilmSurfaceId = 90;
#endif 


GST_DEBUG_CATEGORY_STATIC(imx_egl_platform_wl_debug);
#define GST_CAT_DEFAULT imx_egl_platform_wl_debug


/* Per-sink state for the Wayland EGL platform backend: EGL handles,
 * Wayland protocol objects, window geometry, and the control pipe used
 * to signal the main loop. Guarded by `mutex` via EGL_PLATFORM_LOCK/UNLOCK. */
struct _GstImxEglVivSinkEGLPlatform
{
	/* EGL/native handles; the "main" pair belongs to the parent window,
	 * the plain pair to the video (sub)surface. */
	EGLNativeDisplayType native_display;
	EGLNativeWindowType native_main_window;
	EGLNativeWindowType native_window;
	EGLDisplay egl_display;
	EGLContext egl_context;
	EGLSurface egl_main_surface;
	EGLSurface egl_surface;
	
	/* Owning GStreamer sink element. */
	GstElement* sink;
	
	/* Callbacks into the sink: window resize notification and frame rendering. */
	GstImxEglVivSinkWindowResizedEventCallback window_resized_event_cb;
	GstImxEglVivSinkWindowRenderFrameCallback render_frame_cb;

	/* Opaque pointer handed back to the callbacks above. */
	gpointer user_context;

	/* Window geometry / aspect-ratio state.  "pending_" values appear to be
	 * staged for later application on the main loop thread — TODO confirm. */
	gboolean fullscreen;
	guint video_par_n, video_par_d;  /* video pixel aspect ratio fraction */
	guint fixed_window_width, fixed_window_height, video_width, video_height;
	guint current_width, current_height;
	guint screen_width, screen_height;
	gint pending_x_coord, pending_y_coord;
	gint x_coord, y_coord;
	gboolean pending_subsurface_desync;

	/* Protects this structure (see EGL_PLATFORM_LOCK / EGL_PLATFORM_UNLOCK). */
	GMutex mutex;

	/* Core Wayland globals bound from the registry. */
	struct wl_display *display;
	struct wl_registry *registry;
	int display_fd;
	struct wl_compositor *compositor;
	struct wl_subcompositor *subcompositor;
#ifndef ILM_SUPPORT
	/* wl_shell is only needed when the surface is not managed via GENIVI ILM. */
	struct wl_shell *shell;
#endif
	struct wl_output *output;

	/* The video surface is a subsurface of the main surface. */
	struct wl_surface *main_surface;
	struct wl_surface *surface;
	struct wl_subsurface *subsurface;
#ifndef ILM_SUPPORT
	struct wl_shell_surface *shell_surface;
#endif


	/* Input objects for receiving touch events from the seat. */
	struct wl_seat *seat;
	struct wl_touch *wl_touch;


	/* Frame-done callback used for redraw throttling. */
	struct wl_callback *frame_cb;
	gboolean frame_callback_invoked;

	/* Self-pipe used to wake the main loop; presumably carries
	 * GstImxEGLWLCmds values — verify against the mainloop code. */
	int ctrl_pipe[2];

	gboolean configured, do_render;
};

/* Identifiers for the wl_touch listener callback types (down/up/motion/
 * frame/cancel), presumably used to tag touch messages posted on the
 * GStreamer bus — TODO confirm against the touch handlers.
 *
 * Fix: the original read `typedef enum { ... };` with no declarator,
 * which makes the `typedef` useless and draws a compiler warning
 * ("useless storage class specifier in empty declaration").  Naming the
 * typedef is backward-compatible: the constants keep their names and
 * values, and no prior code could have referenced the missing name. */
typedef enum
{
	TOUCH_HANDLE_DOWN,
	TOUCH_HANDLE_UP,
	TOUCH_HANDLE_MOTION,
	TOUCH_HANDLE_FRAME,
	TOUCH_HANDLE_CANCEL
} GstImxEGLWLTouchHandleType;

/* Acquire/release the platform mutex guarding the shared state in
 * struct _GstImxEglVivSinkEGLPlatform. */
#define EGL_PLATFORM_LOCK(platform) g_mutex_lock(&((platform)->mutex))
#define EGL_PLATFORM_UNLOCK(platform) g_mutex_unlock(&((platform)->mutex))


/* Command codes for waking the platform main loop; presumably written
 * through ctrl_pipe — verify against the mainloop implementation. */
typedef enum
{
	GSTIMX_EGLWL_CMD_REDRAW,          /* request a redraw */
	GSTIMX_EGLWL_CMD_CALL_RESIZE_CB,  /* invoke the resize callback */
	GSTIMX_EGLWL_CMD_STOP_MAINLOOP    /* terminate the main loop */
} GstImxEGLWLCmds;




/* Forward libwayland-client log messages into this plugin's GStreamer
 * debug category at LOG level. */
static void log_handler(const char *format, va_list arg_list)
{
	gst_debug_log_valist(imx_egl_platform_wl_debug, GST_LEVEL_LOG,
	                     __FILE__, __func__, __LINE__, NULL,
	                     format, arg_list);
}


/* One-time process-wide setup: register the GStreamer debug category and
 * install the libwayland-client log handler.  Subsequent calls are no-ops.
 * NOTE(review): the guard is not thread-safe — assumed to be called from a
 * single thread, as in the original. */
static void static_global_init(void)
{
	static gboolean initialized = FALSE;

	if (initialized)
		return;

	GST_DEBUG_CATEGORY_INIT(imx_egl_platform_wl_debug, "imxeglplatform_wl", 0, "imxeglvivsink Wayland platform");
	wl_log_set_handler_client(log_handler);

	initialized = TRUE;
}




static void calculate_adjusted_window_size(GstImxEglVivSinkEGLPlatform *platform, guint *actual_width, guint *actual_height)
{
	gboolean b;
	guint window

[PATCH libinput 3/6] Expand documentation on touch events, listing what is permitted when

2015-03-12 Thread Peter Hutterer
Signed-off-by: Peter Hutterer 
---
 src/libinput.h | 28 +++-
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/src/libinput.h b/src/libinput.h
index f978b37..356c1bf 100644
--- a/src/libinput.h
+++ b/src/libinput.h
@@ -839,8 +839,11 @@ libinput_event_touch_get_seat_slot(struct 
libinput_event_touch *event);
  * the top left corner of the device. To get the corresponding output screen
  * coordinate, use libinput_event_touch_get_x_transformed().
  *
- * @note this function should only be called for @ref
- * LIBINPUT_EVENT_TOUCH_DOWN and @ref LIBINPUT_EVENT_TOUCH_MOTION.
+ * For events not of type @ref LIBINPUT_EVENT_TOUCH_DOWN, @ref
+ * LIBINPUT_EVENT_TOUCH_MOTION, this function returns 0.
+ *
+ * @note It is an application bug to call this function for events other than
+ * @ref LIBINPUT_EVENT_TOUCH_DOWN or @ref LIBINPUT_EVENT_TOUCH_MOTION.
  *
  * @param event The libinput touch event
  * @return The current absolute x coordinate
@@ -855,10 +858,11 @@ libinput_event_touch_get_x(struct libinput_event_touch 
*event);
  * the top left corner of the device. To get the corresponding output screen
  * coordinate, use libinput_event_touch_get_y_transformed().
  *
- * For @ref LIBINPUT_EVENT_TOUCH_UP 0 is returned.
+ * For events not of type @ref LIBINPUT_EVENT_TOUCH_DOWN, @ref
+ * LIBINPUT_EVENT_TOUCH_MOTION, this function returns 0.
  *
- * @note this function should only be called for @ref 
LIBINPUT_EVENT_TOUCH_DOWN and
- * @ref LIBINPUT_EVENT_TOUCH_MOTION.
+ * @note It is an application bug to call this function for events other than
+ * @ref LIBINPUT_EVENT_TOUCH_DOWN or @ref LIBINPUT_EVENT_TOUCH_MOTION.
  *
  * @param event The libinput touch event
  * @return The current absolute y coordinate
@@ -872,8 +876,11 @@ libinput_event_touch_get_y(struct libinput_event_touch 
*event);
  * Return the current absolute x coordinate of the touch event, transformed to
  * screen coordinates.
  *
- * @note this function should only be called for @ref
- * LIBINPUT_EVENT_TOUCH_DOWN and @ref LIBINPUT_EVENT_TOUCH_MOTION.
+ * For events not of type @ref LIBINPUT_EVENT_TOUCH_DOWN, @ref
+ * LIBINPUT_EVENT_TOUCH_MOTION, this function returns 0.
+ *
+ * @note It is an application bug to call this function for events other than
+ * @ref LIBINPUT_EVENT_TOUCH_DOWN or @ref LIBINPUT_EVENT_TOUCH_MOTION.
  *
  * @param event The libinput touch event
  * @param width The current output screen width
@@ -889,8 +896,11 @@ libinput_event_touch_get_x_transformed(struct 
libinput_event_touch *event,
  * Return the current absolute y coordinate of the touch event, transformed to
  * screen coordinates.
  *
- * @note this function should only be called for @ref
- * LIBINPUT_EVENT_TOUCH_DOWN and @ref LIBINPUT_EVENT_TOUCH_MOTION.
+ * For events not of type @ref LIBINPUT_EVENT_TOUCH_DOWN, @ref
+ * LIBINPUT_EVENT_TOUCH_MOTION, this function returns 0.
+ *
+ * @note It is an application bug to call this function for events other than
+ * @ref LIBINPUT_EVENT_TOUCH_DOWN or @ref LIBINPUT_EVENT_TOUCH_MOTION.
  *
  * @param event The libinput touch event
  * @param height The current output screen height
-- 
2.1.0

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH libinput 09/12] Use typesafe coordinates in touch events

2015-03-12 Thread Peter Hutterer
Signed-off-by: Peter Hutterer 
---
 src/evdev.c| 13 +
 src/libinput-private.h |  6 ++
 src/libinput.c | 23 +--
 3 files changed, 16 insertions(+), 26 deletions(-)

diff --git a/src/evdev.c b/src/evdev.c
index d1b0504..cab7e85 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -239,7 +239,6 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint64_t time)
 {
struct libinput *libinput = device->base.seat->libinput;
struct motion_params motion;
-   int32_t x, y;
int slot;
int seat_slot;
struct libinput_device *base = &device->base;
@@ -304,7 +303,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint64_t time)
transform_absolute(device, &point);
 
touch_notify_touch_down(base, time, slot, seat_slot,
-   point.x, point.y);
+   &point);
break;
case EVDEV_ABSOLUTE_MT_MOTION:
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
@@ -318,7 +317,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint64_t time)
 
transform_absolute(device, &point);
touch_notify_touch_motion(base, time, slot, seat_slot,
- point.x, point.y);
+ &point);
break;
case EVDEV_ABSOLUTE_MT_UP:
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
@@ -357,14 +356,11 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint64_t time)
point = device->abs.point;
transform_absolute(device, &point);
 
-   touch_notify_touch_down(base, time, -1, seat_slot,
-   point.x, point.y);
+   touch_notify_touch_down(base, time, -1, seat_slot, &point);
break;
case EVDEV_ABSOLUTE_MOTION:
point = device->abs.point;
transform_absolute(device, &point);
-   x = point.x;
-   y = point.y;
 
if (device->seat_caps & EVDEV_DEVICE_TOUCH) {
seat_slot = device->abs.seat_slot;
@@ -372,7 +368,8 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint64_t time)
if (seat_slot == -1)
break;
 
-   touch_notify_touch_motion(base, time, -1, seat_slot, x, 
y);
+   touch_notify_touch_motion(base, time, -1, seat_slot,
+ &point);
} else if (device->seat_caps & EVDEV_DEVICE_POINTER) {
pointer_notify_motion_absolute(base, time, &point);
}
diff --git a/src/libinput-private.h b/src/libinput-private.h
index a7e0b07..91bfc37 100644
--- a/src/libinput-private.h
+++ b/src/libinput-private.h
@@ -318,16 +318,14 @@ touch_notify_touch_down(struct libinput_device *device,
uint64_t time,
int32_t slot,
int32_t seat_slot,
-   double x,
-   double y);
+   const struct device_coords *point);
 
 void
 touch_notify_touch_motion(struct libinput_device *device,
  uint64_t time,
  int32_t slot,
  int32_t seat_slot,
- double x,
- double y);
+ const struct device_coords *point);
 
 void
 touch_notify_touch_up(struct libinput_device *device,
diff --git a/src/libinput.c b/src/libinput.c
index ea31fb8..3b1d482 100644
--- a/src/libinput.c
+++ b/src/libinput.c
@@ -75,8 +75,7 @@ struct libinput_event_touch {
uint32_t time;
int32_t slot;
int32_t seat_slot;
-   double x;
-   double y;
+   struct device_coords point;
 };
 
 static void
@@ -469,7 +468,7 @@ libinput_event_touch_get_x(struct libinput_event_touch 
*event)
struct evdev_device *device =
(struct evdev_device *) event->base.device;
 
-   return evdev_convert_to_mm(device->abs.absinfo_x, event->x);
+   return evdev_convert_to_mm(device->abs.absinfo_x, event->point.x);
 }
 
 LIBINPUT_EXPORT double
@@ -479,7 +478,7 @@ libinput_event_touch_get_x_transformed(struct 
libinput_event_touch *event,
struct evdev_device *device =
(struct evdev_device *) event->base.device;
 
-   return evdev_device_transform_x(device, event->x, width);
+   return evdev_device_transform_x(device, event->point.x, width);
 }
 
 LIBINPUT_EXPORT double
@@ -489,7 +488,7 @@ libinput_event_touch_get_y_transformed(struct 
libinput_event_touch *event,
struct evdev_device *device =
(struct evdev_device *) event->base.device;
 
-   return evdev_device_transform_y(device, event->y, height

[PATCH libinput 3/4] Change absolute and touch events to use mm as default unit

2014-06-18 Thread Peter Hutterer
Instead of device-specific coordinates that the caller can't interpret without
knowing the range anyway, return mm as the default value.

Signed-off-by: Peter Hutterer 
---
 src/evdev.h |  7 +++
 src/libinput.c  | 20 
 src/libinput.h  | 32 
 tools/event-debug.c |  7 +--
 4 files changed, 40 insertions(+), 26 deletions(-)

diff --git a/src/evdev.h b/src/evdev.h
index 03b6742..eebfab1 100644
--- a/src/evdev.h
+++ b/src/evdev.h
@@ -164,4 +164,11 @@ evdev_device_remove(struct evdev_device *device);
 void
 evdev_device_destroy(struct evdev_device *device);
 
+static inline double
+evdev_convert_to_mm(const struct input_absinfo *absinfo, double v)
+{
+   double value = v - absinfo->minimum;
+   return value/absinfo->resolution;
+}
+
 #endif /* EVDEV_H */
diff --git a/src/libinput.c b/src/libinput.c
index 5b10a10..f384f43 100644
--- a/src/libinput.c
+++ b/src/libinput.c
@@ -319,13 +319,19 @@ libinput_event_pointer_get_dy(struct 
libinput_event_pointer *event)
 LIBINPUT_EXPORT double
 libinput_event_pointer_get_absolute_x(struct libinput_event_pointer *event)
 {
-   return event->x;
+   struct evdev_device *device =
+   (struct evdev_device *) event->base.device;
+
+   return evdev_convert_to_mm(device->abs.absinfo_x, event->x);
 }
 
 LIBINPUT_EXPORT double
 libinput_event_pointer_get_absolute_y(struct libinput_event_pointer *event)
 {
-   return event->y;
+   struct evdev_device *device =
+   (struct evdev_device *) event->base.device;
+
+   return evdev_convert_to_mm(device->abs.absinfo_y, event->y);
 }
 
 LIBINPUT_EXPORT double
@@ -402,7 +408,10 @@ libinput_event_touch_get_seat_slot(struct 
libinput_event_touch *event)
 LIBINPUT_EXPORT double
 libinput_event_touch_get_x(struct libinput_event_touch *event)
 {
-   return event->x;
+   struct evdev_device *device =
+   (struct evdev_device *) event->base.device;
+
+   return evdev_convert_to_mm(device->abs.absinfo_x, event->x);
 }
 
 LIBINPUT_EXPORT double
@@ -428,7 +437,10 @@ libinput_event_touch_get_y_transformed(struct 
libinput_event_touch *event,
 LIBINPUT_EXPORT double
 libinput_event_touch_get_y(struct libinput_event_touch *event)
 {
-   return event->y;
+   struct evdev_device *device =
+   (struct evdev_device *) event->base.device;
+
+   return evdev_convert_to_mm(device->abs.absinfo_y, event->y);
 }
 
 struct libinput_source *
diff --git a/src/libinput.h b/src/libinput.h
index 54c96e5..c19460b 100644
--- a/src/libinput.h
+++ b/src/libinput.h
@@ -452,11 +452,9 @@ libinput_event_pointer_get_dy(struct 
libinput_event_pointer *event);
 /**
  * @ingroup event_pointer
  *
- * Return the current absolute x coordinate of the pointer event.
- *
- * The coordinate is in a device specific coordinate space; to get the
- * corresponding output screen coordinate, use
- * libinput_event_pointer_get_x_transformed().
+ * Return the current absolute x coordinate of the pointer event, in mm from
+ * the top left corner of the device. To get the corresponding output screen
+ * coordinate, use libinput_event_pointer_get_x_transformed().
  *
  * For pointer events that are not of type
  * LIBINPUT_EVENT_POINTER_MOTION_ABSOLUTE, this function returns 0.
@@ -472,11 +470,9 @@ libinput_event_pointer_get_absolute_x(struct 
libinput_event_pointer *event);
 /**
  * @ingroup event_pointer
  *
- * Return the current absolute y coordinate of the pointer event.
- *
- * The coordinate is in a device specific coordinate space; to get the
- * corresponding output screen coordinate, use
- * libinput_event_pointer_get_y_transformed().
+ * Return the current absolute y coordinate of the pointer event, in mm from
+ * the top left corner of the device. To get the corresponding output screen
+ * coordinate, use libinput_event_pointer_get_y_transformed().
  *
  * For pointer events that are not of type
  * LIBINPUT_EVENT_POINTER_MOTION_ABSOLUTE, this function returns 0.
@@ -677,11 +673,9 @@ libinput_event_touch_get_seat_slot(struct 
libinput_event_touch *event);
 /**
  * @ingroup event_touch
  *
- * Return the current absolute x coordinate of the touch event.
- *
- * The coordinate is in a device specific coordinate space; to get the
- * corresponding output screen coordinate, use
- * libinput_event_touch_get_x_transformed().
+ * Return the current absolute x coordinate of the touch event, in mm from
+ * the top left corner of the device. To get the corresponding output screen
+ * coordinate, use libinput_event_touch_get_x_transformed().
  *
  * @note this function should only be called for LIBINPUT_EVENT_TOUCH_DOWN and
  * LIBINPUT_EVENT_TOUCH_MOTION.
@@ -695,11 +689,9 @@ libinput_event_touch_get_x(struct libinput_event_touch 
*event);
 /**
  * @ingroup event_touch
  *
- * Return the current absolute y coordinate of the touch event.
- *
- * The coordinate is in a device specific coordinate space; to get the
- * corres

[PATCH weston 1/4] libinput: Don't process touch events for devices without a valid output

2014-04-24 Thread Ander Conselvan de Oliveira
From: Ander Conselvan de Oliveira 

That would be the case of a touch screen mapped to an output that was
unplugged.
---
 src/libinput-device.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/libinput-device.c b/src/libinput-device.c
index 0ca6c4b..a67c119 100644
--- a/src/libinput-device.c
+++ b/src/libinput-device.c
@@ -147,6 +147,9 @@ handle_touch_with_coords(struct libinput_device 
*libinput_device,
uint32_t time;
int32_t slot;
 
+   if (!device->output)
+   return;
+
time = libinput_event_touch_get_time(touch_event);
slot = libinput_event_touch_get_seat_slot(touch_event);
 
-- 
1.8.3.2

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH weston 1/4] libinput: Don't process touch events for devices without a valid output

2014-04-23 Thread Ander Conselvan de Oliveira
From: Ander Conselvan de Oliveira 

That would be the case of a touch screen mapped to an output that was
unplugged.
---
 src/libinput-device.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/libinput-device.c b/src/libinput-device.c
index 0ca6c4b..a67c119 100644
--- a/src/libinput-device.c
+++ b/src/libinput-device.c
@@ -147,6 +147,9 @@ handle_touch_with_coords(struct libinput_device 
*libinput_device,
uint32_t time;
int32_t slot;
 
+   if (!device->output)
+   return;
+
time = libinput_event_touch_get_time(touch_event);
slot = libinput_event_touch_get_seat_slot(touch_event);
 
-- 
1.8.3.2

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput v2] Add seat wide slot to touch events

2014-02-23 Thread Peter Hutterer
On Sat, Feb 22, 2014 at 03:38:27PM +0100, Jonas Ådahl wrote:
> Since a Wayland compositor have to represent all touch devices of a seat
> as one virtual device, lets make that easier by also providing seat wide
> slots with touch events.
> 
> Seat wide slots may be accessed using
> libinput_event_touch_get_seat_slot().
> 
> Signed-off-by: Jonas Ådahl 

Reviewed-by: Peter Hutterer 

Cheers,
   Peter

> ---
> 
> Changes since v1:
> 
> For now, drop touch events we cant assign a seat slot.
> 
>  src/evdev.c| 48 
>  src/evdev.h|  3 +++
>  src/libinput-private.h |  2 ++
>  src/libinput.c |  9 +
>  src/libinput.h | 16 
>  tools/event-debug.c|  3 ++-
>  6 files changed, 80 insertions(+), 1 deletion(-)
> 
> diff --git a/src/evdev.c b/src/evdev.c
> index beec75e..ad5009d 100644
> --- a/src/evdev.c
> +++ b/src/evdev.c
> @@ -109,7 +109,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>  {
>   int32_t cx, cy;
>   int slot;
> + int seat_slot;
>   struct libinput_device *base = &device->base;
> + struct libinput_seat *seat = base->seat;
>  
>   slot = device->mt.slot;
>  
> @@ -128,9 +130,18 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;
> + device->mt.slots[slot].seat_slot = seat_slot;
> +
> + if (seat_slot == -1)
> + break;
> +
> + seat->slot_map |= 1 << seat_slot;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -139,9 +150,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> +
> + if (seat_slot == -1)
> + break;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_MOTION);
> @@ -150,9 +167,17 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> +
> + if (seat_slot == -1)
> + break;
> +
> + seat->slot_map &= ~(1 << seat_slot);
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  0, 0,
>  LIBINPUT_TOUCH_TYPE_UP);
>   break;
> @@ -160,10 +185,19 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;
> + device->abs.seat_slot = seat_slot;
> +
> + if (seat_slot == -1)
> + break;
> +
> + seat->slot_map |= 1 << seat_slot;
> +
>   transform_absolute(device, &cx, &cy);
>   touch_notify_touch(base,
>  time,
>  -1,
> +seat_slot,
>  li_fixed_from_int(cx),
>  li_fixed_from_int(cy),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -171,9 +205,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   case EVDEV_ABSOLUTE_MOTION:
>   transform_absolute(device, &cx, &cy);
>

[PATCH libinput v2] Add seat wide slot to touch events

2014-02-22 Thread Jonas Ådahl
Since a Wayland compositor have to represent all touch devices of a seat
as one virtual device, lets make that easier by also providing seat wide
slots with touch events.

Seat wide slots may be accessed using
libinput_event_touch_get_seat_slot().

Signed-off-by: Jonas Ådahl 
---

Changes since v1:

For now, drop touch events we cant assign a seat slot.

 src/evdev.c| 48 
 src/evdev.h|  3 +++
 src/libinput-private.h |  2 ++
 src/libinput.c |  9 +
 src/libinput.h | 16 
 tools/event-debug.c|  3 ++-
 6 files changed, 80 insertions(+), 1 deletion(-)

diff --git a/src/evdev.c b/src/evdev.c
index beec75e..ad5009d 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -109,7 +109,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
 {
int32_t cx, cy;
int slot;
+   int seat_slot;
struct libinput_device *base = &device->base;
+   struct libinput_seat *seat = base->seat;
 
slot = device->mt.slot;
 
@@ -128,9 +130,18 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->mt.slots[slot].seat_slot = seat_slot;
+
+   if (seat_slot == -1)
+   break;
+
+   seat->slot_map |= 1 << seat_slot;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -139,9 +150,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+
+   if (seat_slot == -1)
+   break;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_MOTION);
@@ -150,9 +167,17 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+
+   if (seat_slot == -1)
+   break;
+
+   seat->slot_map &= ~(1 << seat_slot);
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   0, 0,
   LIBINPUT_TOUCH_TYPE_UP);
break;
@@ -160,10 +185,19 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->abs.seat_slot = seat_slot;
+
+   if (seat_slot == -1)
+   break;
+
+   seat->slot_map |= 1 << seat_slot;
+
transform_absolute(device, &cx, &cy);
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -171,9 +205,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
case EVDEV_ABSOLUTE_MOTION:
transform_absolute(device, &cx, &cy);
if (device->seat_caps & EVDEV_DEVICE_TOUCH) {
+   seat_slot = device->abs.seat_slot;
+
+   if (seat_slot == -1)
+   break;
+
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_

Re: [PATCH libinput 1/2] Add seat wide slot to touch events

2014-02-19 Thread Peter Hutterer
On Thu, Feb 20, 2014 at 08:39:08AM +0100, Jonas Ådahl wrote:
> On Thu, Feb 20, 2014 at 02:53:18PM +1000, Peter Hutterer wrote:
> > On Wed, Feb 19, 2014 at 10:04:10PM +0100, Jonas Ådahl wrote:
> > > Since a Wayland compositor have to represent all touch devices of a seat
> > > as one virtual device, lets make that easier by also providing seat wide
> > > slots with touch events.
> > > 
> > > Seat wide slots may be accessed using
> > > libinput_event_touch_get_seat_slot().
> > > 
> > > Signed-off-by: Jonas Ådahl 
> > > ---
> > >  src/evdev.c| 24 
> > >  src/evdev.h|  3 +++
> > >  src/libinput-private.h |  2 ++
> > >  src/libinput.c |  9 +
> > >  src/libinput.h | 16 
> > >  tools/event-debug.c|  3 ++-
> > >  6 files changed, 56 insertions(+), 1 deletion(-)
> > > 
> > > diff --git a/src/evdev.c b/src/evdev.c
> > > index d2cdbaf..2b7070a 100644
> > > --- a/src/evdev.c
> > > +++ b/src/evdev.c
> > > @@ -111,7 +111,9 @@ evdev_flush_pending_event(struct evdev_device 
> > > *device, uint32_t time)
> > >  {
> > >   int32_t cx, cy;
> > >   int slot;
> > > + uint32_t seat_slot;
> > >   struct libinput_device *base = &device->base;
> > > + struct libinput_seat *seat = base->seat;
> > >  
> > >   slot = device->mt.slot;
> > >  
> > > @@ -130,9 +132,14 @@ evdev_flush_pending_event(struct evdev_device 
> > > *device, uint32_t time)
> > >   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > >   break;
> > >  
> > > + seat_slot = ffs(~seat->slot_map) - 1;
> > 
> > funny, had a bit of a deja-vu here :)
> > see 
> > http://lists.freedesktop.org/archives/wayland-devel/2014-February/013238.html
> > for my comments, both still apply.
> 
> Hmm :)
> 
> > ... >32 touch points ...
> 
> Oops, seems I stashed away that part together with the unfinished test case.
> 
> > ... seat_slot is never -1, ...
> 
> This I did address without stashing away though.

oh, right, "non-negative", I read over that searching for something that
explicitly uses "-1"...

Cheers,
   Peter

> > 
> > Cheers,
> >Peter
> > 
> > 
> > > + device->mt.slots[slot].seat_slot = seat_slot;
> > > + seat->slot_map |= 1 << seat_slot;
> > > +
> > >   touch_notify_touch(base,
> > >  time,
> > >  slot,
> > > +seat_slot,
> > >  li_fixed_from_int(device->mt.slots[slot].x),
> > >  li_fixed_from_int(device->mt.slots[slot].y),
> > >  LIBINPUT_TOUCH_TYPE_DOWN);
> > > @@ -141,9 +148,12 @@ evdev_flush_pending_event(struct evdev_device 
> > > *device, uint32_t time)
> > >   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > >   break;
> > >  
> > > + seat_slot = device->mt.slots[slot].seat_slot;
> > > +
> > >   touch_notify_touch(base,
> > >  time,
> > >  slot,
> > > +seat_slot,
> > >  li_fixed_from_int(device->mt.slots[slot].x),
> > >  li_fixed_from_int(device->mt.slots[slot].y),
> > >  LIBINPUT_TOUCH_TYPE_MOTION);
> > > @@ -152,9 +162,13 @@ evdev_flush_pending_event(struct evdev_device 
> > > *device, uint32_t time)
> > >   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > >   break;
> > >  
> > > + seat_slot = device->mt.slots[slot].seat_slot;
> > > + seat->slot_map &= ~(1 << seat_slot);
> > > +
> > >   touch_notify_touch(base,
> > >  time,
> > >  slot,
> > > +seat_slot,
> > >  0, 0,
> > >  LIBINPUT_TOUCH_TYPE_UP);
> > >   break;
> > > @@ -162,10 +176,15 @@ evdev_flush_pending_event(struct evdev_device 
> > > *device, uint32

Re: [PATCH libinput 1/2] Add seat wide slot to touch events

2014-02-19 Thread Jonas Ådahl
On Thu, Feb 20, 2014 at 02:53:18PM +1000, Peter Hutterer wrote:
> On Wed, Feb 19, 2014 at 10:04:10PM +0100, Jonas Ådahl wrote:
> > Since a Wayland compositor have to represent all touch devices of a seat
> > as one virtual device, lets make that easier by also providing seat wide
> > slots with touch events.
> > 
> > Seat wide slots may be accessed using
> > libinput_event_touch_get_seat_slot().
> > 
> > Signed-off-by: Jonas Ådahl 
> > ---
> >  src/evdev.c| 24 
> >  src/evdev.h|  3 +++
> >  src/libinput-private.h |  2 ++
> >  src/libinput.c |  9 +
> >  src/libinput.h | 16 
> >  tools/event-debug.c|  3 ++-
> >  6 files changed, 56 insertions(+), 1 deletion(-)
> > 
> > diff --git a/src/evdev.c b/src/evdev.c
> > index d2cdbaf..2b7070a 100644
> > --- a/src/evdev.c
> > +++ b/src/evdev.c
> > @@ -111,7 +111,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
> > uint32_t time)
> >  {
> > int32_t cx, cy;
> > int slot;
> > +   uint32_t seat_slot;
> > struct libinput_device *base = &device->base;
> > +   struct libinput_seat *seat = base->seat;
> >  
> > slot = device->mt.slot;
> >  
> > @@ -130,9 +132,14 @@ evdev_flush_pending_event(struct evdev_device *device, 
> > uint32_t time)
> > if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > break;
> >  
> > +   seat_slot = ffs(~seat->slot_map) - 1;
> 
> funny, had a bit of a deja-vu here :)
> see 
> http://lists.freedesktop.org/archives/wayland-devel/2014-February/013238.html
> for my comments, both still apply.

Hmm :)

> ... >32 touch points ...

Oops, seems I stashed away that part together with the unfinished test case.

> ... seat_slot is never -1, ...

This I did address without stashing away though.

Jonas

> 
> Cheers,
>Peter
> 
> 
> > +   device->mt.slots[slot].seat_slot = seat_slot;
> > +   seat->slot_map |= 1 << seat_slot;
> > +
> > touch_notify_touch(base,
> >time,
> >slot,
> > +  seat_slot,
> >li_fixed_from_int(device->mt.slots[slot].x),
> >li_fixed_from_int(device->mt.slots[slot].y),
> >LIBINPUT_TOUCH_TYPE_DOWN);
> > @@ -141,9 +148,12 @@ evdev_flush_pending_event(struct evdev_device *device, 
> > uint32_t time)
> > if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > break;
> >  
> > +   seat_slot = device->mt.slots[slot].seat_slot;
> > +
> > touch_notify_touch(base,
> >time,
> >slot,
> > +  seat_slot,
> >li_fixed_from_int(device->mt.slots[slot].x),
> >li_fixed_from_int(device->mt.slots[slot].y),
> >LIBINPUT_TOUCH_TYPE_MOTION);
> > @@ -152,9 +162,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
> > uint32_t time)
> > if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > break;
> >  
> > +   seat_slot = device->mt.slots[slot].seat_slot;
> > +   seat->slot_map &= ~(1 << seat_slot);
> > +
> > touch_notify_touch(base,
> >time,
> >slot,
> > +  seat_slot,
> >0, 0,
> >LIBINPUT_TOUCH_TYPE_UP);
> > break;
> > @@ -162,10 +176,15 @@ evdev_flush_pending_event(struct evdev_device 
> > *device, uint32_t time)
> > if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
> > break;
> >  
> > +   seat_slot = ffs(~seat->slot_map) - 1;
> > +   device->abs.seat_slot = seat_slot;
> > +   seat->slot_map |= 1 << seat_slot;
> > +
> > transform_absolute(device, &cx, &cy);
> > touch_notify_touch(base,
> >time,
> >-1,
> > +  seat_slot,
> >li_fixed_from_int(cx)

Re: [PATCH libinput 1/2] Add seat wide slot to touch events

2014-02-19 Thread Peter Hutterer
On Wed, Feb 19, 2014 at 10:04:10PM +0100, Jonas Ådahl wrote:
> Since a Wayland compositor have to represent all touch devices of a seat
> as one virtual device, lets make that easier by also providing seat wide
> slots with touch events.
> 
> Seat wide slots may be accessed using
> libinput_event_touch_get_seat_slot().
> 
> Signed-off-by: Jonas Ådahl 
> ---
>  src/evdev.c| 24 
>  src/evdev.h|  3 +++
>  src/libinput-private.h |  2 ++
>  src/libinput.c |  9 +
>  src/libinput.h | 16 
>  tools/event-debug.c|  3 ++-
>  6 files changed, 56 insertions(+), 1 deletion(-)
> 
> diff --git a/src/evdev.c b/src/evdev.c
> index d2cdbaf..2b7070a 100644
> --- a/src/evdev.c
> +++ b/src/evdev.c
> @@ -111,7 +111,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>  {
>   int32_t cx, cy;
>   int slot;
> + uint32_t seat_slot;
>   struct libinput_device *base = &device->base;
> + struct libinput_seat *seat = base->seat;
>  
>   slot = device->mt.slot;
>  
> @@ -130,9 +132,14 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;

funny, had a bit of a deja-vu here :)
see 
http://lists.freedesktop.org/archives/wayland-devel/2014-February/013238.html
for my comments, both still apply.

Cheers,
   Peter


> + device->mt.slots[slot].seat_slot = seat_slot;
> + seat->slot_map |= 1 << seat_slot;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -141,9 +148,12 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_MOTION);
> @@ -152,9 +162,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> + seat->slot_map &= ~(1 << seat_slot);
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  0, 0,
>  LIBINPUT_TOUCH_TYPE_UP);
>   break;
> @@ -162,10 +176,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;
> + device->abs.seat_slot = seat_slot;
> + seat->slot_map |= 1 << seat_slot;
> +
>   transform_absolute(device, &cx, &cy);
>   touch_notify_touch(base,
>  time,
>  -1,
> +seat_slot,
>  li_fixed_from_int(cx),
>  li_fixed_from_int(cy),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -176,6 +195,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   touch_notify_touch(base,
>  time,
>  -1,
> +device->abs.seat_slot,
>  li_fixed_from_int(cx),
>  li_fixed_from_int(cy),
>  LIBINPUT_TOUCH_TYPE_DOWN);

[PATCH libinput 1/2] Add seat wide slot to touch events

2014-02-19 Thread Jonas Ådahl
Since a Wayland compositor has to represent all touch devices of a seat
as one virtual device, let's make that easier by also providing seat-wide
slots with touch events.

Seat wide slots may be accessed using
libinput_event_touch_get_seat_slot().

Signed-off-by: Jonas Ådahl 
---
 src/evdev.c| 24 
 src/evdev.h|  3 +++
 src/libinput-private.h |  2 ++
 src/libinput.c |  9 +
 src/libinput.h | 16 
 tools/event-debug.c|  3 ++-
 6 files changed, 56 insertions(+), 1 deletion(-)

diff --git a/src/evdev.c b/src/evdev.c
index d2cdbaf..2b7070a 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -111,7 +111,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
 {
int32_t cx, cy;
int slot;
+   uint32_t seat_slot;
struct libinput_device *base = &device->base;
+   struct libinput_seat *seat = base->seat;
 
slot = device->mt.slot;
 
@@ -130,9 +132,14 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->mt.slots[slot].seat_slot = seat_slot;
+   seat->slot_map |= 1 << seat_slot;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -141,9 +148,12 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_MOTION);
@@ -152,9 +162,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+   seat->slot_map &= ~(1 << seat_slot);
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   0, 0,
   LIBINPUT_TOUCH_TYPE_UP);
break;
@@ -162,10 +176,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->abs.seat_slot = seat_slot;
+   seat->slot_map |= 1 << seat_slot;
+
transform_absolute(device, &cx, &cy);
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -176,6 +195,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
touch_notify_touch(base,
   time,
   -1,
+  device->abs.seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -190,9 +210,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->abs.seat_slot;
+   seat->slot_map &= ~(1 << seat_slot);
+
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   0, 0,
   LIBINPUT_TOUCH_TYPE_UP);
break;
diff --git a/src/evdev.h b/src/evdev.h
index 3c9f93a..6

Re: [PATCH libinput 5/5] Add seat wide slot to touch events

2014-02-12 Thread Peter Hutterer
On Wed, Feb 12, 2014 at 09:36:42PM +0100, Jonas Ådahl wrote:
> Since a Wayland compositor have to represent all touch devices of a seat
> as one virtual device, lets make that easier by also providing seat wide
> slots with touch events.
> 
> Seat wide slots may be accessed using
> libinput_event_touch_get_seat_slot().
> 
> Signed-off-by: Jonas Ådahl 
> ---
>  src/evdev.c| 24 
>  src/evdev.h|  3 +++
>  src/libinput-private.h |  2 ++
>  src/libinput.c |  9 +
>  src/libinput.h | 13 +
>  5 files changed, 51 insertions(+)
> 
> diff --git a/src/evdev.c b/src/evdev.c
> index 3fe28e4..7393df7 100644
> --- a/src/evdev.c
> +++ b/src/evdev.c
> @@ -109,7 +109,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>  {
>   int32_t cx, cy;
>   int slot;
> + uint32_t seat_slot;
>   struct libinput_device *base = &device->base;
> + struct libinput_seat *seat = base->seat;
>  
>   slot = device->mt.slot;
>  
> @@ -128,9 +130,14 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;

this needs some check for ffs() returning 0; it's not that hard to create
32 touch points.

> + device->mt.slots[slot].seat_slot = seat_slot;
> + seat->slot_map |= 1 << seat_slot;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -139,9 +146,12 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  li_fixed_from_int(device->mt.slots[slot].x),
>  li_fixed_from_int(device->mt.slots[slot].y),
>  LIBINPUT_TOUCH_TYPE_MOTION);
> @@ -150,9 +160,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = device->mt.slots[slot].seat_slot;
> + seat->slot_map &= ~(1 << seat_slot);
> +
>   touch_notify_touch(base,
>  time,
>  slot,
> +seat_slot,
>  0, 0,
>  LIBINPUT_TOUCH_TYPE_UP);
>   break;
> @@ -160,10 +174,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
>   break;
>  
> + seat_slot = ffs(~seat->slot_map) - 1;
> + device->abs.seat_slot = seat_slot;
> + seat->slot_map |= 1 << seat_slot;

I think in light of 4/5 it should be documented that the seat_slot is never
-1, lest people expect the same behaviour for both functions.

Reviewed-by: Peter Hutterer 
for the series otherwise.

Cheers,
   Peter


> +
>   transform_absolute(device, &cx, &cy);
>   touch_notify_touch(base,
>  time,
>  -1,
> +seat_slot,
>  li_fixed_from_int(cx),
>  li_fixed_from_int(cy),
>  LIBINPUT_TOUCH_TYPE_DOWN);
> @@ -174,6 +193,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
> uint32_t time)
>   touch_notify_touch(base,
>  time,
>  -1,
> +device->abs.seat_slot,
>  li_fixed_from_int(cx),
>  li_fixed_from

[PATCH libinput 4/5] evdev: Use -1 to represent touch events slots from single touch devices

2014-02-12 Thread Jonas Ådahl
Signed-off-by: Jonas Ådahl 
---
 src/evdev.c| 8 +---
 src/libinput.h | 3 +++
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/src/evdev.c b/src/evdev.c
index d8dff65..3fe28e4 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -163,7 +163,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
transform_absolute(device, &cx, &cy);
touch_notify_touch(base,
   time,
-  slot,
+  -1,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -173,7 +173,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (device->seat_caps & EVDEV_DEVICE_TOUCH) {
touch_notify_touch(base,
   time,
-  slot,
+  -1,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -190,7 +190,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
 
touch_notify_touch(base,
   time,
-  0, 0, 0, LIBINPUT_TOUCH_TYPE_UP);
+  -1,
+  0, 0,
+  LIBINPUT_TOUCH_TYPE_UP);
break;
default:
assert(0 && "Unknown pending event type");
diff --git a/src/libinput.h b/src/libinput.h
index b5e881a..30b6011 100644
--- a/src/libinput.h
+++ b/src/libinput.h
@@ -558,6 +558,9 @@ libinput_event_touch_get_time(struct libinput_event_touch 
*event);
  * Get the slot of this touch event. See the kernel's multitouch
  * protocol B documentation for more information.
  *
+ * If the touch event has no assigned slot, for example if it is from a
+ * single touch device, this function returns -1.
+ *
  * @note this function should not be called for LIBINPUT_EVENT_TOUCH_FRAME.
  *
  * @return The slot of this touch event
-- 
1.8.3.2

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH libinput 5/5] Add seat wide slot to touch events

2014-02-12 Thread Jonas Ådahl
Since a Wayland compositor has to represent all touch devices of a seat
as one virtual device, let's make that easier by also providing seat-wide
slots with touch events.

Seat wide slots may be accessed using
libinput_event_touch_get_seat_slot().

Signed-off-by: Jonas Ådahl 
---
 src/evdev.c| 24 
 src/evdev.h|  3 +++
 src/libinput-private.h |  2 ++
 src/libinput.c |  9 +
 src/libinput.h | 13 +
 5 files changed, 51 insertions(+)

diff --git a/src/evdev.c b/src/evdev.c
index 3fe28e4..7393df7 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -109,7 +109,9 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
 {
int32_t cx, cy;
int slot;
+   uint32_t seat_slot;
struct libinput_device *base = &device->base;
+   struct libinput_seat *seat = base->seat;
 
slot = device->mt.slot;
 
@@ -128,9 +130,14 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->mt.slots[slot].seat_slot = seat_slot;
+   seat->slot_map |= 1 << seat_slot;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -139,9 +146,12 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   li_fixed_from_int(device->mt.slots[slot].x),
   li_fixed_from_int(device->mt.slots[slot].y),
   LIBINPUT_TOUCH_TYPE_MOTION);
@@ -150,9 +160,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->mt.slots[slot].seat_slot;
+   seat->slot_map &= ~(1 << seat_slot);
+
touch_notify_touch(base,
   time,
   slot,
+  seat_slot,
   0, 0,
   LIBINPUT_TOUCH_TYPE_UP);
break;
@@ -160,10 +174,15 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = ffs(~seat->slot_map) - 1;
+   device->abs.seat_slot = seat_slot;
+   seat->slot_map |= 1 << seat_slot;
+
transform_absolute(device, &cx, &cy);
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -174,6 +193,7 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
touch_notify_touch(base,
   time,
   -1,
+  device->abs.seat_slot,
   li_fixed_from_int(cx),
   li_fixed_from_int(cy),
   LIBINPUT_TOUCH_TYPE_DOWN);
@@ -188,9 +208,13 @@ evdev_flush_pending_event(struct evdev_device *device, 
uint32_t time)
if (!(device->seat_caps & EVDEV_DEVICE_TOUCH))
break;
 
+   seat_slot = device->abs.seat_slot;
+   seat->slot_map &= ~(1 << seat_slot);
+
touch_notify_touch(base,
   time,
   -1,
+  seat_slot,
   0, 0,
   LIBINPUT_TOUCH_TYPE_UP);
break;
diff --git a/src/evdev.h b/src/evdev.h
index 3c9f93a..6e7f081 100644
--- a/src/evdev.

Re: Touch events

2012-02-23 Thread Daniel Stone
Hi,

On 23 February 2012 20:22, Bill Spitzak  wrote:
> Chase Douglas wrote:
>> The client won't see the third finger if it touches outside its window.
>> In the wayland case, only the WM has all the info needed to determine if
>> a touch is part of a global gesture. The WM needs to make the decision,
>> not the client.
>
> I'm pretty certain all touch events *MUST* go to the same surface until all
> touches are released. Otherwise it will be quite impossible to do gestures
> reliably, like the user could not do them to objects near the edge of a
> window.

On touchpads, yes.  On touchscreens, no.

>> That would be bad UI design because then global gestures would fire only
>> sometimes. Further, it would break global gestures if touches occur over
>> a broken application.
>
>
> I consider it bad design that global actions are "complex" (like needing 3
> fingers) or global shortcuts require lots of shift keys held down, just to
> avoid collisions with applications.

Needing three fingers isn't complex.  My grandparents can understand
it, and regularly use three-finger gestures.

>> I think we can look at other OSes as case studies. iOS and OS X employ
>> effective global gestures imo, and they take precedence over the
>> application receiving touch or gesture events.
>
> I think it is pretty clear that "what other OS's do" is not always what
> Wayland wants to do. Most of them copied ideas from X.

Not really.  X's input system is pretty unique (and pretty uniquely
broken), and I think it's safe to say that no-one -- least of all iOS
-- has copied it.

Cheers,
Daniel
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-23 Thread Chase Douglas

On 02/23/2012 12:22 PM, Bill Spitzak wrote:

Chase Douglas wrote:


The client won't see the third finger if it touches outside its window.
In the wayland case, only the WM has all the info needed to determine if
a touch is part of a global gesture. The WM needs to make the decision,
not the client.


I'm pretty certain all touch events *MUST* go to the same surface until
all touches are released. Otherwise it will be quite impossible to do
gestures reliably, like the user could not do them to objects near the
edge of a window.


No? I'm not sure what to say other than this is plainly incorrect. For a 
touchscreen device, the user may be interacting with two separate 
applications at the same time. Imagine you have your web browser open on 
the left side of the screen, and you have your spreadsheet open on the 
right side. You then want to scroll both side by side as you compare 
numbers. To do this, you need to send touch events to each client 
separately.


I'm fairly certain that no window system in major use behaves as you 
suggest.



If the clients can look at things first, this would allow the compositor
to do things like "one finger can be used to change desktops if the
underlying program does not use it".


That would be bad UI design because then global gestures would fire only
sometimes. Further, it would break global gestures if touches occur over
a broken application.


I consider it bad design that global actions are "complex" (like needing
3 fingers) or global shortcuts require lots of shift keys held down,
just to avoid collisions with applications.


My belief is that it is considered, universally (meaning >95% of 
people), that global gestures should behave consistently across 
applications, and should not be inhibited by broken applications.


I also believe that 3 or more finger global gestures are not bad design, 
especially since the iPad uses them. Maybe individuals like yourself 
don't like them, but very many do.


I'm not trying to start an argument about what is the best design of 
global gestures. However, I think it is a requirement that the window 
system allows for consistent global gesture behavior, and that three or 
more touch gestures are valid for global gestures.



I also think you are making up "user confusion" that does not exist in
the real world to make an excuse for this. Users will find it pretty
obvious if the same action that scrolls a document also scrolls the
entire screen if you don't touch a document, IMHO.


How would it not be confusing if you have a global "alt-tab" gesture 
work when performed over your spreadsheet but not your browser?



I think we can look at other OSes as case studies. iOS and OS X employ
effective global gestures imo, and they take precedence over the
application receiving touch or gesture events.


I think it is pretty clear that "what other OS's do" is not always what
Wayland wants to do. Most of them copied ideas from X.


That's why I said we should look at them as case studies...

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-23 Thread Bill Spitzak

Chase Douglas wrote:


The client won't see the third finger if it touches outside its window.
In the wayland case, only the WM has all the info needed to determine if
a touch is part of a global gesture. The WM needs to make the decision,
not the client.


I'm pretty certain all touch events *MUST* go to the same surface until 
all touches are released. Otherwise it will be quite impossible to do 
gestures reliably, like the user could not do them to objects near the 
edge of a window.


If the clients can look at things first, this would allow the compositor 
to do things like "one finger can be used to change desktops if the 
underlying program does not use it".


That would be bad UI design because then global gestures would fire only
sometimes. Further, it would break global gestures if touches occur over
a broken application.


I consider it bad design that global actions are "complex" (like needing 
3 fingers) or global shortcuts require lots of shift keys held down, 
just to avoid collisions with applications.


I also think you are making up "user confusion" that does not exist in 
the real world to make an excuse for this. Users will find it pretty 
obvious if the same action that scrolls a document also scrolls the 
entire screen if you don't touch a document, IMHO.



I think we can look at other OSes as case studies. iOS and OS X employ
effective global gestures imo, and they take precedence over the
application receiving touch or gesture events.


I think it is pretty clear that "what other OS's do" is not always what 
Wayland wants to do. Most of them copied ideas from X.

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-22 Thread Peter Hutterer
On Tue, Feb 21, 2012 at 04:25:39PM -0500, Kristian Høgsberg wrote:
> 2012/2/21 Chase Douglas :
> > On 02/21/2012 09:16 PM, Kristian Høgsberg wrote:
> >> 2012/2/20 Chase Douglas :
> >>> On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
> >>>>  - input protocol restructuring: break up events into wl_pointer
> >>>> (enter/leave/motion/button/axis events, set_pointer_surface request),
> >>>> wl_keyboard (enter/leave/key events... what else... unicode event,
> >>>> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
> >>>> events) interfaces
> >>>
> >>> [snip]
> >>>
> >>> So the client window will receive touch events without delay, but may
> >>> receive a cancel? I am in favor of this approach, but you need to add a
> >>> way to tell the window when the window manager has "rejected" a touch
> >>> sequence as well. Otherwise the client will never know when they can
> >>> perform destructive operations with it.
> >>
> >> No, we don't need that.  Don't do destructive operations on something
> >> that could be the first half on a globall gesture.  If you need to
> >> know for sure that nobody is going to take over the events, wait until
> >> the touch session is over (all touch points up, none cancelled).
> >
> > That doesn't make any sense. What if I'm playing a game? Latency
> > matters, and I need to know that the touches are mine early on. In the
> > case of a game, the environment should leave touches alone and
> > immediately tell the game that the touches are owned by it.
> 
> I didn't say that applications always have to wait for touch end.
> Just that if you're doing something irrevocable like, sending an email
> or formatting your floppy, you don't want to trigger on something that
> could be part of a global gesture.  Like touch down, then move. You
> need to wait for touch end in that case.

 
> On the other hand, if you're scrolling in your web browser or moving
> around in a game, you just assume you own the events you get and
> scroll/move around.  If later in that touch session, you get the
> cancel event, you can just leave the web page/game where you
> scrolled/moved to.  Or you can undo what you did, for example, if you
> scribbled in a paint app as part of the global gesture.  Because you
> have to be able to do that anyway.
> 
> > Touches are used for much more than just button tapping, and waiting
> > until a touch is lifted to do anything won't work.
> >
> > Another reason this won't work is if your environment wants to recognize
> > a double-tap sequence. The first tap will end, at which point the
> > wayland application will attempt to use it. But a second tap comes along
> > and the environment performs an action.
> 
> This doesn't seem like a valid use case.

Quote from above "you need to wait for a touch end in that case".

How will a client know what touch sequence _could_ be a global gesture? How
long after the touch end will I need to wait to make sure this wasn't a
gesture used by the environment? 

Surface-local coordinates and the fat-finger problem may cause a double-tap
to happen in two different surfaces. Can the compositor cancel completed
sequences?

Cheers,
  Peter
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-22 Thread Chase Douglas
On 02/21/2012 09:15 PM, Bill Spitzak wrote:
> Daniel Stone wrote:
>> Hi,
>>
>> On 22 February 2012 00:13, Bill Spitzak  wrote:
>>> It seems like it would be better if clients got the touch events first, and
>>> the compositor only did things if the client said it was uninterested in the
>>> events. For a lot of touch events this can be decided immediately on the
>>> touch-down, since the client knows the user is pushing nothing.
>>
>> No, this just isn't true.  You want global gestures (e.g. three finger
>> swipe to change desktop) to take precedence over local shortcuts.  And
>> you cannot -- cannot -- know at touch-down time who's going to be
>> ultimately interested in it.  Because for a three-finger swipe, there
>> will be a time when there's only one finger down, or two.  At which
>> point the client will say, 'ah yes, I'm interested in this!'.  And
>> then the third finger lands and you regret a terrible design decision
>> you made.
> 
> I would think the client could say "not interested" when it sees the 
> third finger.

The client won't see the third finger if it touches outside its window.
In the wayland case, only the WM has all the info needed to determine if
a touch is part of a global gesture. The WM needs to make the decision,
not the client.

> If the clients can look at things first, this would allow the compositor 
> to do things like "one finger can be used to change desktops if the 
> underlying program does not use it".

That would be bad UI design because then global gestures would fire only
sometimes. Further, it would break global gestures if touches occur over
a broken application.

> Solutions like "three fingers are needed" are just like the solutions 
> for shortcuts where you have to hold Alt and Ctrl and the right-hand 
> Shift and push the key, in an attempt to not collide with keystrokes the 
> program wants.

Yes, system and application gestures may collide. There's not much one
can do about it other than provide guidelines for application
developers. Unity's guidelines are: three and four finger gestures are
reserved for global uses. Everything else is fair game.

I think we can look at other OSes as case studies. iOS and OS X employ
effective global gestures imo, and they take precedence over the
application receiving touch or gesture events.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-22 Thread Mohamed Ikbel Boulabiar
On Wed, Feb 22, 2012 at 1:21 AM, Daniel Stone  wrote:
>  Because for a three-finger swipe, there
> will be a time when there's only one finger down, or two.  At which
> point the client will say, 'ah yes, I'm interested in this!'.  And
> then the third finger lands and you regret a terrible design decision
> you made.

Some values about the time between fingers touching the surface (in
mt) can help designing the interaction. I don't have numbers now but
what I remember is that they are very small values (20ms-50ms) and
thresholded to device timestamps.
Time between double-click using a mouse is 500ms. (which can be
configured later by the user)
And in terms of interactions, the maximum time between an input and a
feedback should be less than 100ms, otherwise the user will detect a
delay.

Using these values, wouldn't it be possible to model things using a usual FSM?

i
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Bill Spitzak



Daniel Stone wrote:

Hi,

On 22 February 2012 00:13, Bill Spitzak  wrote:

It seems like it would be better if clients got the touch events first, and
the compositor only did things if the client said it was uninterested in the
events. For a lot of touch events this can be decided immediately on the
touch-down, since the client knows the user is pushing nothing.


No, this just isn't true.  You want global gestures (e.g. three finger
swipe to change desktop) to take precedence over local shortcuts.  And
you cannot -- cannot -- know at touch-down time who's going to be
ultimately interested in it.  Because for a three-finger swipe, there
will be a time when there's only one finger down, or two.  At which
point the client will say, 'ah yes, I'm interested in this!'.  And
then the third finger lands and you regret a terrible design decision
you made.


I would think the client could say "not interested" when it sees the 
third finger.


If the clients can look at things first, this would allow the compositor 
to do things like "one finger can be used to change desktops if the 
underlying program does not use it".


Solutions like "three fingers are needed" are just like the solutions 
for shortcuts where you have to hold Alt and Ctrl and the right-hand 
Shift and push the key, in an attempt to not collide with keystrokes the 
program wants.



___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Chase Douglas
On 02/21/2012 10:25 PM, Kristian Høgsberg wrote:
> 2012/2/21 Chase Douglas :
>> On 02/21/2012 09:16 PM, Kristian Høgsberg wrote:
>>> 2012/2/20 Chase Douglas :
>>>> On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
>>>>>  - input protocol restructuring: break up events into wl_pointer
>>>>> (enter/leave/motion/button/axis events, set_pointer_surface request),
>>>>> wl_keyboard (enter/leave/key events... what else... unicode event,
>>>>> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
>>>>> events) interfaces
>>>>
>>>> [snip]
>>>>
>>>> So the client window will receive touch events without delay, but may
>>>> receive a cancel? I am in favor of this approach, but you need to add a
>>>> way to tell the window when the window manager has "rejected" a touch
>>>> sequence as well. Otherwise the client will never know when they can
>>>> perform destructive operations with it.
>>>
>>> No, we don't need that.  Don't do destructive operations on something
>>> that could be the first half of a global gesture.  If you need to
>>> know for sure that nobody is going to take over the events, wait until
>>> the touch session is over (all touch points up, none cancelled).
>>
>> That doesn't make any sense. What if I'm playing a game? Latency
>> matters, and I need to know that the touches are mine early on. In the
>> case of a game, the environment should leave touches alone and
>> immediately tell the game that the touches are owned by it.
> 
> I didn't say that applications always have to wait for touch end.
> Just that if you're doing something irrevocable like, sending an email
> or formatting your floppy, you don't want to trigger on something that
> could be part of a global gesture.  Like touch down, then move. You
> need to wait for touch end in that case.
> 
> On the other hand, if you're scrolling in your web browser or moving
> around in a game, you just assume you own the events you get and
> scroll/move around.  If later in that touch session, you get the
> cancel event, you can just leave the web page/game where you
> scrolled/moved to.  Or you can undo what you did, for example, if you
> scribbled in a paint app as part of the global gesture.  Because you
> have to be able to do that anyway.

I don't see how this resolves the game use case. Usually in a game,
every action is destructive from the start. The application will either
have to assume that wayland won't do something with the touches, or will
have to wait until the end of a touch, which just isn't feasible.

>> Touches are used for much more than just button tapping, and waiting
>> until a touch is lifted to do anything won't work.
>>
>> Another reason this won't work is if your environment wants to recognize
>> a double-tap sequence. The first tap will end, at which point the
>> wayland application will attempt to use it. But a second tap comes along
>> and the environment performs an action.
> 
> This doesn't seem like a valid use case.

Why not? I can easily envision a four-touch double tap being an
environment gesture.

I can come up with many more use cases if you need. There is no way this
will work without a concept of touch ownership that is passed on to the
client applications.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Daniel Stone
Hi,

On 22 February 2012 00:13, Bill Spitzak  wrote:
> It seems like it would be better if clients got the touch events first, and
> the compositor only did things if the client said it was uninterested in the
> events. For a lot of touch events this can be decided immediately on the
> touch-down, since the client knows the user is pushing nothing.

No, this just isn't true.  You want global gestures (e.g. three finger
swipe to change desktop) to take precedence over local shortcuts.  And
you cannot -- cannot -- know at touch-down time who's going to be
ultimately interested in it.  Because for a three-finger swipe, there
will be a time when there's only one finger down, or two.  At which
point the client will say, 'ah yes, I'm interested in this!'.  And
then the third finger lands and you regret a terrible design decision
you made.

Cheers,
Daniel
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Bill Spitzak
It seems like it would be better if clients got the touch events first, 
and the compositor only did things if the client said it was 
uninterested in the events. For a lot of touch events this can be 
decided immediately on the touch-down, since the client knows the user 
is pushing nothing.


I don't know much about touch events, but this seems similar to global 
shortcuts. It would help a lot if every keystroke was sent to clients 
but the clients could say "I don't do anything with that" and then the 
compositor/shell could use the keystroke as a global shortcut, and in 
fact I thought this was what Wayland was going to do. It seems the same 
ideas should apply to other events like touch.

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Kristian Høgsberg
2012/2/21 Chase Douglas :
> On 02/21/2012 09:16 PM, Kristian Høgsberg wrote:
>> 2012/2/20 Chase Douglas :
>>> On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
>>>>  - input protocol restructuring: break up events into wl_pointer
>>>> (enter/leave/motion/button/axis events, set_pointer_surface request),
>>>> wl_keyboard (enter/leave/key events... what else... unicode event,
>>>> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
>>>> events) interfaces
>>>
>>> [snip]
>>>
>>> So the client window will receive touch events without delay, but may
>>> receive a cancel? I am in favor of this approach, but you need to add a
>>> way to tell the window when the window manager has "rejected" a touch
>>> sequence as well. Otherwise the client will never know when they can
>>> perform destructive operations with it.
>>
>> No, we don't need that.  Don't do destructive operations on something
>> that could be the first half of a global gesture.  If you need to
>> know for sure that nobody is going to take over the events, wait until
>> the touch session is over (all touch points up, none cancelled).
>
> That doesn't make any sense. What if I'm playing a game? Latency
> matters, and I need to know that the touches are mine early on. In the
> case of a game, the environment should leave touches alone and
> immediately tell the game that the touches are owned by it.

I didn't say that applications always have to wait for touch end.
Just that if you're doing something irrevocable like, sending an email
or formatting your floppy, you don't want to trigger on something that
could be part of a global gesture.  Like touch down, then move. You
need to wait for touch end in that case.

On the other hand, if you're scrolling in your web browser or moving
around in a game, you just assume you own the events you get and
scroll/move around.  If later in that touch session, you get the
cancel event, you can just leave the web page/game where you
scrolled/moved to.  Or you can undo what you did, for example, if you
scribbled in a paint app as part of the global gesture.  Because you
have to be able to do that anyway.

> Touches are used for much more than just button tapping, and waiting
> until a touch is lifted to do anything won't work.
>
> Another reason this won't work is if your environment wants to recognize
> a double-tap sequence. The first tap will end, at which point the
> wayland application will attempt to use it. But a second tap comes along
> and the environment performs an action.

This doesn't seem like a valid use case.

Kristian
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Thiago Macieira
On terça-feira, 21 de fevereiro de 2012 21.49.26, Chase Douglas wrote:
> That doesn't make any sense. What if I'm playing a game? Latency
> matters, and I need to know that the touches are mine early on. In the
> case of a game, the environment should leave touches alone and
> immediately tell the game that the touches are owned by it.
>
> Touches are used for much more than just button tapping, and waiting
> until a touch is lifted to do anything won't work.
>
> Another reason this won't work is if your environment wants to recognize
> a double-tap sequence. The first tap will end, at which point the
> wayland application will attempt to use it. But a second tap comes along
> and the environment performs an action.

There's one important difference with X here: the compositor can simply delay
sending the events if it wants to. That's the case of the double-tap above.

If latency matters, the actions cannot be delayed, not even to wait for
"you're not going to get cancelled from this point on".

You can also think that, in the Wayland world, the "cannot be cancelled" event
matches the "touch end" event. That constrains what the UX can do, sure.

If there is a real UX which cannot be met by this constraint, let us know.
It's been really hard to get UX requirements from anyone...

--
Thiago Macieira - thiago.macieira (AT) intel.com
  Software Architect - Intel Open Source Technology Center
 Intel Sweden AB - Registration Number: 556189-6027
 Knarrarnäsgatan 15, 164 40 Kista, Stockholm, Sweden


signature.asc
Description: This is a digitally signed message part.
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events

2012-02-21 Thread Chase Douglas
On 02/21/2012 09:16 PM, Kristian Høgsberg wrote:
> 2012/2/20 Chase Douglas :
>> On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
>>>  - input protocol restructuring: break up events into wl_pointer
>>> (enter/leave/motion/button/axis events, set_pointer_surface request),
>>> wl_keyboard (enter/leave/key events... what else... unicode event,
>>> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
>>> events) interfaces
>>
>> [snip]
>>
>> So the client window will receive touch events without delay, but may
>> receive a cancel? I am in favor of this approach, but you need to add a
>> way to tell the window when the window manager has "rejected" a touch
>> sequence as well. Otherwise the client will never know when they can
>> perform destructive operations with it.
> 
> No, we don't need that.  Don't do destructive operations on something
> that could be the first half of a global gesture.  If you need to
> know for sure that nobody is going to take over the events, wait until
> the touch session is over (all touch points up, none cancelled).

That doesn't make any sense. What if I'm playing a game? Latency
matters, and I need to know that the touches are mine early on. In the
case of a game, the environment should leave touches alone and
immediately tell the game that the touches are owned by it.

Touches are used for much more than just button tapping, and waiting
until a touch is lifted to do anything won't work.

Another reason this won't work is if your environment wants to recognize
a double-tap sequence. The first tap will end, at which point the
wayland application will attempt to use it. But a second tap comes along
and the environment performs an action.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: Touch events (was: Towards 1.0)

2012-02-21 Thread Kristian Høgsberg
2012/2/20 Chase Douglas :
> On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
>>  - input protocol restructuring: break up events into wl_pointer
>> (enter/leave/motion/button/axis events, set_pointer_surface request),
>> wl_keyboard (enter/leave/key events... what else... unicode event,
>> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
>> events) interfaces
>
> [snip]
>
> So the client window will receive touch events without delay, but may
> receive a cancel? I am in favor of this approach, but you need to add a
> way to tell the window when the window manager has "rejected" a touch
> sequence as well. Otherwise the client will never know when they can
> perform destructive operations with it.

No, we don't need that.  Don't do destructive operations on something
that could be the first half of a global gesture.  If you need to
know for sure that nobody is going to take over the events, wait until
the touch session is over (all touch points up, none cancelled).

Kristian
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Touch events (was: Towards 1.0)

2012-02-20 Thread Chase Douglas
On 02/17/2012 06:01 PM, Kristian Høgsberg wrote:
>  - input protocol restructuring: break up events into wl_pointer
> (enter/leave/motion/button/axis events, set_pointer_surface request),
> wl_keyboard (enter/leave/key events... what else... unicode event,
> set_map request? pending kb work), and wl_touch (down/up/motion/cancel
> events) interfaces

[snip]

So the client window will receive touch events without delay, but may
receive a cancel? I am in favor of this approach, but you need to add a
way to tell the window when the window manager has "rejected" a touch
sequence as well. Otherwise the client will never know when they can
perform destructive operations with it.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


RE: [PATCH] Add touch events to protocol.

2011-07-08 Thread laszlo.p.agocs
Hi Chase,

True, some additional properties, like pressure and area, are missing however 
this is intentional as the aim of the patch was to introduce the basics of 
touch event handling with a minimal set of events and parameters.

The problem with the extra touch properties is that they may not be available 
or may not be used on some systems and forcing the transmission of e.g. a fake 
pressure value for every single point feels a waste of bandwidth in these cases.

These properties should somehow be optional in the touch point event (e.g. by 
having an optional map of values like you mentioned) but I am not quite sure 
how that could be achieved. Suggestions are welcome.

Regards,
Laszlo

-Original Message-
From: ext Chase Douglas
Sent:  05/07/2011, 19:08
To: Agocs Laszlo.P (Nokia-MP-Qt/Tampere)
Cc: wayland-devel@lists.freedesktop.org
Subject: Re: [PATCH] Add touch events to protocol.

Hi Laszlo,

On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
>
>  From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
> From: Laszlo Agocs 
> Date: Wed, 29 Jun 2011 17:51:29 +0300
> Subject: [PATCH] Add touch events to protocol.
>
> ---
>   protocol/wayland.xml |   39 +++
>   1 files changed, 39 insertions(+), 0 deletions(-)
>
> diff --git a/protocol/wayland.xml b/protocol/wayland.xml
> index fd54245..874fd5f 100644
> --- a/protocol/wayland.xml
> +++ b/protocol/wayland.xml
> @@ -461,6 +461,45 @@
> 
> 
>   
> +
> +
> +
> +  
> +  
> +  
> +  
> +
> +
> +
> +  
> +  
> +  
> +  
> +
> +
> +
> +  
> +  
> +
> +
> +
> +  
> +  
> +  
> +  
> +

What about pressure or shape? I don't know the wayland protocol yet, but
is it possible to send a map of properties and values sort of like
valuators?

> +
> +
> +
> +
> +
> +
> +
> +
> 

I never understood the concept behind "touch cancel". If I'm a client,
I'm supposed to sit around waiting for a touch cancel event at any
point? Is it bounded in time, or could I get a touch cancel event 20
seconds into a stream of motion events? I don't see a way to get around
explicitly conferring whether a touch is "accepted" or "rejected"
(implicitly or explicitly) at some point, which is what XInput 2.1 is
aiming to do. However, the exact mechanisms may be different in Wayland
since we don't have the old X cruft to deal with.

Thanks!

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH] Add touch events to protocol.

2011-07-07 Thread Josh Leverette
With the exception of incoming phone call scenario, how does one go about
cancelling a touch event anyways? Unless I'm wildly mistaken, this is an
unlikely scenario. The way you normally cancel touch events is you touch
down on a button, then move your finger off the button and lift up, but that
is all handled by the application itself, not the window manager. Correct?
Therefore applications should always work under the assumption that the
touches will not be cancelled unless they implement cancellation support,
but for the rare event that they receive the cancel event from the WM, just
commit whatever actions have been completed thus far. (treat it as the
fingers lifting off the screen)

2011/7/7 Chase Douglas 

> On 07/07/2011 09:52 AM, Kristian Høgsberg wrote:
> > 2011/7/7 Chase Douglas :
> >> On 07/07/2011 07:28 AM, Kristian Høgsberg wrote:
> >>> On Tue, Jul 5, 2011 at 1:08 PM, Chase Douglas
> >>>  wrote:
> >>>> On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> +
> >>>>> 
> >>>>
> >>>> I never understood the concept behind "touch cancel". If I'm a client,
> >>>> I'm supposed to sit around waiting for a touch cancel event at any
> >>>> point? Is it bounded in time, or could I get a touch cancel event 20
> >>>> seconds into a stream of motion events? I don't see a way to get
> around
> >>>> explicitly conferring whether a touch  is "accepted" or "rejected"
> >>>> (implicitly or explicitly) at some point, which is what XInput 2.1 is
> >>>> aiming to do. However, the exact mechanisms may be different in
> Wayland
> >>>> since we don't have the old X cruft to deal with.
> >>>
> >>> Yes, any stream of touch event, no matter how long, can be cancelled
> >>> by the compositor at any time.  The use cases are when the compositor
> >>> recognizes a global gesture (three finger pinch to go to home screen
> >>> or so) or if an important system event happens (incoming phone call or
> >>> such).
> >>>
> >>> Whether and how to undo the effects of the touch events up until the
> >>> cancel is an application policy decision.  If you're just scrolling a
> >>> browser window, it probably doesn't make sense to undo the scrolling.
> >>> Even if you're, say, painting in a paint app, it probably makes sense
> >>> to just treat cancel as end and commit the paint action, provided the
> >>> user can just undo that if necessary.
> >>
> >> I think undoing automatically would be better, but I'm not a paint app
> >> developer, and it's beside the point :).
> >>
> >>> In general, undoing the touch stream is similar to undo, which most
> >>> apps are already handling.  From an application point of view, if you
> >>> have to handle unowned events (that's the XI 2.1 term, right?) for a
> >>> small initial window (100ms, 3s or a week), you end up with the same
> >>> application side complexity, and I don't see what it is apps can do
> >>> differently once they know that the remaining event stream belongs to
> >>> them.
> >>
> >> Lets say the WM responds to a short flick gesture. The user performs the
> >> entire gesture before the WM has decided whether it matches the flick
> >> pattern. Now we've sent the touch stream in its entirety to the wayland
> >> client, and we have to cancel it after the fact.
> >>
> >> This leaves the client in a precarious situation. It has received the
> >> entire stream of touch events for a touch, but it doesn't yet know if
> >> the stream might be cancelled or not. The client may be buffering the
> >> application state so it can undo changes if it receives a touch cancel.
> >> We don't want to force it to buffer state changes forever, so we must
> >> provide some guidance to developers on how to handle this situation. Do
> >> we say that after 1 s the client can assume the touch stream is theirs?
> >> That's hackish and may produce annoying side effects if the machine is
> >> swapping and goes "dead" for a few seconds.
> >
> > The touch stream ends with either 1) all touch points up or 2) touch
> > cancel.  If the

Re: [PATCH] Add touch events to protocol.

2011-07-07 Thread Chase Douglas
On 07/07/2011 09:52 AM, Kristian Høgsberg wrote:
> 2011/7/7 Chase Douglas :
>> On 07/07/2011 07:28 AM, Kristian Høgsberg wrote:
>>> On Tue, Jul 5, 2011 at 1:08 PM, Chase Douglas
>>>  wrote:
>>>> On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
>>>>> +
>>>>> +
>>>>> +
>>>>> +
>>>>> +
>>>>> +
>>>>> +
>>>>> +
>>>>> 
>>>>
>>>> I never understood the concept behind "touch cancel". If I'm a client,
>>>> I'm supposed to sit around waiting for a touch cancel event at any
>>>> point? Is it bounded in time, or could I get a touch cancel event 20
>>>> seconds into a stream of motion events? I don't see a way to get around
>>>> explicitly conferring whether a touch  is "accepted" or "rejected"
>>>> (implicitly or explicitly) at some point, which is what XInput 2.1 is
>>>> aiming to do. However, the exact mechanisms may be different in Wayland
>>>> since we don't have the old X cruft to deal with.
>>>
>>> Yes, any stream of touch event, no matter how long, can be cancelled
>>> by the compositor at any time.  The use cases are when the compositor
>>> recognizes a global gesture (three finger pinch to go to home screen
>>> or so) or if an important system event happens (incoming phone call or
>>> such).
>>>
>>> Whether and how to undo the effects of the touch events up until the
>>> cancel is an application policy decision.  If you're just scrolling a
>>> browser window, it probably doesn't make sense to undo the scrolling.
>>> Even if you're, say, painting in a paint app, it probably makes sense
>>> to just treat cancel as end and commit the paint action, provided the
>>> user can just undo that if necessary.
>>
>> I think undoing automatically would be better, but I'm not a paint app
>> developer, and it's beside the point :).
>>
>>> In general, undoing the touch stream is similar to undo, which most
>>> apps are already handling.  From an application point of view, if you
>>> have to handle unowned events (that's the XI 2.1 term, right?) for a
>>> small initial window (100ms, 3s or a week), you end up with the same
>>> application side complexity, and I don't see what it is apps can do
>>> differently once they know that the remaining event stream belongs to
>>> them.
>>
>> Lets say the WM responds to a short flick gesture. The user performs the
>> entire gesture before the WM has decided whether it matches the flick
>> pattern. Now we've sent the touch stream in its entirety to the wayland
>> client, and we have to cancel it after the fact.
>>
>> This leaves the client in a precarious situation. It has received the
>> entire stream of touch events for a touch, but it doesn't yet know if
>> the stream might be cancelled or not. The client may be buffering the
>> application state so it can undo changes if it receives a touch cancel.
>> We don't want to force it to buffer state changes forever, so we must
>> provide some guidance to developers on how to handle this situation. Do
>> we say that after 1 s the client can assume the touch stream is theirs?
>> That's hackish and may produce annoying side effects if the machine is
>> swapping and goes "dead" for a few seconds.
> 
> The touch stream ends with either 1) all touch points up or 2) touch
> cancel.  If the application receives up events corresponding to
> all touch down events, the touch stream belongs to the client.

What do we do when the user performs a one finger flick and the flick
hasn't been recognized until the touch end (one definition of flick is
if the final velocity at touch end is above a certain threshold)? Do we
withhold the touch end event from the client until we are sure?

If we withhold the touch end event, we should at least send a touch
update event to describe the location of the touch end event (if
different than the previous event). If you do that, then the touch end
event is the same as a touch ownership event, as it only conveys the
fact that the client now owns the touch stream.

At best we can eliminate one event, the touch ownership-like event, if
the compositor is certain before the touch stream ends that the stream
does not comprise a compositor gesture. The trade-off for potentially
saving one event is we are forcing clients to assume a touch stream can
be canceled at any time, which may be a burden for a very long touch stream.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH] Add touch events to protocol.

2011-07-07 Thread Kristian Høgsberg
2011/7/7 Chase Douglas :
> On 07/07/2011 07:28 AM, Kristian Høgsberg wrote:
>> On Tue, Jul 5, 2011 at 1:08 PM, Chase Douglas
>>  wrote:
>>> Hi Laszlo,
>>>
>>> On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
>>>>
>>>>  From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
>>>> From: Laszlo Agocs 
>>>> Date: Wed, 29 Jun 2011 17:51:29 +0300
>>>> Subject: [PATCH] Add touch events to protocol.
>>>>
>>>> ---
>>>>   protocol/wayland.xml |   39 +++
>>>>   1 files changed, 39 insertions(+), 0 deletions(-)
>>>>
>>>> diff --git a/protocol/wayland.xml b/protocol/wayland.xml
>>>> index fd54245..874fd5f 100644
>>>> --- a/protocol/wayland.xml
>>>> +++ b/protocol/wayland.xml
>>>> @@ -461,6 +461,45 @@
>>>>         
>>>>         
>>>>       
>>>> +
>>>> +    
>>>> +    
>>>> +      
>>>> +      
>>>> +      
>>>> +      
>>>> +    
>>>> +
>>>> +    
>>>> +      
>>>> +      
>>>> +      
>>>> +      
>>>> +    
>>>> +
>>>> +    
>>>> +      
>>>> +      
>>>> +    
>>>> +
>>>> +    
>>>> +      
>>>> +      
>>>> +      
>>>> +      
>>>> +    
>>>
>>> What about pressure or shape? I don't know the wayland protocol yet, but
>>> is it possible to send a map of properties and values sort of like
>>> valuators?
>>>
>>>> +
>>>> +    
>>>> +    
>>>> +    
>>>> +
>>>> +    
>>>> +    
>>>> +    
>>>>     
>>>
>>> I never understood the concept behind "touch cancel". If I'm a client,
>>> I'm supposed to sit around waiting for a touch cancel event at any
>>> point? Is it bounded in time, or could I get a touch cancel event 20
>>> seconds into a stream of motion events? I don't see a way to get around
>>> explicitly conferring whether a touch  is "accepted" or "rejected"
>>> (implicitly or explicitly) at some point, which is what XInput 2.1 is
>>> aiming to do. However, the exact mechanisms may be different in Wayland
>>> since we don't have the old X cruft to deal with.
>>
>> Yes, any stream of touch event, no matter how long, can be cancelled
>> by the compositor at any time.  The use cases are when the compositor
>> recognizes a global gesture (three finger pinch to go to home screen
>> or so) or if an important system event happens (incoming phone call or
>> such).
>>
>> Whether and how to undo the effects of the touch events up until the
>> cancel is an application policy decision.  If you're just scrolling a
>> browser window, it probably doesn't make sense to undo the scrolling.
>> Even if you're, say, painting in a paint app, it probably makes sense
>> to just treat cancel as end and commit the paint action, provided the
>> user can just undo that if necessary.
>
> I think undoing automatically would be better, but I'm not a paint app
> developer, and it's beside the point :).
>
>> In general, undoing the touch stream is similar to undo, which most
>> apps are already handling.  From an application point of view, if you
>> have to handle unowned events (that's the XI 2.1 term, right?) for a
>> small initial window (100ms, 3s or a week), you end up with the same
>> application side complexity, and I don't see what it is apps can do
>> differently once they know that the remaining event stream belongs to
>> them.
>
> Let's say the WM responds to a short flick gesture. The user performs the
> entire gesture before the WM has decided whether it matches the flick
> pattern. Now we've sent the touch stream in its entirety to the wayland
> client, and we have to cancel it after the fact.
>
> This leaves the client in a precarious situation. It has received the
> entire stream of touch events for a touch, but it doesn't yet know if
> the stream might be cancelled or not. The client may be buffering the
> application state so it can undo changes if it receives a touch cancel.
> We don't want to force it to buffer state changes forever, so we must
> provide some guidance to developers on how to handle this situation. Do
> we say that after 1 s the client can assume the touch stream is theirs?
> That's hackish and may produce annoying side effects if the machine is
> swapping and goes "dead" for a few seconds.

The touch stream ends with either 1) all touch points up or 2) touch
cancel.  If the application receives up events corresponding to
all touch down events, the touch stream belongs to the client.

> With XI 2.1 we get around this issue by leaving touches as unowned until
> a client explicitly accepts the events. An entire touch stream from
> beginning to end may exist in the unowned state, and then once a client
> accepts or rejects it the rest of the clients get notified so they can
> determine what to do.

I think that's complexity that's only needed when the global gesture
recognizer is just another client, as opposed to a part of the compositor.

Kristian
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH] Add touch events to protocol.

2011-07-07 Thread Chase Douglas
On 07/07/2011 07:28 AM, Kristian Høgsberg wrote:
> On Tue, Jul 5, 2011 at 1:08 PM, Chase Douglas
>  wrote:
>> Hi Laszlo,
>>
>> On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
>>>
>>>  From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
>>> From: Laszlo Agocs 
>>> Date: Wed, 29 Jun 2011 17:51:29 +0300
>>> Subject: [PATCH] Add touch events to protocol.
>>>
>>> ---
>>>   protocol/wayland.xml |   39 +++
>>>   1 files changed, 39 insertions(+), 0 deletions(-)
>>>
>>> diff --git a/protocol/wayland.xml b/protocol/wayland.xml
>>> index fd54245..874fd5f 100644
>>> --- a/protocol/wayland.xml
>>> +++ b/protocol/wayland.xml
>>> @@ -461,6 +461,45 @@
>>> 
>>> 
>>>   
>>> +
>>> +
>>> +
>>> +  
>>> +  
>>> +  
>>> +  
>>> +
>>> +
>>> +
>>> +  
>>> +  
>>> +  
>>> +  
>>> +
>>> +
>>> +
>>> +  
>>> +  
>>> +
>>> +
>>> +
>>> +  
>>> +  
>>> +  
>>> +  
>>> +
>>
>> What about pressure or shape? I don't know the wayland protocol yet, but
>> is it possible to send a map of properties and values sort of like
>> valuators?
>>
>>> +
>>> +
>>> +
>>> +
>>> +
>>> +
>>> +
>>> +
>>> 
>>
>> I never understood the concept behind "touch cancel". If I'm a client,
>> I'm supposed to sit around waiting for a touch cancel event at any
>> point? Is it bounded in time, or could I get a touch cancel event 20
>> seconds into a stream of motion events? I don't see a way to get around
>> explicitly conferring whether a touch  is "accepted" or "rejected"
>> (implicitly or explicitly) at some point, which is what XInput 2.1 is
>> aiming to do. However, the exact mechanisms may be different in Wayland
>> since we don't have the old X cruft to deal with.
> 
> Yes, any stream of touch event, no matter how long, can be cancelled
> by the compositor at any time.  The use cases are when the compositor
> recognizes a global gesture (three finger pinch to go to home screen
> or so) or if an important system event happens (incoming phone call or
> such).
> 
> Whether and how to undo the effects of the touch events up until the
> cancel is an application policy decision.  If you're just scrolling a
> browser window, it probably doesn't make sense to undo the scrolling.
> Even if you're, say, painting in a paint app, it probably makes sense
> to just treat cancel as end and commit the paint action, provided the
> user can just undo that if necessary.

I think undoing automatically would be better, but I'm not a paint app
developer, and it's beside the point :).

> In general, undoing the touch stream is similar to undo, which most
> apps are already handling.  From an application point of view, if you
> have to handle unowned events (that's the XI 2.1 term, right?) for a
> small initial window (100ms, 3s or a week), you end up with the same
> application side complexity, and I don't see what it is apps can do
> differently once they know that the remaining event stream belongs to
> them.

Let's say the WM responds to a short flick gesture. The user performs the
entire gesture before the WM has decided whether it matches the flick
pattern. Now we've sent the touch stream in its entirety to the wayland
client, and we have to cancel it after the fact.

This leaves the client in a precarious situation. It has received the
entire stream of touch events for a touch, but it doesn't yet know if
the stream might be cancelled or not. The client may be buffering the
application state so it can undo changes if it receives a touch cancel.
We don't want to force it to buffer state changes forever, so we must
provide some guidance to developers on how to handle this situation. Do
we say that after 1 s the client can assume the touch stream is theirs?
That's hackish and may produce annoying side effects if the machine is
swapping and goes "dead" for a few seconds.

With XI 2.1 we get around this issue by leaving touches as unowned until
a client explicitly accepts the events. An entire touch stream from
beginning to end may exist in the unowned state, and then once a client
accepts or rejects it the rest of the clients get notified so they can
determine what to do.

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH] Add touch events to protocol.

2011-07-07 Thread Kristian Høgsberg
On Tue, Jul 5, 2011 at 1:08 PM, Chase Douglas
 wrote:
> Hi Laszlo,
>
> On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
>>
>>  From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
>> From: Laszlo Agocs 
>> Date: Wed, 29 Jun 2011 17:51:29 +0300
>> Subject: [PATCH] Add touch events to protocol.
>>
>> ---
>>   protocol/wayland.xml |   39 +++
>>   1 files changed, 39 insertions(+), 0 deletions(-)
>>
>> diff --git a/protocol/wayland.xml b/protocol/wayland.xml
>> index fd54245..874fd5f 100644
>> --- a/protocol/wayland.xml
>> +++ b/protocol/wayland.xml
>> @@ -461,6 +461,45 @@
>>         
>>         
>>       
>> +
>> +    
>> +    
>> +      
>> +      
>> +      
>> +      
>> +    
>> +
>> +    
>> +      
>> +      
>> +      
>> +      
>> +    
>> +
>> +    
>> +      
>> +      
>> +    
>> +
>> +    
>> +      
>> +      
>> +      
>> +      
>> +    
>
> What about pressure or shape? I don't know the wayland protocol yet, but
> is it possible to send a map of properties and values sort of like
> valuators?
>
>> +
>> +    
>> +    
>> +    
>> +
>> +    
>> +    
>> +    
>>     
>
> I never understood the concept behind "touch cancel". If I'm a client,
> I'm supposed to sit around waiting for a touch cancel event at any
> point? Is it bounded in time, or could I get a touch cancel event 20
> seconds into a stream of motion events? I don't see a way to get around
> explicitly conferring whether a touch  is "accepted" or "rejected"
> (implicitly or explicitly) at some point, which is what XInput 2.1 is
> aiming to do. However, the exact mechanisms may be different in Wayland
> since we don't have the old X cruft to deal with.

Yes, any stream of touch event, no matter how long, can be cancelled
by the compositor at any time.  The use cases are when the compositor
recognizes a global gesture (three finger pinch to go to home screen
or so) or if an important system event happens (incoming phone call or
such).

Whether and how to undo the effects of the touch events up until the
cancel is an application policy decision.  If you're just scrolling a
browser window, it probably doesn't make sense to undo the scrolling.
Even if you're, say, painting in a paint app, it probably makes sense
to just treat cancel as end and commit the paint action, provided the
user can just undo that if necessary.

In general, undoing the touch stream is similar to undo, which most
apps are already handling.  From an application point of view, if you
have to handle unowned events (that's the XI 2.1 term, right?) for a
small initial window (100ms, 3s or a week), you end up with the same
application side complexity, and I don't see what it is apps can do
differently once they know that the remaining event stream belongs to
them.

Kristian
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH] Add touch events to protocol.

2011-07-05 Thread Chase Douglas
Hi Laszlo,

On 06/29/2011 07:54 AM, Laszlo Agocs wrote:
> 
>  From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
> From: Laszlo Agocs 
> Date: Wed, 29 Jun 2011 17:51:29 +0300
> Subject: [PATCH] Add touch events to protocol.
> 
> ---
>   protocol/wayland.xml |   39 +++
>   1 files changed, 39 insertions(+), 0 deletions(-)
> 
> diff --git a/protocol/wayland.xml b/protocol/wayland.xml
> index fd54245..874fd5f 100644
> --- a/protocol/wayland.xml
> +++ b/protocol/wayland.xml
> @@ -461,6 +461,45 @@
> 
> 
>   
> +
> +
> +
> +  
> +  
> +  
> +  
> +
> +
> +
> +  
> +  
> +  
> +  
> +
> +
> +
> +  
> +  
> +
> +
> +
> +  
> +  
> +  
> +  
> +

What about pressure or shape? I don't know the wayland protocol yet, but
is it possible to send a map of properties and values sort of like
valuators?

> +
> +
> +
> +
> +
> +
> +
> +
> 

I never understood the concept behind "touch cancel". If I'm a client,
I'm supposed to sit around waiting for a touch cancel event at any
point? Is it bounded in time, or could I get a touch cancel event 20
seconds into a stream of motion events? I don't see a way to get around
explicitly conferring whether a touch  is "accepted" or "rejected"
(implicitly or explicitly) at some point, which is what XInput 2.1 is
aiming to do. However, the exact mechanisms may be different in Wayland
since we don't have the old X cruft to deal with.

Thanks!

-- Chase
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH] Add touch events to protocol.

2011-06-29 Thread Laszlo Agocs


From f656362511e2622e3cde6062e156b59a83b50e03 Mon Sep 17 00:00:00 2001
From: Laszlo Agocs 
Date: Wed, 29 Jun 2011 17:51:29 +0300
Subject: [PATCH] Add touch events to protocol.

---
 protocol/wayland.xml |   39 +++
 1 files changed, 39 insertions(+), 0 deletions(-)

diff --git a/protocol/wayland.xml b/protocol/wayland.xml
index fd54245..874fd5f 100644
--- a/protocol/wayland.xml
+++ b/protocol/wayland.xml
@@ -461,6 +461,45 @@
   
   
 
+
+
+
+  
+  
+  
+  
+
+
+
+  
+  
+  
+  
+
+
+
+  
+  
+
+
+
+  
+  
+  
+  
+
+
+
+
+
+
+
+
+
   


--
1.7.4.1

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel