Re: [PATCH libinput] touchpad: only pair internal trackpoint devices with internal touchpads

2015-04-15 Thread Bill Spitzak

On 04/14/2015 09:01 PM, Peter Hutterer wrote:

Internal touchpads with trackpoints are either BUS_I8042 or BUS_I2C, but not
BUS_USB. Lenovo sells external keyboards with a trackpoint built-in; make sure
we don't pair that trackpoint with the internal touchpad.
And likewise, the internal trackpoint should not be paired with e.g. a wacom
touch device.

Lenovo had one external device that has a trackpoint and a touchpad on an
external keyboard. That device won't be covered by this patch; if we have a
user we can re-consider.


Would checking if both devices are on the same bus (rather than not on 
the USB bus) work for the systems you have, and also cover this case?


(i.e. use bus_tp == bus_trp instead of tp_is_internal && trp_is_internal)

Pardon me if I have no idea what I am talking about.
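For illustration, a minimal sketch of that check using libevdev (this
helper is hypothetical, not libinput's actual pairing code):

#include <stdbool.h>
#include <libevdev/libevdev.h>

/* Pair only when both devices report the same bus type, instead of
 * requiring both to be non-USB ("internal"). */
static bool
same_bus(struct libevdev *touchpad, struct libevdev *trackpoint)
{
	return libevdev_get_id_bustype(touchpad) ==
	       libevdev_get_id_bustype(trackpoint);
}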

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput] tools: add a tool to list local devices and the default configurations

2015-04-15 Thread Hans de Goede

Hi,

On 15-04-15 05:49, Peter Hutterer wrote:

xinput or an equivalent isn't available under wayland, but the majority of
use-cases of "why doesn't my device work" or "why does feature X not work"
should be covered by simply listing the local devices and their config
options.

Example output:

Device: SynPS/2 Synaptics TouchPad
Kernel: /dev/input/event4
Group:  9
Seat:   seat0, default
Size:   97.33x62.40mm
Capabilities:   pointer
Tap-to-click:   disabled
Left-handed:disabled
Nat.scrolling:  disabled
Calibration:n/a
Scroll methods: *two-finger
Click methods:  *button-areas clickfinger

Signed-off-by: Peter Hutterer peter.hutte...@who-t.net


Looks good:

Reviewed-by: Hans de Goede hdego...@redhat.com

Regards,

Hans


---
  tools/.gitignore|   1 +
  tools/Makefile.am   |   6 +
  tools/libinput-list-devices.c   | 282 
  tools/libinput-list-devices.man |  37 ++
  4 files changed, 326 insertions(+)
  create mode 100644 tools/libinput-list-devices.c
  create mode 100644 tools/libinput-list-devices.man

diff --git a/tools/.gitignore b/tools/.gitignore
index 6d530e6..e58dba9 100644
--- a/tools/.gitignore
+++ b/tools/.gitignore
@@ -1,3 +1,4 @@
  event-debug
  event-gui
  ptraccel-debug
+libinput-list-devices
diff --git a/tools/Makefile.am b/tools/Makefile.am
index 34d5ab0..b8cc218 100644
--- a/tools/Makefile.am
+++ b/tools/Makefile.am
@@ -1,4 +1,5 @@
  noinst_PROGRAMS = event-debug ptraccel-debug
+bin_PROGRAMS = libinput-list-devices
  noinst_LTLIBRARIES = libshared.la

  AM_CPPFLAGS = -I$(top_srcdir)/include \
@@ -18,6 +19,11 @@ ptraccel_debug_SOURCES = ptraccel-debug.c
  ptraccel_debug_LDADD = ../src/libfilter.la
  ptraccel_debug_LDFLAGS = -no-install

+libinput_list_devices_SOURCES = libinput-list-devices.c
+libinput_list_devices_LDADD = ../src/libinput.la libshared.la $(LIBUDEV_LIBS)
+libinput_list_devices_CFLAGS = $(LIBUDEV_CFLAGS)
+man1_MANS = libinput-list-devices.man
+
  if BUILD_EVENTGUI
  noinst_PROGRAMS += event-gui

diff --git a/tools/libinput-list-devices.c b/tools/libinput-list-devices.c
new file mode 100644
index 0000000..24c7c53
--- /dev/null
+++ b/tools/libinput-list-devices.c
@@ -0,0 +1,282 @@
+/*
+ * Copyright © 2015 Red Hat, Inc.
+ *
+ * Permission to use, copy, modify, distribute, and sell this software and
+ * its documentation for any purpose is hereby granted without fee, provided
+ * that the above copyright notice appear in all copies and that both that
+ * copyright notice and this permission notice appear in supporting
+ * documentation, and that the name of the copyright holders not be used in
+ * advertising or publicity pertaining to distribution of the software
+ * without specific, written prior permission.  The copyright holders make
+ * no representations about the suitability of this software for any
+ * purpose.  It is provided "as is" without express or implied warranty.
+ *
+ * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS
+ * SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
+ * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+ * CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+ * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+#define _GNU_SOURCE
+#include <errno.h>
+#include <fcntl.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <libudev.h>
+
+#include <libinput.h>
+
+#include "shared.h"
+
+static int
+open_restricted(const char *path, int flags, void *user_data)
+{
+	int fd = open(path, flags);
+	if (fd < 0)
+		fprintf(stderr, "Failed to open %s (%s)\n",
+			path, strerror(errno));
+	return fd < 0 ? -errno : fd;
+}
+
+static void
+close_restricted(int fd, void *user_data)
+{
+   close(fd);
+}
+
+static const struct libinput_interface interface = {
+   .open_restricted = open_restricted,
+   .close_restricted = close_restricted,
+};
+
+static inline const char*
+bool_to_str(bool b)
+{
+	if (b)
+		return "yes";
+	else
+		return "no";
+}
+
+static const char *
+tap_default(struct libinput_device *device)
+{
+	if (!libinput_device_config_tap_get_finger_count(device))
+		return "n/a";
+
+	if (libinput_device_config_tap_get_default_enabled(device))
+		return "enabled";
+	else
+		return "disabled";
+}
+
+static const char*
+left_handed_default(struct libinput_device *device)
+{
+	if (!libinput_device_config_left_handed_is_available(device))
+		return "n/a";
+
+	if (libinput_device_config_left_handed_get_default(device))
+		return "enabled";
+	else
+		return "disabled";
+}
+
+static const char *

Re: [PATCH libinput 0/6] Middle mouse button emulation

2015-04-15 Thread Bill Spitzak
Is this being done for physical mice with two buttons? The comments seem 
to indicate not, which seems kind of pointless: if a client has to 
emulate it for a physical mouse it will be emulating it for touchpads as 
well.


Delaying a press to see if it is a middle click is also probably 
annoying. It should send the down event for the first button, then the 
down event for middle when the second button is pressed (ie the client 
will see it as a left+middle or right+middle click).


On 04/14/2015 08:51 PM, Peter Hutterer wrote:


we already had middle mouse button emulation for softbuttons, but not on
physical buttons. This patchset adds support for physical buttons.

Couple of general notes:
* some devices, e.g. 2-button touchpads, have middle button emulation
   enabled, but not exposed as config option
* if you have a middle button, emulation is always off by default.
* pressing anything but left/right will stop emulation for this sequence, so
   you can't do an emulated middle button + back button or something. I'm not
   aware of any workflow that actually requires this.

Cheers,
   Peter



___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH weston 3/3] desktop-shell: use work area instead of panel size to constrain moves

2015-04-15 Thread Derek Foreman
This fixes the case where an output isn't at y = 0, where the panel height
isn't correct for constraints.

It also kills a bug - moving a window with a mod-drag off the top of the
screen clamped earlier than it should.

Signed-off-by: Derek Foreman der...@osg.samsung.com
---
 desktop-shell/shell.c | 21 -
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/desktop-shell/shell.c b/desktop-shell/shell.c
index bb3fd2d..834713d 100644
--- a/desktop-shell/shell.c
+++ b/desktop-shell/shell.c
@@ -1655,24 +1655,27 @@ constrain_position(struct weston_move_grab *move, int *cx, int *cy)
 {
	struct shell_surface *shsurf = move->base.shsurf;
	struct weston_pointer *pointer = move->base.grab.pointer;
-	int x, y, panel_width, panel_height, bottom;
+	int x, y, bottom;
	const int safety = 50;
+	pixman_rectangle32_t area;
 
	x = wl_fixed_to_int(pointer->x + move->dx);
	y = wl_fixed_to_int(pointer->y + move->dy);
 
	if (shsurf->shell->panel_position == DESKTOP_SHELL_PANEL_POSITION_TOP) {
-		get_output_panel_size(shsurf->shell, shsurf->surface->output,
-				      &panel_width, &panel_height);
+		get_output_work_area(shsurf->shell,
+				     shsurf->surface->output,
+				     &area);
 
-		bottom = y + shsurf->geometry.height;
-		if (bottom - panel_height < safety)
-			y = panel_height + safety -
-				shsurf->geometry.height;
+		bottom = y + shsurf->geometry.height + shsurf->geometry.y;
+		if (bottom - safety < area.y)
+			y = area.y + safety - shsurf->geometry.height
+			  - shsurf->geometry.y;
 
		if (move->client_initiated &&
-		    y + shsurf->geometry.y < panel_height)
-			y = panel_height - shsurf->geometry.y;
+		    y + shsurf->geometry.y < area.y)
+			y = area.y - shsurf->geometry.y;
+
}
 
*cx = x;
-- 
2.1.4

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH weston 1/3] desktop-shell: add output co-ordinates to get_output_work_area()

2015-04-15 Thread Derek Foreman
get_output_work_area() now returns the absolute work area including the
output's offset.

This will make math a little simpler later when we use it to constrain
window moves.

Signed-off-by: Derek Foreman der...@osg.samsung.com
---
 desktop-shell/shell.c | 22 +-
 1 file changed, 9 insertions(+), 13 deletions(-)

diff --git a/desktop-shell/shell.c b/desktop-shell/shell.c
index 96aa8f3..204068c 100644
--- a/desktop-shell/shell.c
+++ b/desktop-shell/shell.c
@@ -442,21 +442,20 @@ get_output_work_area(struct desktop_shell *shell,
 {
int32_t panel_width = 0, panel_height = 0;
 
-	area->x = 0;
-	area->y = 0;
+	area->x = output->x;
+	area->y = output->y;
 
	get_output_panel_size(shell, output, &panel_width, &panel_height);
-
	switch (shell->panel_position) {
	case DESKTOP_SHELL_PANEL_POSITION_TOP:
	default:
-		area->y = panel_height;
+		area->y += panel_height;
	case DESKTOP_SHELL_PANEL_POSITION_BOTTOM:
		area->width = output->width;
		area->height = output->height - panel_height;
		break;
	case DESKTOP_SHELL_PANEL_POSITION_LEFT:
-		area->x = panel_width;
+		area->x += panel_width;
	case DESKTOP_SHELL_PANEL_POSITION_RIGHT:
		area->width = output->width - panel_width;
		area->height = output->height;
@@ -5417,7 +5416,7 @@ weston_view_set_initial_position(struct weston_view *view,
struct weston_compositor *compositor = shell-compositor;
int ix = 0, iy = 0;
int32_t range_x, range_y;
-   int32_t dx, dy, x, y;
+   int32_t x, y;
struct weston_output *output, *target_output = NULL;
struct weston_seat *seat;
pixman_rectangle32_t area;
@@ -5454,19 +5453,16 @@ weston_view_set_initial_position(struct weston_view *view,
 */
	get_output_work_area(shell, target_output, &area);
 
-	dx = area.x;
-	dy = area.y;
+	x = area.x;
+	y = area.y;
	range_x = area.width - view->surface->width;
	range_y = area.height - view->surface->height;
 
	if (range_x > 0)
-		dx += random() % range_x;
+		x += random() % range_x;
 
	if (range_y > 0)
-		dy += random() % range_y;
-
-	x = target_output->x + dx;
-	y = target_output->y + dy;
+		y += random() % range_y;
 
weston_view_set_position(view, x, y);
 }
-- 
2.1.4

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH weston 0/3] Clean up window placement and move constraining

2015-04-15 Thread Derek Foreman
If we give an output a non-zero Y co-ordinate, window placement starts
putting things off screen due to a bug in get_output_panel_size().
Constraints still work due to a symmetrical bug in constrain_position().

This should straighten things out a little bit.
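A worked example of the bug (hypothetical numbers): with a second output at
y = 768 and a 32-pixel top panel, get_output_panel_size() added the panel
view's *global* y, reporting a panel height of 768 + 32 = 800 instead of 32;
the resulting work area height of output->height - 800 pushed newly placed
windows off screen. After this series the panel height is computed relative
to the output (32 again) and get_output_work_area() returns the absolute
area (area.y = 768 + 32 = 800), so callers no longer add the output offset
themselves.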

Derek Foreman (3):
  desktop-shell: add output co-ordinates to get_output_work_area()
  desktop-shell: use output position in get_output_panel_size()
  desktop-shell: use work area instead of panel size to constrain moves

 desktop-shell/shell.c | 52 +--
 1 file changed, 25 insertions(+), 27 deletions(-)

-- 
2.1.4

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


[PATCH weston 2/3] desktop-shell: use output position in get_output_panel_size()

2015-04-15 Thread Derek Foreman
The panel size calculation needs to take the output position into account
or it's only correct when the output is at 0, 0.

Signed-off-by: Derek Foreman der...@osg.samsung.com
---
 desktop-shell/shell.c | 9 -
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/desktop-shell/shell.c b/desktop-shell/shell.c
index 204068c..bb3fd2d 100644
--- a/desktop-shell/shell.c
+++ b/desktop-shell/shell.c
@@ -406,13 +406,12 @@ get_output_panel_size(struct desktop_shell *shell,
	switch (shell->panel_position) {
	case DESKTOP_SHELL_PANEL_POSITION_TOP:
	case DESKTOP_SHELL_PANEL_POSITION_BOTTOM:
-
		weston_view_to_global_float(view,
					    view->surface->width, 0,
					    &x, &y);
 
-		*width = (int) x;
-		*height = view->surface->height + (int) y;
+		*width = (int)x - output->x;
+		*height = view->surface->height + (int) y - output->y;
		return;
 
	case DESKTOP_SHELL_PANEL_POSITION_LEFT:
@@ -421,8 +420,8 @@ get_output_panel_size(struct desktop_shell *shell,
					    0, view->surface->height,
					    &x, &y);
-
-		*width = view->surface->width + (int) x;
-		*height = (int) y;
+		*width = view->surface->width + (int)x - output->x;
+		*height = (int)y - output->y;
return;
 
default:
-- 
2.1.4

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: EFL/Wayland and xdg-shell

2015-04-15 Thread Christian Stroetmann

On the 15th of April 2015 21:31, Daniel Stone wrote:

On 14 April 2015 at 04:19, Jasper St. Pierre jstpie...@mecheye.net wrote:

Boo hoo.

you're the only ones who want physically-based rendering raytraced
desktops.

Enlightenment is absolutely nothing like my desktop environment of
choice either, but this is staggeringly unnecessary. If you want
xdg_shell to actually just be gtk_shell_base, and be totally
unencumbered by the shackles of ever having to work with anyone else,
this is definitely the way to go about it. But core protocol
development is not that.

Be nice.



Who knows what the future brings? At least I can offer this information:

OSGLand [1] and Ontologics OntoLix officially started on the 9th of 
November 2014 [2].


Kind regards
C.S.

[1] OSGLand 
www.ontolinux.com/technology/ontographics/ontographics.htm#osgland

[2] Ontologics OntoLix officially
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: EFL/Wayland and xdg-shell

2015-04-15 Thread Daniel Stone
On 14 April 2015 at 04:19, Jasper St. Pierre jstpie...@mecheye.net wrote:
 Boo hoo.

 you're the only ones who want physically-based rendering raytraced
 desktops.

Enlightenment is absolutely nothing like my desktop environment of
choice either, but this is staggeringly unnecessary. If you want
xdg_shell to actually just be gtk_shell_base, and be totally
unencumbered by the shackles of ever having to work with anyone else,
this is definitely the way to go about it. But core protocol
development is not that.

Be nice.
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: EFL/Wayland and xdg-shell

2015-04-15 Thread Daniel Stone
Hi,
Replies to both here ...

On 15 April 2015 at 02:39, Carsten Haitzler ras...@rasterman.com wrote:
 On Tue, 14 Apr 2015 01:31:56 +0100 Daniel Stone dan...@fooishbar.org said:
 On 14 April 2015 at 01:02, Bryce Harrington br...@osg.samsung.com wrote:
  While window rotation was used more as an example of how built-in
  assumptions in the API could unintentionally constrain D-E's, than as a
  seriously needed feature, they did describe a number of ideas for rather
  elaborate window behaviors:
 
* Rotation animations with frame updates to allow widget re-layouts
  while the window is rotating.

 So not just animating the transition, but requesting the client
 animate the content as well? That's extremely esoteric, and seems like
 it belongs in a separate extension - which is possible.

 not esoteric - an actual request from people making products.

The reason I took that as 'esoteric' was that I assumed it was about
free window rotation inside Weston: a feature which is absolutely
pointless but as a proof-of-concept for a hidden global co-ordinate
space. Makes a lot more sense for whole-display rotation. More below.

* Non-linear surface movement/resizing animations and transition
  effects.

 This seems like a compositor thing?

 it is.

i.e. no bearing on actual xdg_shell

  There was lots of interest in hearing more about Wayland's plans for
  text-cursor-position and input-method, which are necessary for Asian
  languages.

It's sadly been unmaintained for a while.

  A particular question was how clients could coordinate with
  the virtual keyboard input window so that it doesn't overlay where text
  is being inserted.

 See the text_cursor_position protocol.

 actually the other way around... clients know where the vkbd region(s) are so
 client can shuffle content to be visible. :)

In a VKB (rather than overlay-helper, as used for complex composition)
scenario, I would expect xdg-shell to send a configure event to resize
the window and allow for the VKB. If this isn't sufficient - I
honestly don't know what the behaviour is under X11 - then a potential
version bump of wl_text could provide for this.

 RandR is a disaster of an API to expose to clients. I would suggest
 that anything more than a list of monitors (not outputs/connectors)
 with their resolutions, relative monitor positioning, and the ability
 to change/disable the above, is asking for trouble.

 agreed - exposing randr is not sane. it's an internal compositor matter at 
 this
 level of detail (if compositor chooses to have a protocol, do it all itself
 internally etc. is up to it, but any tool to configure screen output at this
 level would be compositor specific).

 what i do think is needed is a list of screens with some kind of types 
 attached
 and rough metadata like one screen is left or right of another (so clients 
 like
 flight simulators could ask to have special surface on the left/right screens
 showing views out the sides of the cockpit and middle screen is out the 
 front).
 something like:

 0 desktop primary
 1 desktop left_of primary
 2 desktop right_of primary
 3 mobile detached
 4 tv above primary

 (pretend a phone with 4 external monitors attached).

Hell of a phone. More seriously, yes, a display-management API could
expose this, however if the aim is for clients to communicate intent
('this is a presentation') rather than for compositors to communicate
situation ('this is one of the external monitors'), then we probably
don't need this. wl_output already provides the relative geometry, so
all that is required for this is a way to communicate output type.

 perhaps listing
 resolution, rotation and dpi as well (pick the screen with the biggest 
 physical
 size or highest dpi - adjust window contents based on screen rotation - eg so
 some controls are always facing the bottom edge of the screen where some
 button controls are - the screen shows the legend text).

 apps should not be configuring any of this. it's read-only.

This already exists today - wl_output's geometry (DPI, rotation,
subpixel information, position within global co-ordinate space) and
mode (mode) events. So, no problem.
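For reference, all of this already arrives through the core protocol; a
minimal listener sketch (registry binding omitted, handler bodies left
empty):

#include <wayland-client.h>

static void
output_geometry(void *data, struct wl_output *output,
		int32_t x, int32_t y, int32_t physical_width,
		int32_t physical_height, int32_t subpixel,
		const char *make, const char *model, int32_t transform)
{
	/* position in the global space, physical size in mm (enough to
	 * derive DPI), subpixel layout, rotation/transform */
}

static void
output_mode(void *data, struct wl_output *output, uint32_t flags,
	    int32_t width, int32_t height, int32_t refresh)
{
	/* pixel resolution and refresh rate */
}

static void
output_done(void *data, struct wl_output *output)
{
}

static void
output_scale(void *data, struct wl_output *output, int32_t factor)
{
}

static const struct wl_output_listener output_listener = {
	output_geometry,
	output_mode,
	output_done,
	output_scale,
};

/* usage: wl_output_add_listener(output, &output_listener, NULL); */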

 surfaces should be
 able to hint at usage - eg i want to be on the biggest tv. i want to be
 wherever you have a small mobile touch screen etc. compositor deals with
 deciding where they would go based on the current state of the world
 screen-wise and app hints.

Right. So if we do have this client-intent-led interface (which would
definitely be the most Wayland-y approach), then we don't need to
advertise output types and wl_output already deals with the rest, so
no change required here?

  One area we could improve on X for output configuration is in how
  displays are selected for a given application's surface.  A suggestion
  was type descriptors for outputs, such as laptop display,
  television, projector, etc. so that surfaces could express an output
  type affinity.  Then a movie application could 

Re: EFL/Wayland and xdg-shell

2015-04-15 Thread Jasper St. Pierre
Yeah, that was extremely uncalled for. Was a difficult day at work,
and I was already cranky. I messed up, that was my fault, and I
apologize.

On Wed, Apr 15, 2015 at 12:31 PM, Daniel Stone dan...@fooishbar.org wrote:
 On 14 April 2015 at 04:19, Jasper St. Pierre jstpie...@mecheye.net wrote:
 Boo hoo.

 you're the only ones who want physically-based rendering raytraced
 desktops.

 Enlightenment is absolutely nothing like my desktop environment of
 choice either, but this is staggeringly unnecessary. If you want
 xdg_shell to actually just be gtk_shell_base, and be totally
 unencumbered by the shackles of ever having to work with anyone else,
 this is definitely the way to go about it. But core protocol
 development is not that.

 Be nice.



-- 
  Jasper
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput 0/6] Middle mouse button emulation

2015-04-15 Thread Peter Hutterer
On Wed, Apr 15, 2015 at 07:32:01AM -0700, Bill Spitzak wrote:
 Is this being done for physical mice with two buttons? The comments seem to
 indicate not, which seems kind of pointless: if a client has to emulate it
 for a physical mouse it will be emulating it for touchpads as well.

not quite, there are two parts to it: configuration and the actual
emulation. You may get both or just the emulation; if you have both you can
turn emulation on/off, otherwise it's always on. in the current patchset:

* if a device has left+right but not middle, emulation is enabled by
  default but no configuration option is present (i.e. cannot be turned off)
* if a touchpad has a middle button, no emulation or config option 
* if a non-touchpad device has a middle button, emulation is disabled by
  default but can be enabled through the config option
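
For illustration, the configuration half corresponds to libinput's
middle-emulation config calls, roughly like this sketch (check
availability before configuring):

/* Enable emulation on a device that exposes the config option,
 * e.g. a non-touchpad device with a real middle button. */
if (libinput_device_config_middle_emulation_is_available(device))
	libinput_device_config_middle_emulation_set_enabled(
		device, LIBINPUT_CONFIG_MIDDLE_EMULATION_ENABLED);
/* Devices without the option either emulate unconditionally
 * (left+right only devices) or never (touchpads with a middle button). */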

 Delaying a press to see if it is a middle click is also probably annoying.
 It should send the down event for the first button, then the down event for
 middle when the second button is pressed (ie the client will see it as a
 left+middle or right+middle click).

yeah, no. you now have the onus on the client to guess whether it needs to
filter the left click or not.

Cheers,
   Peter


 
 On 04/14/2015 08:51 PM, Peter Hutterer wrote:
 
 we already had middle mouse button emulation for softbuttons, but not on
 physical buttons. This patchset adds support for physical buttons.
 
 Couple of general notes:
 * some devices, e.g. 2-button touchpads, have middle button emulation
enabled, but not exposed as config option
 * if you have a middle button, emulation is always off by default.
 * pressing anything but left/right will stop emulation for this sequence, so
you can't do an emulated middle button + back button or something. I'm not
aware of any workflow that actually requires this.
 
 Cheers,
Peter
 
 
 ___
 wayland-devel mailing list
 wayland-devel@lists.freedesktop.org
 http://lists.freedesktop.org/mailman/listinfo/wayland-devel
 
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: EFL/Wayland and xdg-shell

2015-04-15 Thread The Rasterman
On Wed, 15 Apr 2015 20:29:32 +0100 Daniel Stone dan...@fooishbar.org said:

 Hi,
 Replies to both here ...
 
 On 15 April 2015 at 02:39, Carsten Haitzler ras...@rasterman.com wrote:
  On Tue, 14 Apr 2015 01:31:56 +0100 Daniel Stone dan...@fooishbar.org said:
  On 14 April 2015 at 01:02, Bryce Harrington br...@osg.samsung.com wrote:
   While window rotation was used more as an example of how built-in
   assumptions in the API could unintentionally constrain D-E's, than as a
   seriously needed feature, they did describe a number of ideas for rather
   elaborate window behaviors:
  
 * Rotation animations with frame updates to allow widget re-layouts
   while the window is rotating.
 
  So not just animating the transition, but requesting the client
  animate the content as well? That's extremely esoteric, and seems like
  it belongs in a separate extension - which is possible.
 
  not esoteric - an actual request from people making products.
 
 The reason I took that as 'esoteric' was that I assumed it was about
 free window rotation inside Weston: a feature which is absolutely
 pointless but as a proof-of-concept for a hidden global co-ordinate
 space. Makes a lot more sense for whole-display rotation. More below.

not just whole display - but now imagine a table with a screen and touch, and 4
people around it, one along each side, with multiple windows floating about like
scraps of paper... just an illustration of where you'd want window-by-window
rotation done by the compositor as well.

   There was lots of interest in hearing more about Wayland's plans for
   text-cursor-position and input-method, which are necessary for Asian
   languages.
 
  It's sadly been unmaintained for a while.
 
   A particular question was how clients could coordinate with
   the virtual keyboard input window so that it doesn't overlay where text
   is being inserted.
 
  See the text_cursor_position protocol.
 
  actually the other way around... clients know where the vkbd region(s) are
  so client can shuffle content to be visible. :)
 
 In a VKB (rather than overlay-helper, as used for complex composition)
 scenario, I would expect xdg-shell to send a configure event to resize
 the window and allow for the VKB. If this isn't sufficient - I
 honestly don't know what the behaviour is under X11 - then a potential
 version bump of wl_text could provide for this.

no - resizing is a poorer solution. tried that in x11. first obvious port of
call. imagine vkbd is partly translucent... you want it to still be over window
content. imagine a kbd split onto left and right halves, one in the middle of
the left and right edges of the screen (because screen is bigger). :)

  RandR is a disaster of an API to expose to clients. I would suggest
  that anything more than a list of monitors (not outputs/connectors)
  with their resolutions, relative monitor positioning, and the ability
  to change/disable the above, is asking for trouble.
 
  agreed - exposing randr is not sane. it's an internal compositor matter at
  this level of detail (if compositor chooses to have a protocol, do it all
  itself internally etc. is up to it, but any tool to configure screen output
  at this level would be compositor specific).
 
  what i do think is needed is a list of screens with some kind of types
  attached and rough metadata like one screen is left or right of another (so
  clients like flight simulators could ask to have special surface on the
  left/right screens showing views out the sides of the cockpit and middle
  screen is out the front). something like:
 
  0 desktop primary
  1 desktop left_of primary
  2 desktop right_of primary
  3 mobile detached
  4 tv above primary
 
  (pretend a phone with 4 external monitors attached).
 
 Hell of a phone. More seriously, yes, a display-management API could
 expose this, however if the aim is for clients to communicate intent
 ('this is a presentation') rather than for compositors to communicate
 situation ('this is one of the external monitors'), then we probably
 don't need this. wl_output already provides the relative geometry, so
 all that is required for this is a way to communicate output type.

i was thinking a simplified geometry. then again client toolkits can figure
that out and present a simplified enum or what not to the app too. but yes -
some enumerated type attached to the output would be very nice. smarter
clients can decide their intent based on what is listed as available - adapt to
the situation. dumber ones will just ask for a fixed type and deal with it
if they don't get it.

  perhaps listing
  resolution, rotation and dpi as well (pick the screen with the biggest
  physical size or highest dpi - adjust window contents based on screen
  rotation - eg so some controls are always facing the bottom edge of the
  screen where some button controls are - the screen shows the legend text).
 
  apps should not be configuring any of this. it's read-only.
 
 This already exists today - 

Re: [PATCH libinput] touchpad: only pair internal trackpoint devices with internal touchpads

2015-04-15 Thread Peter Hutterer
fwiw, please don't do a reply to list only, it makes it harder to track
emails.

On Wed, Apr 15, 2015 at 07:17:35AM -0700, Bill Spitzak wrote:
 On 04/14/2015 09:01 PM, Peter Hutterer wrote:
 Internal touchpads with trackpoints are either BUS_I8042 or BUS_I2C, but not
 BUS_USB. Lenovo sells external keyboards with a trackpoint built-in; make 
 sure
 we don't pair that trackpoint with the internal touchpad.
 And likewise, the internal trackpoint should not be paired with e.g. a wacom
 touch device.
 
 Lenovo had one external device that has a trackpoint and a touchpad on an
 external keyboard. That device won't be covered by this patch; if we have a
 user we can re-consider.
 
 Would checking if both devices are on the same bus (rather than not on the
 USB bus) work for the systems you have, and also cover this case?
 
 (i.e. use bus_tp == bus_trp instead of tp_is_internal && trp_is_internal)
 
 Pardon me if I have no idea what I am talking about.

nah, good call, we had that idea too. there was the faint memory of some
synaptics device that couldn't handle the trackstick on i2c and went through
ps/2, so that may have broken this approach. in the end we went with this
approach since it covered that (though possibly obsolete) case too.

Cheers,
   Peter

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput 0/6] Middle mouse button emulation

2015-04-15 Thread Bill Spitzak


On 04/15/2015 02:46 PM, Peter Hutterer wrote:

On Wed, Apr 15, 2015 at 07:32:01AM -0700, Bill Spitzak wrote:

Is this being done for physical mice with two buttons? The comments seem to
indicate not, which seems kind of pointless: if a client has to emulate it
for a physical mouse it will be emulating it for touchpads as well.


not quite, there are two parts to it: configuration and the actual
emulation. You may get both or just the emulation; if you have both you can
turn emulation on/off, otherwise it's always on. in the current patchset:

* if a device has left+right but not middle, emulation is enabled by
   default but no configuration option is present (i.e. cannot be turned off)
* if a touchpad has a middle button, no emulation or config option
* if a non-touchpad device has a middle button, emulation is disabled by
   default but can be enabled through the config option


I would check on Windows or Mac; I think it is pretty common that 
emulation works whether or not there is a real middle button, maybe with 
the ability to turn it off.



Delaying a press to see if it is a middle click is also probably annoying.
It should send the down event for the first button, then the down event for
middle when the second button is pressed (ie the client will see it as a
left+middle or right+middle click).


yeah, no. you now have the onus on the client to guess whether it needs to
filter the left click or not.


If there is a fixed latency between a button being pressed and the 
result (because it is waiting to see if it is a middle button emulation) 
then it is going to look slow to the user. This is especially annoying 
because single button clicks are probably more common than middle button 
emulation.


Alternatively the latency will be so short that it will be difficult to 
get an actual middle mouse button emulation.

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: EFL/Wayland and xdg-shell

2015-04-15 Thread Bill Spitzak

On 04/15/2015 03:51 PM, Carsten Haitzler (The Rasterman) wrote:


i was thinking a simplified geometry. then again client toolkits can figure
that out and present a simplified enum or what not to the app too. but yes -
some enumerated type attached to the output would be very nice. smarter
clients can decide their intent based on what is listed as available - adapt to
the situation. dumber ones will just ask for a fixed type and deal with it
if they don't get it.

well the problem here is the client is not aware of the current situation. is
that output on the right a tv on the other side of the room, or a projector, or
perhaps an internal lcd panel? is it far from the user or touchable (touch
surface). if it's touchable the app may alter ui (make buttons bigger - remove
scrollbars to go into a touch ui mode as opposed to mouse-driven...). maybe app
is written for multitouch controls specifically and thus a display far from the
user with a single mouse only will make the app useless? app should be able
to know what TYPE of display it is on - what types are around and be able to
ask for a type (may or may not get it). important thing is introducing the
concept of a type and attaching it to outputs (and hints on surfaces).


Another reason for the client to know the type is so it can remember 
it and use it to place a window later.


For instance the user may move the window to where she wants it and then 
do a "remember where the window is" command to the client. Then when the 
client is run next time, it puts the window on the same output as 
before. So the client must be able to query the type of the output the 
surface is on. For I hope obvious reasons it is not acceptable for the 
user to have to choose the type manually, thus there has to be a query 
to determine the type of the output a surface is on.


This may also mean that all outputs have to produce a different type 
(ie if the user has two projectors they are not going to be happy if 
software can't remember which, so they must be different type values), 
and there have to be rules for matching types (so if it is run on a 
system with only one projector then both types end up on that one 
projector).

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput 0/6] Middle mouse button emulation

2015-04-15 Thread Peter Hutterer
On Wed, Apr 15, 2015 at 06:05:09PM -0700, Bill Spitzak wrote:
 
 On 04/15/2015 02:46 PM, Peter Hutterer wrote:
 On Wed, Apr 15, 2015 at 07:32:01AM -0700, Bill Spitzak wrote:
 Is this being done for physical mice with two buttons? The comments seem to
 indicate not, which seems kind of pointless: if a client has to emulate it
 for a physical mouse it will be emulating it for touchpads as well.
 
 not quite, there are two parts to it: configuration and the actual
 emulation. You may get both or just the emulation; if you have both you can
 turn emulation on/off, otherwise it's always on. in the current patchset:
 
 * if a device has left+right but not middle, emulation is enabled by
default but no configuration option is present (i.e. cannot be turned off)
 * if a touchpad has a middle button, no emulation or config option
 * if a non-touchpad device has a middle button, emulation is disabled by
default but can be enabled through the config option
 
 I would check on Windows or Mac; I think it is pretty common that emulation
 works whether or not there is a real middle button, maybe with the ability
 to turn it off.
 
 Delaying a press to see if it is a middle click is also probably annoying.
 It should send the down event for the first button, then the down event for
 middle when the second button is pressed (ie the client will see it as a
 left+middle or right+middle click).
 
 yeah, no. you now have the onus on the client to guess whether it needs to
 filter the left click or not.
 
 If there is a fixed latency between a button being pressed and the result
 (because it is waiting to see if it is a middle button emulation) then it is
 going to look slow to the user. This is especially annoying because single
 button clicks are probably more common than middle button emulation.

which is why we only enable it by default where it is definitely needed.

Cheers,
   Peter

 Alternatively the latency will be so short that it will be difficult to get
 an actual middle mouse button emulation.
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


How about input method for CJK at present?

2015-04-15 Thread Leslie Zhai

Hi wayland developers,

I found that Jan Arne Petersen implemented an extended text protocol for 
IBus support in 2013 
http://lists.freedesktop.org/archives/wayland-devel/2013-April/008524.html
But upstream did not merge it, so I want to know the status of input 
method support for CJK; it might be the same situation for the Fcitx 
wayland branch https://github.com/fcitx/fcitx/tree/wayland


Then it is better to confirm whether to implement it by migrating that 
work or to just ignore it ;-)


--
Regards,
Leslie Zhai

___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH weston] Don't manipulate lists while traversing the list (was: remove all running animations...)

2015-04-15 Thread Pekka Paalanen
On Tue, 14 Apr 2015 13:45:12 -0700
Bryce Harrington br...@osg.samsung.com wrote:

 On Tue, Apr 14, 2015 at 09:41:13AM +0300, Pekka Paalanen wrote:
  On Mon, 13 Apr 2015 22:47:00 -0500
  Derek Foreman der...@osg.samsung.com wrote:
  
   On 13/04/15 07:15 AM, Pekka Paalanen wrote:
Maybe the closing animation ending should just schedule an idle task to
destroy the surface?
   
   I don't think I can think of a valid reason to disagree - would you like
   me to fix it that way?
  
  I think that would be the best, yes. We could make it a rule of thumb:
  never destroy weston_views or weston_surfaces from animation hooks.
  With that simple rule in place, all similar future bugs would be
  avoided. It would be a generic solution to a generic class of bugs.
  
  That rule has a lot of potential for further generalizations, because
  the essence of the problem is manipulating a list while traversing the
  list. Don't destroy weston_views from weston_view signals etc.
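
For reference, a minimal sketch of the idle-task approach suggested above
(wl_event_loop_add_idle() and weston_surface_destroy() exist; the
animation-done hook and its naming here are illustrative):

#include <wayland-server.h>

static void
destroy_surface_idle(void *data)
{
	struct weston_surface *surface = data;

	weston_surface_destroy(surface);
}

static void
close_animation_done(struct weston_view_animation *animation, void *data)
{
	struct weston_surface *surface = data;
	struct wl_event_loop *loop =
		wl_display_get_event_loop(surface->compositor->wl_display);

	/* Defer destruction instead of destroying the surface here,
	 * while the animation list is still being walked. */
	wl_event_loop_add_idle(loop, destroy_surface_idle, surface);
}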
 
 
 
 From e12a186ab032d02edd8673f194f5c3a0ffe8ce93 Mon Sep 17 00:00:00 2001
 From: Bryce Harrington br...@osg.samsung.com
 Date: Tue, 14 Apr 2015 13:43:06 -0700
 Subject: [PATCH] notes: Don't manipulate lists while traversing the list
 
 Signed-off-by: Bryce Harrington br...@osg.samsung.com
 ---
  notes.txt | 10 ++
  1 file changed, 10 insertions(+)
 
 diff --git a/notes.txt b/notes.txt
 index e49052b..ba434a3 100644
 --- a/notes.txt
 +++ b/notes.txt
 @@ -5,6 +5,16 @@ mentioned. Let's keep these in date order, newest first.
  
  
  ---
 +2015-04-14; Pekka Paalanen ppaala...@gmail.com
 +http://lists.freedesktop.org/archives/wayland-devel/2015-April/021309.html
 +
 +Never destroy weston_views or weston_surfaces from animation hooks.
 +Never destroy weston_views from weston_view signals.
 +
 +Basically, never manipulate a list while traversing it.
 +
 +
 +---
  2012-10-23; Pekka Paalanen ppaala...@gmail.com
  http://lists.freedesktop.org/archives/wayland-devel/2012-October/005969.html

I wonder if anyone is ever going to read that, but sure, commit away.


Thanks,
pq
___
wayland-devel mailing list
wayland-devel@lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/wayland-devel


Re: [PATCH libinput 1/6] evdev: add evdev_pointer_notify_physical_button

2015-04-15 Thread Hans de Goede

Hi,

For some reason some of the patches have not made it to my mailbox,
so I'm reviewing the entire set here, partially from the web archive.

Patches 1 and 2 look good and are:

Reviewed-by: Hans de Goede hdego...@redhat.com

The state machine code in patch 3 has some issues:

In evdev_middlebutton_ldown_handle_event() you do:

+	case MIDDLEBUTTON_EVENT_R_UP:
+		break;

But in evdev_middlebutton_rdown_handle_event() you do:

+	case MIDDLEBUTTON_EVENT_L_UP:
+		middlebutton_state_error(device, event);
+		break;

This is not consistent. Also you forget to cancel the timer
in a whole bunch of code paths, one example is:

Press the left button and release it before the timeout; now we're
back in idle with the timer still running, and we have:

+evdev_middlebutton_idle_handle_event(struct evdev_device *device,
+				     uint64_t time,
+				     enum evdev_middlebutton_event event)

+	case MIDDLEBUTTON_EVENT_TIMEOUT:
+		middlebutton_state_error(device, event);
+		break;

Triggering. I suggest doing what we've ended up doing in most other
state machines and have a set_state helper which is the only code
to ever touch device->middlebutton.state. That helper can then always
cancel the timer when called, and (re)set the timer and
device->middlebutton.first_event_time for the states where we want
to start the timer on entering the state.
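
For illustration, a sketch of such a helper (assuming libinput's internal
libinput_timer_set()/libinput_timer_cancel() helpers; the state names and
the MIDDLEBUTTON_TIMEOUT constant follow the patchset and may differ):

static void
middlebutton_set_state(struct evdev_device *device,
		       enum evdev_middlebutton_state state,
		       uint64_t now)
{
	/* Cancelling unconditionally closes the stale-timer paths
	 * described above. */
	libinput_timer_cancel(&device->middlebutton.timer);

	switch (state) {
	case MIDDLEBUTTON_LEFT_DOWN:
	case MIDDLEBUTTON_RIGHT_DOWN:
		/* states that wait for the second button (re)arm the timer */
		libinput_timer_set(&device->middlebutton.timer,
				   now + MIDDLEBUTTON_TIMEOUT);
		device->middlebutton.first_event_time = now;
		break;
	default:
		break;
	}

	device->middlebutton.state = state;
}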

Also you may want to make the sleep in litest_timeout_middlebutton
slightly larger; in all other litest_timeout functions the
sleep is somewhat longer than the timeout in libinput to avoid
surprises / races.

Patches 4 - 6 look good and are:

Reviewed-by: Hans de Goede hdego...@redhat.com

Regards,

Hans


On 15-04-15 05:51, Peter Hutterer wrote:

No functional changes at this point; this merely splits out physical
buttons (i.e. buttons that physically exist on the device) from other
buttons that are emulated in some way or another.

This is in preparation for the addition of middle button emulation.

Signed-off-by: Peter Hutterer peter.hutte...@who-t.net
---
  src/evdev-mt-touchpad-buttons.c |  8 
  src/evdev.c | 17 +
  src/evdev.h |  5 +
  3 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/src/evdev-mt-touchpad-buttons.c b/src/evdev-mt-touchpad-buttons.c
index 18c32fd..56a054c 100644
--- a/src/evdev-mt-touchpad-buttons.c
+++ b/src/evdev-mt-touchpad-buttons.c
@@ -734,10 +734,10 @@ tp_post_physical_buttons(struct tp_dispatch *tp, uint64_t time)
state = LIBINPUT_BUTTON_STATE_RELEASED;

		b = evdev_to_left_handed(tp->device, button);
-		evdev_pointer_notify_button(tp->device,
-					    time,
-					    b,
-					    state);
+		evdev_pointer_notify_physical_button(tp->device,
+						     time,
+						     b,
+						     state);
}

button++;
diff --git a/src/evdev.c b/src/evdev.c
index 5b4b2b6..6f87484 100644
--- a/src/evdev.c
+++ b/src/evdev.c
@@ -138,6 +138,15 @@ evdev_keyboard_notify_key(struct evdev_device *device,
  }

  void
+evdev_pointer_notify_physical_button(struct evdev_device *device,
+				     uint32_t time,
+				     int button,
+				     enum libinput_button_state state)
+{
+	evdev_pointer_notify_button(device, time, button, state);
+}
+
+void
  evdev_pointer_notify_button(struct evdev_device *device,
uint32_t time,
int button,
@@ -430,10 +439,10 @@ evdev_button_scroll_button(struct evdev_device *device,
} else {
/* If the button is released quickly enough emit the
 * button press/release events. */
-		evdev_pointer_notify_button(device, time,
					    device->scroll.button,
					    LIBINPUT_BUTTON_STATE_PRESSED);
+		evdev_pointer_notify_physical_button(device, time,
						     device->scroll.button,
						     LIBINPUT_BUTTON_STATE_PRESSED);
-		evdev_pointer_notify_button(device, time,
					    device->scroll.button,
					    LIBINPUT_BUTTON_STATE_RELEASED);
@@ -505,7 +514,7 @@ evdev_process_key(struct evdev_device *device,
		evdev_button_scroll_button(device, time, e->value);
break;
}
-