[Intel-gfx] [PATCH 3/3] agp/intel: Fix cache control for Sandybridge

2010-08-26 Thread Zhenyu Wang
Sandybridge GTT has new cache control bits in its PTEs, which control whether
graphics pages are cached in LLC or in LLC/MLC. This patch adds a new type
mask function for that, and sets the cache control to LLC only by default on
Gen6.

Signed-off-by: Zhenyu Wang 
---
 drivers/char/agp/intel-agp.c|1 +
 drivers/char/agp/intel-gtt.c|   50 +++---
 drivers/gpu/drm/i915/i915_gem.c |1 +
 3 files changed, 42 insertions(+), 10 deletions(-)

diff --git a/drivers/char/agp/intel-agp.c b/drivers/char/agp/intel-agp.c
index 710af89..74461d1 100644
--- a/drivers/char/agp/intel-agp.c
+++ b/drivers/char/agp/intel-agp.c
@@ -12,6 +12,7 @@
 #include 
 #include "agp.h"
 #include "intel-agp.h"
+#include <linux/intel-gtt.h>
 
 #include "intel-gtt.c"
 
diff --git a/drivers/char/agp/intel-gtt.c b/drivers/char/agp/intel-gtt.c
index 0edfc87..68cf2cf 100644
--- a/drivers/char/agp/intel-gtt.c
+++ b/drivers/char/agp/intel-gtt.c
@@ -49,6 +49,26 @@ static struct gatt_mask intel_i810_masks[] =
 .type = INTEL_AGP_CACHED_MEMORY}
 };
 
+#define INTEL_AGP_UNCACHED_MEMORY              0
+#define INTEL_AGP_CACHED_MEMORY_LLC            1
+#define INTEL_AGP_CACHED_MEMORY_LLC_GFDT       2
+#define INTEL_AGP_CACHED_MEMORY_LLC_MLC        3
+#define INTEL_AGP_CACHED_MEMORY_LLC_MLC_GFDT   4
+
+static struct gatt_mask intel_gen6_masks[] =
+{
+   {.mask = I810_PTE_VALID | GEN6_PTE_UNCACHED,
+.type = INTEL_AGP_UNCACHED_MEMORY },
+   {.mask = I810_PTE_VALID | GEN6_PTE_LLC,
+ .type = INTEL_AGP_CACHED_MEMORY_LLC },
+   {.mask = I810_PTE_VALID | GEN6_PTE_LLC | GEN6_PTE_GFDT,
+ .type = INTEL_AGP_CACHED_MEMORY_LLC_GFDT },
+   {.mask = I810_PTE_VALID | GEN6_PTE_LLC_MLC,
+ .type = INTEL_AGP_CACHED_MEMORY_LLC_MLC },
+   {.mask = I810_PTE_VALID | GEN6_PTE_LLC_MLC | GEN6_PTE_GFDT,
+ .type = INTEL_AGP_CACHED_MEMORY_LLC_MLC_GFDT },
+};
+
 static struct _intel_private {
struct pci_dev *pcidev; /* device one */
u8 __iomem *registers;
@@ -178,13 +198,6 @@ static void intel_agp_insert_sg_entries(struct agp_memory *mem,
off_t pg_start, int mask_type)
 {
int i, j;
-   u32 cache_bits = 0;
-
-   if (agp_bridge->dev->device == PCI_DEVICE_ID_INTEL_SANDYBRIDGE_HB ||
-   agp_bridge->dev->device == PCI_DEVICE_ID_INTEL_SANDYBRIDGE_M_HB)
-   {
-   cache_bits = GEN6_PTE_LLC_MLC;
-   }
 
for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
writel(agp_bridge->driver->mask_memory(agp_bridge,
@@ -317,6 +330,23 @@ static int intel_i830_type_to_mask_type(struct agp_bridge_data *bridge,
return 0;
 }
 
+static int intel_gen6_type_to_mask_type(struct agp_bridge_data *bridge,
+   int type)
+{
+   unsigned int type_mask = type & ~AGP_USER_CACHED_MEMORY_GFDT;
+   unsigned int gfdt = type & AGP_USER_CACHED_MEMORY_GFDT;
+
+   if (type_mask == AGP_USER_UNCACHED_MEMORY)
+   return INTEL_AGP_UNCACHED_MEMORY;
+   else if (type_mask == AGP_USER_CACHED_MEMORY_LLC_MLC)
+   return gfdt ? INTEL_AGP_CACHED_MEMORY_LLC_MLC_GFDT :
+ INTEL_AGP_CACHED_MEMORY_LLC_MLC;
+   else /* set 'normal'/'cached' to LLC by default */
+   return gfdt ? INTEL_AGP_CACHED_MEMORY_LLC_GFDT :
+ INTEL_AGP_CACHED_MEMORY_LLC;
+}
+
+
 static int intel_i810_insert_entries(struct agp_memory *mem, off_t pg_start,
int type)
 {
@@ -1163,7 +1193,7 @@ static int intel_i915_insert_entries(struct agp_memory *mem, off_t pg_start,
 
mask_type = agp_bridge->driver->agp_type_to_mask_type(agp_bridge, type);
 
-   if (mask_type != 0 && mask_type != AGP_PHYS_MEMORY &&
+   if (!IS_SNB && mask_type != 0 && mask_type != AGP_PHYS_MEMORY &&
mask_type != INTEL_AGP_CACHED_MEMORY)
goto out_err;
 
@@ -1563,7 +1593,7 @@ static const struct agp_bridge_driver intel_gen6_driver = {
.fetch_size = intel_i9xx_fetch_size,
.cleanup= intel_i915_cleanup,
.mask_memory= intel_gen6_mask_memory,
-   .masks  = intel_i810_masks,
+   .masks  = intel_gen6_masks,
.agp_enable = intel_i810_agp_enable,
.cache_flush= global_cache_flush,
.create_gatt_table  = intel_i965_create_gatt_table,
@@ -1576,7 +1606,7 @@ static const struct agp_bridge_driver intel_gen6_driver = {
.agp_alloc_pages= agp_generic_alloc_pages,
.agp_destroy_page   = agp_generic_destroy_page,
.agp_destroy_pages  = agp_generic_destroy_pages,
-   .agp_type_to_mask_type  = intel_i830_type_to_mask_type,
+   .agp_type_to_mask_type  = intel_gen6_type_to_mask_type,
.chipset_flush  = intel_i915_chipset_flush,
 #ifdef USE_PCI_DMA_API
.agp_map_page   
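For illustration only (a standalone sketch, not the driver code): the mask table
above supplies per-type cache-control bits, intel_gen6_type_to_mask_type()
reduces an AGP user type to an index into that table, and
intel_agp_insert_sg_entries() ORs the selected bits into each GTT PTE together
with the page address and the valid bit. The numeric bit values below are
placeholders assumed for the example; the real GEN6_PTE_* encodings live in
intel-agp.h.

#include <stdint.h>
#include <stdio.h>

enum { UNCACHED, LLC, LLC_GFDT, LLC_MLC, LLC_MLC_GFDT };

#define PTE_VALID  (1u << 0)                 /* stands in for I810_PTE_VALID */
#define PTE_GFDT   (1u << 3)                 /* stands in for GEN6_PTE_GFDT  */

static const uint32_t gen6_cache_bits[] = {  /* mirrors intel_gen6_masks[]   */
	[UNCACHED]     = 1u << 1,            /* GEN6_PTE_UNCACHED placeholder */
	[LLC]          = 2u << 1,            /* GEN6_PTE_LLC placeholder      */
	[LLC_GFDT]     = (2u << 1) | PTE_GFDT,
	[LLC_MLC]      = 3u << 1,            /* GEN6_PTE_LLC_MLC placeholder  */
	[LLC_MLC_GFDT] = (3u << 1) | PTE_GFDT,
};

/* Same decision as intel_gen6_type_to_mask_type(): anything "cached" that
 * does not explicitly ask for LLC/MLC defaults to LLC only on Gen6. */
static int gen6_type_to_mask(int want_llc_mlc, int want_gfdt)
{
	if (want_llc_mlc)
		return want_gfdt ? LLC_MLC_GFDT : LLC_MLC;
	return want_gfdt ? LLC_GFDT : LLC;
}

int main(void)
{
	uint32_t page = 0x12345000;              /* page-aligned address     */
	int idx = gen6_type_to_mask(0, 0);       /* plain cached -> LLC only */
	uint32_t pte = page | gen6_cache_bits[idx] | PTE_VALID;

	printf("PTE = 0x%08x (LLC-cached by default on Gen6)\n", (unsigned int)pte);
	return 0;
}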

[Intel-gfx] [PATCH 2/3] agp/intel: add new intel-gtt.h for new GTT controls

2010-08-26 Thread Zhenyu Wang
New Intel gfx devices require new GTT controls. Add them under the
kernel include dir so they can be shared with the drm/i915 driver.

Signed-off-by: Zhenyu Wang 
---
 include/linux/intel-gtt.h |   20 
 1 files changed, 20 insertions(+), 0 deletions(-)
 create mode 100644 include/linux/intel-gtt.h

diff --git a/include/linux/intel-gtt.h b/include/linux/intel-gtt.h
new file mode 100644
index 0000000..1d19ab2
--- /dev/null
+++ b/include/linux/intel-gtt.h
@@ -0,0 +1,20 @@
+/*
+ * Common Intel AGPGART and GTT definitions.
+ */
+#ifndef _INTEL_GTT_H
+#define _INTEL_GTT_H
+
+#include <linux/agp_backend.h>
+
+/* This is for Intel only GTT controls.
+ *
+ * Sandybridge: AGP_USER_CACHED_MEMORY default to LLC only
+ */
+
+#define AGP_USER_CACHED_MEMORY_LLC_MLC (AGP_USER_TYPES + 2)
+#define AGP_USER_UNCACHED_MEMORY (AGP_USER_TYPES + 4)
+
+/* flag for GFDT type */
+#define AGP_USER_CACHED_MEMORY_GFDT (1 << 3)
+
+#endif
-- 
1.7.0.4
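As a companion illustration (a hedged sketch, not code from the series): this
is how a GTT user such as drm/i915 could combine the new Intel-only types from
this header with the GFDT flag, and how the Gen6 helper in patch 3/3 splits
them apart again. The AGP_USER_TYPES base value is repeated here only so the
snippet builds on its own; the real definition lives in <linux/agp_backend.h>.

#include <stdio.h>

/* Stand-ins so this builds outside the kernel; treat the values as assumptions. */
#define AGP_USER_TYPES                  (1 << 16)
#define AGP_USER_CACHED_MEMORY_LLC_MLC  (AGP_USER_TYPES + 2)
#define AGP_USER_CACHED_MEMORY_GFDT     (1 << 3)

int main(void)
{
	/* Request LLC/MLC caching with the GFDT hint set... */
	int type = AGP_USER_CACHED_MEMORY_LLC_MLC | AGP_USER_CACHED_MEMORY_GFDT;

	/* ...and decompose it the way intel_gen6_type_to_mask_type() does. */
	int base = type & ~AGP_USER_CACHED_MEMORY_GFDT;
	int gfdt = type &  AGP_USER_CACHED_MEMORY_GFDT;

	printf("base type %#x, GFDT %s\n", base, gfdt ? "set" : "clear");
	return 0;
}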



[Intel-gfx] [PATCH 1/3] agp/intel: use #ifdef idiom for intel-agp.h

2010-08-26 Thread Zhenyu Wang
Signed-off-by: Zhenyu Wang 
---
 drivers/char/agp/intel-agp.h |4 
 1 files changed, 4 insertions(+), 0 deletions(-)

diff --git a/drivers/char/agp/intel-agp.h b/drivers/char/agp/intel-agp.h
index 08d4753..78124a8 100644
--- a/drivers/char/agp/intel-agp.h
+++ b/drivers/char/agp/intel-agp.h
@@ -1,6 +1,8 @@
 /*
  * Common Intel AGPGART and GTT definitions.
  */
+#ifndef _INTEL_AGP_H
+#define _INTEL_AGP_H
 
 /* Intel registers */
 #define INTEL_APSIZE   0xb4
@@ -244,3 +246,5 @@
	agp_bridge->dev->device == PCI_DEVICE_ID_INTEL_IRONLAKE_MA_HB || \
	agp_bridge->dev->device == PCI_DEVICE_ID_INTEL_IRONLAKE_MC2_HB || \
	IS_SNB)
+
+#endif
-- 
1.7.0.4



Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Felix Miata
On 2010/08/26 23:14 (GMT) Nasa composed:

> Given Meego's setup where is the appropriate place to put the xrandr 
> commands?  I was confused on this point when I tried that before...

I know nothing about Meego other than that you are using it. :-p

> X seems to be started from 3 files in /etc/X11/xinitrc (xinitrc, 
> xinitrc-common, and Xclients)

It worked for me in openSUSE 11.3 adding everything in xinitrc near the end,
right before the actual window manager startup commands. An alternative that
worked for me was using ~/.xinitrc and leaving /etc/X11/xinit/xinitrc untouched.

> I opted to add the lines to xinitrc thus:
...
> xrandr --newmode "840x480_60.00"   31.25  840 864 944 1048  480 483 493 500 
> -hsync +vsync
...

> Where I have it placed now doesn't work (after removing my edited xorg.conf, 
> I get
> the original resolution back).  So I must not understand where it should be 
> placed.
> Could you enlighten me, *pretty please*  :}

I did more experimenting. It worked for me with no xorg.conf, modifying
nothing in /etc/X11/xinit, and putting what used to work in the monitor
section of xorg.conf into /etc/X11/xorg.conf.d/50-monitor.conf.

http://fm.no-ip.com/Tmp/Linux/Xorg/50-monitor.conf.02-113-t2240-1600x1200x120-60_i845G
worked for me to get 1600x1200 instead of the 1024x768 default, but you may
need to add the above xrandr data as a Modeline in order to get your 800x480.
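
For instance, a minimal /etc/X11/xorg.conf.d/50-monitor.conf along those lines
might look like the following. This is only a sketch: it reuses the exact
modeline from the xrandr commands quoted above, assumes the output really is
named VGA1, and depending on the driver you may also need a Device section with
Option "Monitor-VGA1" pointing at this Monitor section.

Section "Monitor"
	Identifier "VGA1"
	Modeline   "840x480_60.00"  31.25  840 864 944 1048  480 483 493 500 -hsync +vsync
	Option     "PreferredMode" "840x480_60.00"
	# Add HorizSync/VertRefresh ranges here if your panel's manual provides them.
EndSection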
-- 
"The wise are known for their understanding, and pleasant
words are persuasive." Proverbs 16:21 (New Living Translation)

 Team OS/2 ** Reg. Linux User #211409

Felix Miata  ***  http://fm.no-ip.com/


Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Nasa

- "Felix Miata"  wrote:

> On 2010/08/26 10:47 (GMT) Nasa composed:
> 
> > I am trying to configure a rather obscure resolution (800x480) which
> is the
> > native resolution for my monitor.  The monitor doesn't provide EDID
> or DDC 
> > information (it's connected over VGA).  And the default settings by
> the 
> > driver produces displays that don't fit the monitor properly.  I
> would like
> > to construct a xorg.conf file with appropraite vertical refresh
> rates, horizontal
> > syncs, and/or modelines to work correctly with the monitor. 
> However, there
> > doesn't seem a way to turn off the driver defaults for those items.
> I tried options
> > like NoDDC, UseEDID, etc with no luck.  I also tried using xrandr to
> change 
> > resolutions after X has started.  The results end up being worse
> than the 
> > intial problem I was trying to fix (ie: the screen is bigger than
> the area
> > available to display it).  I expect my inability to find a suitable
> solution
> > is due to my lack of knowledge -- so I hope someone can fill me in
> to what 
> > I am missing.  Thanks in advance,
> 
> > Chipset:  945GM
> 
> From openSUSE 11.3 on i845G
> http://fm.no-ip.com/Tmp/Linux/Xorg/xinitrc-113-t2240-1600x1200v1920
> should
> serve as a template for you to construct an /etc/X11/xinit/xinitrc
> file with
> required xrandr commands included to be run _before_ $WINDOWMANAGER
> starts.
> 
> Note that the reason for preferring high refresh rates on CRT displays
> does
> not exist on flat panel displays. I've yet to find any flat panel
> that
> doesn't work perfectly in close proximity to 60 if not exactly 60.
> So,
> K.I.S.S., and use CVT and/or GTF without bothering with refresh spec.
> 'gtf
> 800 480' will default to 60, which would invariably be just right.
> 
> Supposedly xorg.conf can still be used with legacy Intel chips to do
> the same
> things as xinitrc/xrandr do, but I've yet to confirm it, succeeding
> only on
> MGA so far.

Given Meego's setup where is the appropriate place to put the xrandr 
commands?  I was confused on this point when I tried that before...

X seems to be started from 3 files in /etc/X11/xinitrc (xinitrc, 
xinitrc-common, and Xclients)

I opted to add the lines to xinitrc thus:

# Mandatorily source xinitrc-common, which is common code shared between the
# Xsession and xinitrc scripts which has been factored out to avoid duplication
. /etc/X11/xinit/xinitrc-common

xrandr --newmode "840x480_60.00"   31.25  840 864 944 1048  480 483 493 500 -hsync +vsync
xrandr --addmode VGA1 840x480_60.00
xrandr --output VGA1 --mode 840x480_60.00
xrandr --fbmm 152x91

# The user may have their own clients they want to run.  If they don't,
# fall back to system defaults.
if [ -f $HOME/.Xclients ]; then
exec $CK_XINIT_SESSION $SSH_AGENT $HOME/.Xclients || \
exec $CK_XINIT_SESSION $SSH_AGENT $HOME/.Xclients
elif [ -f /etc/X11/xinit/Xclients ]; then
exec $CK_XINIT_SESSION $SSH_AGENT /etc/X11/xinit/Xclients || \
exec $CK_XINIT_SESSION $SSH_AGENT /etc/X11/xinit/Xclients
else
# Failsafe settings.  Although we should never get here
# (we provide fallbacks in Xclients as well) it can't hurt.
[ -x /usr/bin/xsetroot ] && /usr/bin/xsetroot -solid '#222E45'
[ -x /usr/bin/xclock ] && /usr/bin/xclock -geometry 100x100-5+5 &
[ -x /usr/bin/xterm ] && xterm -geometry 80x50-50+150 &
[ -x /usr/bin/twm ] && /usr/bin/twm
fi



As there isn't a .Xclients file in the user directory, the
/etc/X11/xinit/Xclients script is what is executed next:

#!/bin/bash

GSESSION="$(which gnome-session 2>/dev/null)"
STARTKDE="$(which startkde 2>/dev/null)"
STARTXFCE="$(which startxfce4 2>/dev/null)"
STARTMOB="$(which startmoblin 2>/dev/null)"

# check to see if the user has a preferred desktop
PREFERRED=
if [ -f /etc/sysconfig/desktop ]; then
. /etc/sysconfig/desktop
if [ "$DESKTOP" = "GNOME" ]; then
PREFERRED="$GSESSION"
elif [ "$DESKTOP" = "KDE" ]; then
PREFERRED="$STARTKDE"
elif [ "$DESKTOP" = "XFCE" ]; then
PREFERRED="$STARTXFCE"
elif [ "$DESKTOP" = "MOBLIN" ]; then
PREFERRED="$STARTMOB"
fi
fi

if [ -n "$PREFERRED" ]; then
exec "$PREFERRED"
fi

# now if we can reach here, either no desktop file was present,
# or the desktop requested is not installed.

if [ -n "$STARTMOB" ]; then
# by default, we run Moblin.
exec "$STARTMOB"
elif [ -n "$STARTXFCE" ]; then
exec "$STARTXFCE"
elif [ -n "$GSESSION" ]; then
# if Moblin isn't installed, try gnome
exec "$GSESSION"
fi

# Failsafe.

# these files are left sitting around by TheNextLevel.
rm -f $HOME/Xrootenv.0

# Argh! Nothing good is installed. Fall back to twm
{
# gosh, neither fvwm95 nor fvwm2 is available; 
# fall back to failsafe settings
[ -x /usr/bin/xsetroot ] && /usr/bin/xsetroot -solid '#222E45'
if [ -x /usr/bin/xclock ] ; then
/usr/bin/xclock -geometry 100x100-5+5 &
elif [ -x /usr/bin/xclock ] ; then
/usr/bin/xclock -geometry 100

Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Felix Miata
On 2010/08/26 10:47 (GMT) Nasa composed:

> I am trying to configure a rather obscure resolution (800x480) which is the
> native resolution for my monitor.  The monitor doesn't provide EDID or DDC 
> information (it's connected over VGA).  And the default settings by the 
> driver produces displays that don't fit the monitor properly.  I would like
> to construct a xorg.conf file with appropraite vertical refresh rates, 
> horizontal
> syncs, and/or modelines to work correctly with the monitor.  However, there
> doesn't seem a way to turn off the driver defaults for those items. I tried 
> options
> like NoDDC, UseEDID, etc with no luck.  I also tried using xrandr to change 
> resolutions after X has started.  The results end up being worse than the 
> intial problem I was trying to fix (ie: the screen is bigger than the area
> available to display it).  I expect my inability to find a suitable solution
> is due to my lack of knowledge -- so I hope someone can fill me in to what 
> I am missing.  Thanks in advance,

> Chipset:  945GM

From openSUSE 11.3 on i845G
http://fm.no-ip.com/Tmp/Linux/Xorg/xinitrc-113-t2240-1600x1200v1920 should
serve as a template for you to construct an /etc/X11/xinit/xinitrc file with
required xrandr commands included to be run _before_ $WINDOWMANAGER starts.

Note that the reason for preferring high refresh rates on CRT displays does
not exist on flat panel displays. I've yet to find any flat panel that
doesn't work perfectly in close proximity to 60 if not exactly 60. So,
K.I.S.S., and use CVT and/or GTF without bothering with refresh spec. 'gtf
800 480' will default to 60, which would invariably be just right.

Supposedly xorg.conf can still be used with legacy Intel chips to do the same
things as xinitrc/xrandr do, but I've yet to confirm it, succeeding only on
MGA so far.
-- 
"The wise are known for their understanding, and pleasant
words are persuasive." Proverbs 16:21 (New Living Translation)

 Team OS/2 ** Reg. Linux User #211409

Felix Miata  ***  http://fm.no-ip.com/


Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Alan W. Irwin

On 2010-08-26 18:40- Nasa wrote:



- "Alan W. Irwin"  wrote:

[...]I have assumed above that the horizontal frequency limits have been
set correctly for your particular monitor.  That is not always the
case.  Check your monitor manual for the correct vertical and
horizontal frequency limits and if your X log shows those are not
being discovered properly by X, then specify the correct ranges using
the
VertRefresh and HorizSync values in the Monitor section.  In my case
I used

HorizSync   30-96
VertRefresh 48-120

corresponding to values published in my Sony g200 manual, but your
monitor manual is very likely to require different ranges.

Alan


Therein lies the problem.  I have created an xorg.conf file with
the settings you mention -- but the settings are ignored.  Trying to
tell Xorg not to use EDID/DDC/default values hasn't worked (see the first
part of my original message).  Worse yet, the log file doesn't report
what values it is using!  So it's been a guessing game...


Look through the log file for the "ranges" string.  For my modern (LCD
monitor on Debian testing) system the result is

(II) intel(0): Ranges: V min: 55 V max: 75 Hz, H min: 30 H max: 80 kHz, PixClock max 140 MHz

(II) intel(0): Using hsync ranges from config file
(II) intel(0): Using vrefresh ranges from config file

However, despite those messages, it turns out HorizSync (and probably
VertRefresh) in my xorg.conf are ignored, and instead the values
reported by the monitor are used. (The above values are consistent
with those reported on the web for my particular monitor.  I set those
same values using HorizSync and VertRefresh except that I specified a
smaller H max via HorizSync as an experiment and it was ignored.)

Ignoring the frequencies in xorg.conf didn't hurt in my modern
LCD/Debian-testing case, but it probably does in yours.  To confirm
that, what is the exact result you get for Ranges in the log file?
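If it helps, something along these lines (a hypothetical invocation; adjust
the log path for your distribution) pulls the relevant lines out of the
server log:

grep -iE "ranges|hsync|vrefresh" /var/log/Xorg.0.log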

In general, I am troubled by any misguided tendency of Intel
developers to remove xorg.conf capability.  Sure, it is nice to
generally not require that file at all by default, but when you really
need control for situations where bad values or no values are being
reported by a monitor, a fully capable xorg.conf file is absolutely
essential.  So let's hope this ignoring of frequencies specified in
xorg.conf (at least for my Debian testing Intel X stack)
is a temporary aberration by the Intel developers that has been fixed
in later versions.

Alan
__
Alan W. Irwin

Astronomical research affiliation with Department of Physics and Astronomy,
University of Victoria (astrowww.phys.uvic.ca).

Programming affiliations with the FreeEOS equation-of-state implementation
for stellar interiors (freeeos.sf.net); PLplot scientific plotting software
package (plplot.org); the libLASi project (unifont.org/lasi); the Loads of
Linux Links project (loll.sf.net); and the Linux Brochure Project
(lbproject.sf.net).
__

Linux-powered Science
__


Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Alan W. Irwin

On 2010-08-26 17:37- Nasa wrote:



- "Alan W. Irwin"  wrote:


On 2010-08-26 10:47- Nasa wrote:


Hi,


I am trying to configure a rather obscure resolution (800x480), which is the
native resolution for my monitor.  The monitor doesn't provide EDID or DDC
information (it's connected over VGA), and the default settings chosen by the
driver produce displays that don't fit the monitor properly.  I would like to
construct an xorg.conf file with appropriate vertical refresh rates,
horizontal syncs, and/or modelines to work correctly with the monitor.
However, there doesn't seem to be a way to turn off the driver defaults for
those items.  I tried options like NoDDC, UseEDID, etc. with no luck.  I also
tried using xrandr to change resolutions after X has started.  The results
end up being worse than the initial problem I was trying to fix (i.e. the
screen is bigger than the area available to display it).  I expect my
inability to find a suitable solution is due to my lack of knowledge -- so I
hope someone can fill me in on what I am missing.  Thanks in advance,


Earlier this year before I replaced my long-time Sony monitor with an
LCD, and upgraded from Debian Lenny to Debian testing, the results of
gtf and PreferredMode worked for me. For example, my xorg.conf file
for that monitor had the following lines in the Monitor section

#gtf 1024 768 85
# 1024x768 @ 85.00 Hz (GTF) hsync: 68.60 kHz; pclk: 94.39 MHz
Modeline "1024x768_85.00"  94.39  1024 1088 1200 1376  768 769 772
807
-HSync +Vsync
Option "PreferredMode" "1024x768_85.00"

Of course, instead of using the above example, you will want to run
something like

gtf 800 400 85

from the command line (man gtf), paste the results to your Monitor
section, and consistently update the identification of the mode used
by PreferredMode.

I emphasize that the above configuration lines worked for an old version of
the Intel driver (Debian Lenny), and I don't know whether they would work
for a modern version.  But it is worth a try.

Alan


Thanks Alan,

I actually attempted this via CVT, which ended up with horizontal sync out
of range errors.  Reading the man page for cvt didn't show any options to
set that.  Does GTF have this capability?


No, but it doesn't matter.  Play with either cvt or gtf (I don't think
there is any real difference between them) with a fixed resolution and a
varying vertical refresh rate, and you will see that the horizontal
sync frequency of the generated mode line is proportional to the
vertical refresh you specify.  So for your desired resolution, if the
generated horizontal sync is lower than your allowed range, increase
the vertical refresh until you have a value within the allowed range.
Or if it is above (extremely unlikely for such a low resolution),
reduce the vertical refresh.
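
Something like the following (untested, merely illustrating the trial-and-error
described above) makes the relationship visible: keep the resolution fixed,
vary the refresh, and watch the hsync figure printed with each generated
modeline until it lands inside your monitor's HorizSync range.

gtf 800 480 60    # note the "hsync:" value in the comment line gtf prints
gtf 800 480 75    # the hsync value scales roughly with the requested refresh
cvt 800 480 60    # cvt prints an equivalent modeline; either tool will do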

I have assumed above that the horizontal frequency limits have been
set correctly for your particular monitor.  That is not always the
case.  Check your monitor manual for the correct vertical and
horizontal frequency limits, and if your X log shows those are not
being discovered properly by X, then specify the correct ranges using the
VertRefresh and HorizSync values in the Monitor section.  In my case
I used

HorizSync   30-96
VertRefresh 48-120

corresponding to values published in my Sony g200 manual, but your
monitor manual is very likely to require different ranges.

Alan
__
Alan W. Irwin

Astronomical research affiliation with Department of Physics and Astronomy,
University of Victoria (astrowww.phys.uvic.ca).

Programming affiliations with the FreeEOS equation-of-state implementation
for stellar interiors (freeeos.sf.net); PLplot scientific plotting software
package (plplot.org); the libLASi project (unifont.org/lasi); the Loads of
Linux Links project (loll.sf.net); and the Linux Brochure Project
(lbproject.sf.net).
__

Linux-powered Science
__


Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Nasa

- "Alan W. Irwin"  wrote:

> On 2010-08-26 10:47- Nasa wrote:
> 
> > Hi,
> >
> >
> > I am trying to configure a rather obscure resolution (800x480) which
> is the
> > native resolution for my monitor.  The monitor doesn't provide EDID
> or DDC
> > information (it's connected over VGA).  And the default settings by
> the
> > driver produces displays that don't fit the monitor properly.  I
> would like
> > to construct a xorg.conf file with appropraite vertical refresh
> rates, horizontal
> > syncs, and/or modelines to work correctly with the monitor. 
> However, there
> > doesn't seem a way to turn off the driver defaults for those items.
> I tried options
> > like NoDDC, UseEDID, etc with no luck.  I also tried using xrandr to
> change
> > resolutions after X has started.  The results end up being worse
> than the
> > intial problem I was trying to fix (ie: the screen is bigger than
> the area
> > available to display it).  I expect my inability to find a suitable
> solution
> > is due to my lack of knowledge -- so I hope someone can fill me in
> to what
> > I am missing.  Thanks in advance,
> 
> Earlier this year before I replaced my long-time Sony monitor with an
> LCD, and upgraded from Debian Lenny to Debian testing, the results of
> gtf and PreferredMode worked for me. For example, my xorg.conf file
> for that monitor had the following lines in the Monitor section
> 
> #gtf 1024 768 85
> # 1024x768 @ 85.00 Hz (GTF) hsync: 68.60 kHz; pclk: 94.39 MHz
> Modeline "1024x768_85.00"  94.39  1024 1088 1200 1376  768 769 772
> 807
> -HSync +Vsync
> Option "PreferredMode" "1024x768_85.00"
> 
> Of course, intead of using the above example, you will want to run
> something like
> 
> gtf 800 400 85
> 
> from the command line (man gtf), paste the results to your Monitor
> section, and consistently update the identification of the mode used
> by PreferredMode.
> 
> I emphasize the above configuration lines worked for an old version of
> the
> Intel driver (Debian Lenny), and I don't know whether they  would
> work
> for a modern version.  But it is worth a try.
> 
> Alan

Thanks Alan,

I actually attempted this via CVT, which ended up with horizontal sync out
of range errors.  Reading the man page for cvt didn't show any options to
set that.  Does GTF have this capability?  I also tried an online modeline
creation site (the link eludes me, as I am at work right now) with slightly
better, but still not fully working, results.

Nasa


Re: [Intel-gfx] [PATCH] Reintegrate legacy UMS.

2010-08-26 Thread Chris Wilson
You probably heard the screams of terror at the prospect of adding 50k
lines of broken code. The result of that shock was that it did produce a
viable alternative: use a shadow fb and avoid dynamic reallocation and
associated incoherency issues.

The benefits of this approach are that we get KMS (hotplug, resizable
framebuffers, rotation, reliable suspend and resume), overlays (video),
reasonable performance, and continued development. You lose GL
acceleration. (But using GEM it would suffer the same incoherency problems
that plagued X, and unless you were also using an old DRI1-compatible
version of Mesa, legacy/UMS could do no better.) You also lose EXA
acceleration, but that turns out to be a good thing in the general case,
although synthetic benchmarks will show decreased performance.

The branch is available for testing from

http://cgit.freedesktop.org/~ickle/xf86-video-intel/log/?h=shadow

To enable the use of the shadow, you will have to add

Section "Device"
  Option "Shadow" "True"
EndSection

to your xorg.conf.

-- 
Chris Wilson, Intel Open Source Technology Centre


Re: [Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Alan W. Irwin

On 2010-08-26 10:47- Nasa wrote:


Hi,


I am trying to configure a rather obscure resolution (800x480), which is the
native resolution for my monitor.  The monitor doesn't provide EDID or DDC
information (it's connected over VGA), and the default settings chosen by the
driver produce displays that don't fit the monitor properly.  I would like to
construct an xorg.conf file with appropriate vertical refresh rates,
horizontal syncs, and/or modelines to work correctly with the monitor.
However, there doesn't seem to be a way to turn off the driver defaults for
those items.  I tried options like NoDDC, UseEDID, etc. with no luck.  I also
tried using xrandr to change resolutions after X has started.  The results
end up being worse than the initial problem I was trying to fix (i.e. the
screen is bigger than the area available to display it).  I expect my
inability to find a suitable solution is due to my lack of knowledge -- so I
hope someone can fill me in on what I am missing.  Thanks in advance,


Earlier this year before I replaced my long-time Sony monitor with an
LCD, and upgraded from Debian Lenny to Debian testing, the results of
gtf and PreferredMode worked for me. For example, my xorg.conf file
for that monitor had the following lines in the Monitor section

#gtf 1024 768 85
# 1024x768 @ 85.00 Hz (GTF) hsync: 68.60 kHz; pclk: 94.39 MHz
Modeline "1024x768_85.00"  94.39  1024 1088 1200 1376  768 769 772 807
-HSync +Vsync
Option "PreferredMode" "1024x768_85.00"

Of course, instead of using the above example, you will want to run
something like

gtf 800 400 85

from the command line (man gtf), paste the results to your Monitor
section, and consistently update the identification of the mode used
by PreferredMode.

I emphasize that the above configuration lines worked for an old version of the
Intel driver (Debian Lenny), and I don't know whether they would work
for a modern version.  But it is worth a try.

Alan
__
Alan W. Irwin

Astronomical research affiliation with Department of Physics and Astronomy,
University of Victoria (astrowww.phys.uvic.ca).

Programming affiliations with the FreeEOS equation-of-state implementation
for stellar interiors (freeeos.sf.net); PLplot scientific plotting software
package (plplot.org); the libLASi project (unifont.org/lasi); the Loads of
Linux Links project (loll.sf.net); and the Linux Brochure Project
(lbproject.sf.net).
__

Linux-powered Science
__


[Intel-gfx] Installation of Intel drivers in Ubuntu 10.04

2010-08-26 Thread Ganesh Waghmode
Hi,

 

I am trying to install the Intel drivers on my system. My system configuration
is as follows:

 

Motherboard : Intel's Mobile 945GME Express Integrated Graphics Controller

Operating System : Ubuntu 10.04

X-Server 1.7.6

Kernel : 2.6.32-21-generic

 

I have installed libdrm-2.4.17 successfully, but when I try to build
xf86-video-intel-2.10.0 it gives the following error:

/usr/local/include/xf86drm.h:40:17: error: drm.h: No such file or directory.

 

drm.h is there in /usr/include/drm/.

 

Kindly guide me.

 

With Regards,

Ganesh Waghmode



[Intel-gfx] Problems configuring unique xorg.conf file

2010-08-26 Thread Nasa
Hi,


I am trying to configure a rather obscure resolution (800x480), which is the
native resolution for my monitor.  The monitor doesn't provide EDID or DDC
information (it's connected over VGA), and the default settings chosen by the
driver produce displays that don't fit the monitor properly.  I would like to
construct an xorg.conf file with appropriate vertical refresh rates,
horizontal syncs, and/or modelines to work correctly with the monitor.
However, there doesn't seem to be a way to turn off the driver defaults for
those items.  I tried options like NoDDC, UseEDID, etc. with no luck.  I also
tried using xrandr to change resolutions after X has started.  The results
end up being worse than the initial problem I was trying to fix (i.e. the
screen is bigger than the area available to display it).  I expect my
inability to find a suitable solution is due to my lack of knowledge -- so I
hope someone can fill me in on what I am missing.  Thanks in advance,

Nasa


Chipset:  945GM
Model:  MB899 Intel Core Single/Duo Mini-Itx Motherboard 945GM
Monitor:  Lilliput EBY-701
Driver:  intel
OS:  Meego (IVI image)


Re: [Intel-gfx] [PATCH] drm/i915: intel_overlay_print_error_state() only needed for debugfs (v2)

2010-08-26 Thread Chris Wilson
On Thu, 26 Aug 2010 01:52:16 +0200, Tobias Doerffel  
wrote:
> The function intel_overlay_print_error_state() is called from within
> i915_error_state() in i915_debufs.c which is only built if
> CONFIG_DEBUG_FS is set. When building without this option, seq_printf()
> is not declared in intel_overlay.c and the file fails to compile.
> Furthermore the whole function is superfluous in this case. Therefore
> only build it if CONFIG_DEBUG_FS is set.
> 
> Signed-off-by: Tobias Doerffel 
I've already posted a better patch to remove the compile warnings as well.

The simple inclusion of seq_file.h is the best patch to go into mainline
atm, with the more complete patch coming in via drm-intel-next.

-- 
Chris Wilson, Intel Open Source Technology Centre


Re: [Intel-gfx] [PATCH] drm/i915: intel_overlay_print_error_state() only needed for debugfs (v2)

2010-08-26 Thread Tobias Doerffel
Hi,

On Thursday, 26 August 2010, at 08:41:13, Andre Müller wrote:
> On 08/26/10 01:52, Tobias Doerffel wrote:
> > The function intel_overlay_print_error_state() is called from within
> > i915_error_state() in i915_debufs.c which is only built if
> > CONFIG_DEBUG_FS is set. When building without this option, seq_printf()
> > is not declared in intel_overlay.c and the file fails to compile.
> > Furthermore the whole function is superfluous in this case. Therefore
> > only build it if CONFIG_DEBUG_FS is set.
> > 
> > Signed-off-by: Tobias Doerffel 
> 
> You may want to refer to
> https://bugzilla.kernel.org/show_bug.cgi?id=16811
> in the description.
> Andrew Morton has a different fix for the failure
> applied to -mm, which just adds an (unconditional)
> #include .

Yes, that works as well; however, the affected function is not required at all
without debugfs, so we can leave it out completely (as is done with all the
other functions in i915_debugfs.c) and save some bytes.

Regards

Toby

> Regards,
> Andre
> 
> > ---
> > 
> >  drivers/gpu/drm/i915/intel_overlay.c |2 ++
> >  1 files changed, 2 insertions(+), 0 deletions(-)
> > 
> > diff --git a/drivers/gpu/drm/i915/intel_overlay.c
> > b/drivers/gpu/drm/i915/intel_overlay.c index 4f00390..74feb89 100644
> > --- a/drivers/gpu/drm/i915/intel_overlay.c
> > +++ b/drivers/gpu/drm/i915/intel_overlay.c
> > 
> > @@ -1461,6 +1461,7 @@ err:
> > return NULL;
> >  
> >  }
> > 
> > +#ifdef CONFIG_DEBUG_FS
> > 
> >  void
> >  intel_overlay_print_error_state(struct seq_file *m, struct
> >  intel_overlay_error_state *error) {
> > 
> > @@ -1513,3 +1514,4 @@ intel_overlay_print_error_state(struct seq_file *m,
> > struct intel_overlay_error_s
> > 
> > P(UVSCALEV);
> >  
> >  #undef P
> >  }
> > 
> > +#endif /* CONFIG_DEBUG_FS */





Re: [Intel-gfx] 945 with SR doesn't need manual enable/disable

2010-08-26 Thread Li Peng
Hello, Alexander:

I ran into a system hang issue when enabling SR directly on 945, so I
enable/disable SR depending on the h/w idle status.
If you don't see the hang anymore, then it has probably been fixed by other
commits (likely, as you said, 944001201ca0196bcdb088129e5866a9f379d08c).

Please fix the patch format in your email client. I will give it a test
on my side.

Thanks
Peng

On Mon, 2010-08-23 at 17:34 -0400, Alexander Lam wrote:

> Hi all,
> 
> Using 2.6.35.2, I changed 945's self refresh to work without the need
> for the driver to enable/disable self refresh manually based on the
> idle state of the gpu.
> 
> Something must have been fixed in the driver between the initial
> implementation of 945 self refresh and now.
> (maybe 944001201ca0196bcdb088129e5866a9f379d08c: drm/i915: enable low
> power render writes on GEN3 hardware?)
> 
> patch (which probably doesn't cover all cases concerning if SR should
> be enabled/disabled, not to mention doing things in the wrong order or
> in the wrong place):
> 
> diff -uNrp a/drivers/gpu/drm/i915/intel_display.c
> b/drivers/gpu/drm/i915/intel_display.c
> --- a/drivers/gpu/drm/i915/intel_display.c  2010-08-01
> 18:11:14.0 -0400
> +++ b/drivers/gpu/drm/i915/intel_display.c  2010-08-22
> 16:52:24.16894 -0400
> @@ -3016,7 +3016,7 @@ static void i9xx_update_wm(struct drm_de
>int planea_wm, planeb_wm;
>struct intel_watermark_params planea_params, planeb_params;
>unsigned long line_time_us;
> -   int sr_clock, sr_entries = 0;
> +   int sr_clock, sr_entries = 0, sr_enabled = 0;
> 
>/* Create copies of the base settings for each pipe */
>if (IS_I965GM(dev) || IS_I945GM(dev))
> @@ -3063,8 +3063,11 @@ static void i9xx_update_wm(struct drm_de
>if (srwm < 0)
>srwm = 1;
> 
> -   if (IS_I945G(dev) || IS_I945GM(dev))
> +   if (IS_I945G(dev) || IS_I945GM(dev)){
>I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_FIFO_MASK |
> (srwm & 0xff));
> +   DRM_DEBUG_DRIVER("enable memory self refresh on 
> 945\n");
> +   sr_enabled = 1;
> +   }
>else if (IS_I915GM(dev)) {
>/* 915M has a smaller SRWM field */
>I915_WRITE(FW_BLC_SELF, srwm & 0x3f);
> @@ -3073,6 +3076,8 @@ static void i9xx_update_wm(struct drm_de
>} else {
>/* Turn off self refresh if both pipes are enabled */
>if (IS_I945G(dev) || IS_I945GM(dev)) {
> +   DRM_DEBUG_DRIVER("disable memory self refresh
> on 945\n");
> +   sr_enabled = 0;
>I915_WRITE(FW_BLC_SELF, I915_READ(FW_BLC_SELF)
>   & ~FW_BLC_SELF_EN);
>} else if (IS_I915GM(dev)) {
> @@ -3092,6 +3097,8 @@ static void i9xx_update_wm(struct drm_de
> 
>I915_WRITE(FW_BLC, fwater_lo);
>I915_WRITE(FW_BLC2, fwater_hi);
> +   if (sr_enabled)
> +   I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN_MASK | FW_BLC_SELF_EN);
>  }
> 
>  static void i830_update_wm(struct drm_device *dev, int planea_clock,
> int unused,
> @@ -4506,7 +4513,6 @@ static void intel_idle_update(struct wor
>struct drm_device *dev = dev_priv->dev;
>struct drm_crtc *crtc;
>struct intel_crtc *intel_crtc;
> -   int enabled = 0;
> 
>if (!i915_powersave)
>return;
> @@ -4520,16 +4526,11 @@ static void intel_idle_update(struct wor
>if (!crtc->fb)
>continue;
> 
> -   enabled++;
>intel_crtc = to_intel_crtc(crtc);
>if (!intel_crtc->busy)
>intel_decrease_pllclock(crtc);
>}
> 
> -   if ((enabled == 1) && (IS_I945G(dev) || IS_I945GM(dev))) {
> -   DRM_DEBUG_DRIVER("enable memory self refresh on 945\n");
> -   I915_WRITE(FW_BLC_SELF, FW_BLC_SELF_EN_MASK | FW_BLC_SELF_EN);
> -   }
> 
>mutex_unlock(&dev->struct_mutex);
>  }
> @@ -4554,17 +4555,9 @@ void intel_mark_busy(struct drm_device *
>if (!drm_core_check_feature(dev, DRIVER_MODESET))
>return;
> 
> -   if (!dev_priv->busy) {
> -   if (IS_I945G(dev) || IS_I945GM(dev)) {
> -   u32 fw_blc_self;
> -
> -   DRM_DEBUG_DRIVER("disable memory self refresh
> on 945\n");
> -   fw_blc_self = I915_READ(FW_BLC_SELF);
> -   fw_blc_self &= ~FW_BLC_SELF_EN;
> -   I915_WRITE(FW_BLC_SELF, fw_blc_self |
> FW_BLC_SELF_EN_MASK);
> -   }
> +   if (!dev_priv->busy)
>dev_priv->busy = true;
> -   } else
> +   else
>mod_timer(&dev_priv->idle_timer, jiffies +
>  msecs_to_jiffies(GPU_IDLE_TIMEOUT));
> 
> @@ -4576,14 +4569,6 @@ void intel_mark_busy(struct drm_device *
>   

Re: [Intel-gfx] 2.6.36-rc2 regression: Wrong screen resolution (1024x768 instead of 1680x1050)

2010-08-26 Thread Tino Keitel
On Thu, Aug 26, 2010 at 09:30:39 +0200, Tino Keitel wrote:

[...]

> Xorg log is attached.

It wasn't. Now it is attached.

Regards,
Tino


Xorg.0.log.gz
Description: Binary data


[Intel-gfx] 2.6.36-rc2 regression: Wrong screen resolution (1024x768 instead of 1680x1050)

2010-08-26 Thread Tino Keitel
Hi,

booting 2.6.36-rc2-git4 (commit
d4348c678977c7093438bbbf2067c49396ae941b) results in a screen
resolution of 1024x768 instead of 1680x1050.  It works fine with
2.6.35.2.

The framebuffer console uses the correct resolution, but only uses a
part of the screen for text output. Xorg output looks distorted.

Xorg log is attached.

$ xrandr 
Screen 0: minimum 320 x 200, current 1024 x 768, maximum 4096 x 4096
VGA1 disconnected (normal left inverted right x axis y axis)
DVI1 connected 1024x768+0+0 (normal left inverted right x axis y axis) 454mm x 284mm
   1680x1050   59.9 +
   1280x1024   60.0
   1280x960    60.0
   1024x768    60.0*
   800x600     60.3
   640x480     60.0
   720x400     70.1
TV1 connected 1024x768+0+0 (normal left inverted right x axis y axis) 0mm x 0mm
   848x480     30.0 +
   640x480     30.0 +
   1024x768    30.0*
   800x600     30.0

This is a Mac mini Core 2 Duo with Intel i945 graphics.

Regards,
Tino