This GPL code has to be removed, as you can't just put GPL code into a BSD 
project. All of the code would have to be GPL in order to link it in; you can't 
just say, "this piece is GPL".

dan



> Enlightenment CVS committal
> 
> Author  : moom16
> Project : e17
> Module  : libs/emotion
> 
> Dir     : e17/libs/emotion/src/modules/xine
> 
> 
> Modified Files:
>       emotion_xine_vo_out.c 
> 
> 
> Log Message:
> * Add support of yuy2 to emotion_xine module. So now, we can play wmv 
> movies!
> 
> I had to make some module API changes:
>  - rename yuv_size_get to video_data_size_get
>  - add the function "Emotion_Format (*format_get) (void *ef);" in 
> Emotion_Video_Module. 
> This function should return the format used by the video (EMOTION_YV12, 
> EMOTION_YUY2 
> or EMOTION_BGRA).
> 
> I use a yuy2->bgra converter since evas doesn't support YUY2. This converter 
> should 
> be rewritten since it's really not optimized and since it is under GPLv2 
> (emotion 
> is under BSD). I added an explicit comment above the function for that, but 
> it'd 
> be better to use a converter under BSD.
> If it's really incompatible with emotion's license, I will remove it.
> 
> ===================================================================
> RCS file: 
> /cvsroot/enlightenment/e17/libs/emotion/src/modules/xine/emotion_xine_vo_out.c,v
> retrieving revision 1.6
> retrieving revision 1.7
> diff -u -3 -r1.6 -r1.7
> --- emotion_xine_vo_out.c     5 Apr 2005 02:21:06 -0000       1.6
> +++ emotion_xine_vo_out.c     8 Jul 2005 19:56:48 -0000       1.7
> @@ -94,6 +94,8 @@
>  static void         _emotion_overlay_mem_blend_8   (uint8_t *mem, uint8_t 
> val, 
> uint8_t o, size_t sz);
>  static void         _emotion_overlay_blend_yuv     (uint8_t *dst_base[3], 
> vo_overlay_t 
> * img_overl, int dst_width, int dst_height, int dst_pitches[3]);
>  
> +static void         _emotion_yuy2_to_bgra32        (int width, int height, 
> unsigned 
> char *src, unsigned char *dst);
> +
>  /***************************************************************************/
>  static vo_info_t _emotion_info = 
>  {
> @@ -215,7 +217,7 @@
>     
>     dv = (Emotion_Driver *)vo_driver;
>  //   printf("emotion: _emotion_capabilities_get()\n");
> -   return VO_CAP_YV12;
> +   return VO_CAP_YV12 | VO_CAP_YUY2;
>  }
>  
>  /***************************************************************************/
> @@ -358,6 +360,7 @@
>              {
>                 int y_size, uv_size;
>                 
> +        fr->frame.format = EMOTION_YV12;
>                 fr->vo_frame.pitches[0] = 8 * ((width + 7) / 8);
>                 fr->vo_frame.pitches[1] = 8 * ((width + 15) / 16);
>                 fr->vo_frame.pitches[2] = 8 * ((width + 15) / 16);
> @@ -374,19 +377,49 @@
>                 fr->frame.y = fr->vo_frame.base[0];
>                 fr->frame.u = fr->vo_frame.base[1];
>                 fr->frame.v = fr->vo_frame.base[2];
> +        fr->frame.bgra_data = NULL;
>                 fr->frame.y_stride = fr->vo_frame.pitches[0];
>                 fr->frame.u_stride = fr->vo_frame.pitches[1];
>                 fr->frame.v_stride = fr->vo_frame.pitches[2];
>                 fr->frame.obj = dv->ev->obj;
>              }
>            break;
> +      case XINE_IMGFMT_YUY2: 
> +            {
> +               int y_size, uv_size;
> +               
> +        fr->frame.format = EMOTION_BGRA;
> +               fr->vo_frame.pitches[0] = 8 * ((width + 3) / 4);
> +               fr->vo_frame.pitches[1] = 0;
> +               fr->vo_frame.pitches[2] = 0;
> +               
> +               fr->vo_frame.base[0] = malloc(fr->vo_frame.pitches[0] * 
> height);
> +               fr->vo_frame.base[1] = NULL;
> +               fr->vo_frame.base[2] = NULL;
> +        
> +               fr->frame.w = fr->width;
> +               fr->frame.h = fr->height;
> +               fr->frame.ratio = fr->vo_frame.ratio;
> +               fr->frame.y = NULL;
> +               fr->frame.u = NULL;
> +               fr->frame.v = NULL;
> +        fr->frame.bgra_data = malloc(fr->width * fr->height * 4);
> +               fr->frame.y_stride = 0;
> +               fr->frame.u_stride = 0;
> +               fr->frame.v_stride = 0;
> +               fr->frame.obj = dv->ev->obj;
> +            }
> +          break;
>          default:
>            break;
>         }
>       if (((format == XINE_IMGFMT_YV12)
>            && ((fr->vo_frame.base[0] == NULL)
>                || (fr->vo_frame.base[1] == NULL)
> -              || (fr->vo_frame.base[2] == NULL))))
> +              || (fr->vo_frame.base[2] == NULL)))
> +      || ((format == XINE_IMGFMT_YUY2)
> +          && ((fr->vo_frame.base[0] == NULL)
> +       || (fr->frame.bgra_data == NULL))))
>         {
>            _emotion_frame_data_free(fr);
>         }
> @@ -408,6 +441,11 @@
>       {
>       void *buf;
>       int ret;
> +
> +   if (fr->format == XINE_IMGFMT_YUY2)
> +     {
> +   _emotion_yuy2_to_bgra32(fr->width, fr->height, fr->vo_frame.base[0], 
> fr->frame.bgra_data);
> +     }
>       
>       buf = &(fr->frame);
>       fr->frame.timestamp = (double)fr->vo_frame.vpts / 90000.0;
> @@ -444,6 +482,11 @@
>       fr->frame.u = fr->vo_frame.base[1];
>       fr->frame.v = fr->vo_frame.base[2];
>       }
> +   if (fr->frame.bgra_data)
> +     {
> +   free(fr->frame.bgra_data);
> +   fr->frame.bgra_data = NULL;
> +     }
>  }
>  
>  static void
> @@ -672,3 +715,65 @@
>        }
>     }
>  }
> +
> +/*MoOm:
> +* yuy2 to bgra converter taken from vgrabbj (http://vgrabbj.gecius.de)
> +* This code is under GPLv2. Copyright Jens Gecius.
> +* If it causes problem with emotion BSD license, tell me, I'll remove it!
> +* TODO: Really need to improve this converter! 
> +*/ 
> +#define LIMIT(x)  ((x) > 0xffff ? 0xff : ((x) <= 0xff ? 0 : ((x) >> 8 )))
> +
> +static void
> +_emotion_yuy2_to_bgra32(int width, int height, unsigned char *src, unsigned 
> char 
> *dst)
> +{
> +   int line, col, linewidth;
> +   int y, yy;
> +   int u, v;
> +   int vr, ug, vg, ub;
> +   int r, g, b;
> +   unsigned char *py, *pu, *pv;
> +
> +   linewidth = width - (width >> 1);
> +   py = src;
> +   pu = src + 1;
> +   pv = src + 3;
> +
> +   y = *py;
> +   yy = y << 8;
> +   u = *pu - 128;
> +   ug =   88 * u;
> +   ub =  454 * u;
> +   v = *pv - 128;
> +   vg =  183 * v;
> +   vr =  359 * v;
> +
> +   for (line = 0; line < height; line++)
> +     {
> +       for (col = 0; col < width; col++)
> +         {
> +           r = LIMIT(yy + vr);
> +           g = LIMIT(yy - ug - vg);
> +           b = LIMIT(yy + ub);
> +           *dst++ = b;
> +           *dst++ = g;
> +           *dst++ = r;
> +           *dst++ = 0;
> +         
> +           py += 2;
> +           y = *py;
> +           yy = y << 8;
> +           if ((col & 1) == 1)
> +             {
> +               pu += 4; //skip yvy every second y
> +               pv += 4; //skip yuy every second y
> +             }
> +           u = *pu - 128;
> +           ug = 88 * u;
> +           ub = 454 * u;
> +           v = *pv - 128;
> +           vg = 183 * v;
> +           vr = 359 * v;
> +         }
> +     }
> +}
> 
> 
> 
> 
> -------------------------------------------------------
> This SF.Net email is sponsored by the 'Do More With Dual!' webinar happening
> July 14 at 8am PDT/11am EDT. We invite you to explore the latest in dual
> core and dual graphics technology at this free one hour event hosted by HP,
> AMD, and NVIDIA.  To register visit http://www.hp.com/go/dualwebinar
> _______________________________________________
> enlightenment-cvs mailing list
> enlightenment-cvs@lists.sourceforge.net
> https://lists.sourceforge.net/lists/listinfo/enlightenment-cvs
> 






-------------------------------------------------------
This SF.Net email is sponsored by the 'Do More With Dual!' webinar happening
July 14 at 8am PDT/11am EDT. We invite you to explore the latest in dual
core and dual graphics technology at this free one hour event hosted by HP,
AMD, and NVIDIA.  To register visit http://www.hp.com/go/dualwebinar
_______________________________________________
enlightenment-devel mailing list
enlightenment-devel@lists.sourceforge.net
https://lists.sourceforge.net/lists/listinfo/enlightenment-devel

Reply via email to