Index: Makefile
===================================================================
RCS file: /home/pcvs/ports/multimedia/libxine/Makefile,v
retrieving revision 1.165
diff -u -p -r1.165 Makefile
--- Makefile	16 Jan 2011 09:08:48 -0000	1.165
+++ Makefile	17 Mar 2011 19:47:07 -0000
@@ -7,7 +7,7 @@
 PORTNAME=	xine
 PORTVERSION=	1.1.19
-PORTREVISION=	2
+PORTREVISION=	3
 CATEGORIES=	multimedia ipv6
 MASTER_SITES=	SF/${PORTNAME}/${PORTNAME}-lib/${PORTVERSION}
 PKGNAMEPREFIX=	lib
@@ -80,6 +80,7 @@ OPTIONS=	ARTS "Enable aRts support" off
 		IMAGEMAGICK "Enable ImageMagick support" off \
 		GTK2 "Enable gdkpixbuf support" off \
 		WAVPACK "Enable WavPack support" off \
+		VAAPI "Enable VAAPI support" on \
 		VDPAU "Enable VDPAU support" on
 
 MAN1=		xine-config.1 xine-list-1.1.1
@@ -208,6 +209,10 @@ PLIST_SUB+=	WITH_WAVPACK="@comment "
 CONFIGURE_ARGS+=	--without-wavpack
 .endif
 
+.if defined(WITH_VAAPI)
+LIB_DEPENDS+=	va.1:${PORTSDIR}/multimedia/libva
+.endif
+
 .if defined(WITH_VDPAU)
 PATCHFILES+=	xine-vdpau.bz2
 USE_AUTOTOOLS=	autoconf
@@ -231,6 +236,9 @@ CONFIGURE_ARGS+=	--disable-vidix
 .endif
 
 post-patch:
+.if defined(WITH_VAAPI)
+	${PATCH} ${PATCH_DIST_ARGS} <${FILESDIR}/ffmpeg-vaapi_xine-lib-1.1.19-initerrorhack.diff
+.endif
 .if defined(WITH_VDPAU)
 	for i in `${LS} ${FILESDIR}/extrapatch-vdpau*`; do \
 		${PATCH} ${PATCH_DIST_ARGS} <$$i ;\
Index: files/extrapatch-vdpau-src-video_out-video_out_vdpau.c
@@ -0,0 +1,91 @@
+--- a/src/video_out/video_out_vdpau.c
++++ b/src/video_out/video_out_vdpau.c
+@@ -53,6 +53,8 @@
+ 
+ #define NUM_FRAMES_BACK 1
+ 
++#define LOCKDISPLAY /*define this if you have a buggy libX11/xcb*/
++
+ #define DEINT_BOB 1
+ #define DEINT_HALF_TEMPORAL 2
+ #define DEINT_HALF_TEMPORAL_SPATIAL 3
+@@ -167,12 +169,30 @@ static void vdpau_reinit( vo_driver_t *t
+ static VdpVideoSurfaceCreate *orig_vdp_video_surface_create;
+ static VdpVideoSurfaceDestroy *orig_vdp_video_surface_destroy;
+ 
++static VdpVideoSurfaceCreate *orig_vdp_output_surface_create;
++static VdpVideoSurfaceDestroy *orig_vdp_output_surface_destroy;
++
++static VdpVideoSurfacePutBitsYCbCr *orig_vdp_video_surface_putbits_ycbcr;
++
+ static VdpDecoderCreate *orig_vdp_decoder_create;
+ static VdpDecoderDestroy *orig_vdp_decoder_destroy;
+ static VdpDecoderRender *orig_vdp_decoder_render;
+ 
+ static Display *guarded_display;
+ 
++static VdpStatus guarded_vdp_video_surface_putbits_ycbcr(VdpVideoSurface surface, VdpYCbCrFormat source_ycbcr_format, void const *const *source_data, uint32_t const *source_pitches)
++{
++  VdpStatus r;
++#ifdef LOCKDISPLAY
++  XLockDisplay(guarded_display);
++#endif
++  r = orig_vdp_video_surface_putbits_ycbcr(surface, source_ycbcr_format, source_data, source_pitches);
++#ifdef LOCKDISPLAY
++  XUnlockDisplay(guarded_display);
++#endif
++  return r;
++}
++
+ static VdpStatus guarded_vdp_video_surface_create(VdpDevice device, VdpChromaType chroma_type, uint32_t width, uint32_t height,VdpVideoSurface *surface)
+ {
+   VdpStatus r;
+@@ -195,6 +215,28 @@ static VdpStatus guarded_vdp_video_surfa
+   return r;
+ }
+ 
++static VdpStatus guarded_vdp_output_surface_create(VdpDevice device, VdpChromaType chroma_type, uint32_t width, uint32_t height,VdpVideoSurface *surface)
++{
++  VdpStatus r;
++#ifdef LOCKDISPLAY
++  XLockDisplay(guarded_display);
++#endif
++  r = orig_vdp_output_surface_create(device, chroma_type, width, height, surface);
++#ifdef LOCKDISPLAY
++  XUnlockDisplay(guarded_display);
++#endif
++  return r;
++}
++
++static VdpStatus guarded_vdp_output_surface_destroy(VdpVideoSurface surface)
++{
++  VdpStatus r;
++  XLockDisplay(guarded_display);
++  r = orig_vdp_output_surface_destroy(surface);
++  XUnlockDisplay(guarded_display);
++  return r;
++}
++
+ static VdpStatus guarded_vdp_decoder_create(VdpDevice device, VdpDecoderProfile profile, uint32_t width, uint32_t height, uint32_t max_references, VdpDecoder *decoder)
+ {
+   VdpStatus r;
+@@ -2324,16 +2366,16 @@ static vo_driver_t *vdpau_open_plugin (v
+   st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_VIDEO_SURFACE_DESTROY , (void*)&orig_vdp_video_surface_destroy ); vdp_video_surface_destroy = guarded_vdp_video_surface_destroy;
+   if ( vdpau_init_error( st, "Can't get VIDEO_SURFACE_DESTROY proc address !!", &this->vo_driver, 1 ) )
+     return NULL;
+-  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR , (void*)&vdp_video_surface_putbits_ycbcr );
++  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR , (void*)&orig_vdp_video_surface_putbits_ycbcr ); vdp_video_surface_putbits_ycbcr = guarded_vdp_video_surface_putbits_ycbcr;
+   if ( vdpau_init_error( st, "Can't get VIDEO_SURFACE_PUT_BITS_Y_CB_CR proc address !!", &this->vo_driver, 1 ) )
+     return NULL;
+   st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR , (void*)&vdp_video_surface_getbits_ycbcr );
+   if ( vdpau_init_error( st, "Can't get VIDEO_SURFACE_GET_BITS_Y_CB_CR proc address !!", &this->vo_driver, 1 ) )
+     return NULL;
+-  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_CREATE , (void*)&vdp_output_surface_create );
++  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_CREATE , (void*)&orig_vdp_output_surface_create ); vdp_output_surface_create = guarded_vdp_output_surface_create;
+   if ( vdpau_init_error( st, "Can't get OUTPUT_SURFACE_CREATE proc address !!", &this->vo_driver, 1 ) )
+     return NULL;
+-  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY , (void*)&vdp_output_surface_destroy );
++  st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY , (void*)&orig_vdp_output_surface_destroy ); vdp_output_surface_destroy = guarded_vdp_output_surface_destroy;
+   if ( vdpau_init_error( st, "Can't get OUTPUT_SURFACE_DESTROY proc address !!", &this->vo_driver, 1 ) )
+     return NULL;
+   st = vdp_get_proc_address( vdp_device, VDP_FUNC_ID_OUTPUT_SURFACE_RENDER_BITMAP_SURFACE , (void*)&vdp_output_surface_render_bitmap_surface );
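The extra VDPAU patch above only extends a pattern that already exists in video_out_vdpau.c: every VDPAU entry point that can be reached from the decoding thread is fetched once via vdp_get_proc_address() and then replaced by a wrapper that serializes the call against Xlib. For readers who have not seen that code, here is a minimal stand-alone sketch of the idiom; the names my_orig_surface_create and guarded_surface_create are illustrative only and do not appear in the patch:

  #include <X11/Xlib.h>
  #include <vdpau/vdpau.h>

  #define LOCKDISPLAY   /* as in the patch: work around a non-thread-safe libX11/xcb */

  static Display               *guarded_display;        /* opened by the plugin at init time */
  static VdpVideoSurfaceCreate *my_orig_surface_create;  /* filled in via vdp_get_proc_address() */

  static VdpStatus guarded_surface_create(VdpDevice dev, VdpChromaType chroma,
                                          uint32_t width, uint32_t height,
                                          VdpVideoSurface *surface)
  {
    VdpStatus st;
  #ifdef LOCKDISPLAY
    XLockDisplay(guarded_display);   /* keep VDPAU and the X11 output thread from racing */
  #endif
    st = my_orig_surface_create(dev, chroma, width, height, surface);
  #ifdef LOCKDISPLAY
    XUnlockDisplay(guarded_display);
  #endif
    return st;
  }

The new guarded_vdp_output_surface_create()/..._destroy() wrappers in the patch follow exactly this template; only the destroy variant omits the LOCKDISPLAY guards, which is harmless as long as LOCKDISPLAY stays defined unconditionally.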
Index: files/ffmpeg-vaapi_xine-lib-1.1.19-initerrorhack.diff
@@ -0,0 +1,871 @@
+diff -Naur xine-lib-1.1.19.orig/src/combined/ffmpeg/ff_video_decoder.c xine-lib-1.1.19/src/combined/ffmpeg/ff_video_decoder.c
+--- xine-lib-1.1.19.orig/src/combined/ffmpeg/ff_video_decoder.c
++++ xine-lib-1.1.19/src/combined/ffmpeg/ff_video_decoder.c
+@@ -51,6 +51,21 @@
+ # include
+ #endif
+ 
++#include
++#include
++//#include
++
++typedef struct ff_va_surfaces_s ff_va_surfaces_t;
++
++struct ff_va_surfaces_s
++{
++  int          count;
++  VASurfaceID **free;
++  VASurfaceID **used;
++};
++
++#define NUM_SURFACES 21
++
+ #define VIDEOBUFSIZE        (128*1024)
+ #define SLICE_BUFFER_SIZE   (1194*1024)
+ 
+@@ -73,6 +88,7 @@ typedef struct ff_video_class_s {
+   int                     thread_count;
+   int8_t                  skip_loop_filter_enum;
+   int8_t                  choose_speed_over_accuracy;
++  int                     enable_vaapi;
+ 
+   xine_t                 *xine;
+ } ff_video_class_t;
+@@ -111,6 +127,7 @@ struct ff_video_decoder_s {
+   int               slice_offset_size;
+ 
+   AVFrame          *av_frame;
++  //AVPacket          av_pkt;
+   AVCodecContext   *context;
+   AVCodec          *codec;
+ 
+@@ -138,8 +155,31 @@ struct ff_video_decoder_s {
+ #ifdef LOG
+   enum PixelFormat  debug_fmt;
+ #endif
++
++};
++
++typedef struct ff_vaapi_context_s ff_vaapi_context_t;
++
++struct ff_vaapi_context_s {
++  VAImage               va_image;
++  Display              *display;
++  VADisplay            *va_display;
++  VAImageFormat        *va_ifmts;
++  uint8_t              *va_image_data;
++  VAContextID           va_context_id;
++  VAConfigID            va_config_id;
++  VASurfaceID          *va_surface_id;
++  struct ff_va_surfaces_s va_surfaces;
++  int                   width;
++  int                   height;
++  int                   va_profile;
++  struct vaapi_context *vaapi_context;
++  int                   use_vaapi;
+ };
+ 
++static struct ff_vaapi_context_s *va_context;
++
++static VAStatus init_vaapi(struct ff_vaapi_context_s *va_context, int va_profile, int width, int height);
+ 
+ static void set_stream_info(ff_video_decoder_t *this) {
+   _x_stream_info_set(this->stream, XINE_STREAM_INFO_VIDEO_WIDTH,  this->bih.biWidth);
+@@ -147,7 +187,70 @@ static void set_stream_info(ff_video_dec
+   _x_stream_info_set(this->stream, XINE_STREAM_INFO_VIDEO_RATIO, this->aspect_ratio * 10000);
+ }
+ 
++/*
++ * XXX Error on init_vaapi() didn't cause fall back to standard rendering,
++ * so do a crude test on plugin open.
++ */
++#define INIT_ERROR_HACK
++
+ #ifdef ENABLE_DIRECT_RENDERING
++
++static void draw_slice(struct AVCodecContext *context, const AVFrame *src, int offset[4], int y, int type, int height) {
++  uint8_t *source[4]= {src->data[0] + offset[0], src->data[1] + offset[1], src->data[2] + offset[2], src->data[3] + offset[3]};
++  int strides[4] = {src->linesize[0], src->linesize[1], src->linesize[2]};
++
++  if (height < 0) {
++    int i;
++    height = -height;
++    y -= height;
++    for (i = 0; i < 4; i++) {
++      strides[i] = -strides[i];
++      source[i] -= strides[i];
++    }
++  }
++}
++
++static VAProfile* find_profile(VAProfile* p, int np, int codec)
++{
++  int i;
++
++  for(i = 0; i < np; i++)
++  {
++    if(p[i] == codec)
++      return &p[i];
++  }
++
++  return NULL;
++}
++
++int get_profile(int codec_id) {
++  int profile;
++
++  switch (codec_id) {
++    case CODEC_ID_MPEG2VIDEO:
++      profile = VAProfileMPEG2Main;
++      break;
++    case CODEC_ID_MPEG4:
++    case CODEC_ID_H263:
++      profile = VAProfileMPEG4AdvancedSimple;
++      break;
++    case CODEC_ID_H264:
++      profile = VAProfileH264High;
++      break;
++    case CODEC_ID_WMV3:
++      profile = VAProfileVC1Main;
++      break;
++    case CODEC_ID_VC1:
++      profile = VAProfileVC1Advanced;
++      break;
++    default:
++      profile = -1;
++      break;
++  }
++
++  return profile;
++}
++
+ /* called from ffmpeg to do direct rendering method 1 */
+ static int get_buffer(AVCodecContext *context, AVFrame *av_frame){
+   ff_video_decoder_t *this = (ff_video_decoder_t *)context->opaque;
+@@ -169,6 +272,43 @@ static int get_buffer(AVCodecContext *co
+ 
+   avcodec_align_dimensions(context, &width, &height);
+ 
++  if( va_context->use_vaapi ) {
++    int i = 0;
++    struct ff_va_surfaces_s *va_surfaces = &va_context->va_surfaces;
++
++    for(i = 0; i < va_surfaces->count; i++)
++    {
++      //printf("srfc #%d %p\n",i,va_srfcs->srfc_free[i]);
++      if(va_surfaces->free[i])
++      {
++        va_surfaces->used[i] = va_surfaces->free[i];
++        va_surfaces->free[i] = NULL;
++        break;
++      }
++    }
++    if(i == va_surfaces->count)
++    {
++      printf("ERROR get surface\n");
++      return -1;
++    }
++
++
++    av_frame->type = FF_BUFFER_TYPE_USER;
++    av_frame->age = 256*256*256*64; // FIXME FIXME from ffmpeg
++    av_frame->data[0] = (void*)va_surfaces->used[i];
++    av_frame->data[1] = NULL;
++    av_frame->data[2] = NULL;
++    av_frame->data[3] = (void*)(size_t)*va_surfaces->used[i];
++    av_frame->linesize[0] = 0;
++    av_frame->linesize[1] = 0;
++    av_frame->linesize[2] = 0;
++    av_frame->linesize[3] = 0;
++
++    this->is_direct_rendering_disabled = 1;
++
++    return 0;
++  }
++
+   if( this->context->pix_fmt != PIX_FMT_YUV420P && this->context->pix_fmt != PIX_FMT_YUVJ420P ) {
+     if (!this->is_direct_rendering_disabled) {
+       xprintf(this->stream->xine, XINE_VERBOSITY_LOG,
+@@ -238,10 +378,33 @@ static int get_buffer(AVCodecContext *co
+ static void release_buffer(struct AVCodecContext *context, AVFrame *av_frame){
+   ff_video_decoder_t *this = (ff_video_decoder_t *)context->opaque;
+ 
++  if( va_context->use_vaapi ) {
++    struct ff_va_surfaces_s *va_surfaces = &va_context->va_surfaces;
++    VASurfaceID va_surface_id = (VASurfaceID)(size_t)av_frame->data[3];
++    int i = 0;
++
++    for(i = 0; i < va_surfaces->count; i++)
++    {
++      //printf("srfc #%d %p 0x%08X\n",i,va_srfcs->srfc_used[i], va_srfcs->srfc_used[i] ? *va_srfcs->srfc_used[i] : -1);
++      if(va_surfaces->used[i] && (*va_surfaces->used[i] == va_surface_id))
++      {
++        va_surfaces->free[i] = va_surfaces->used[i];
++        va_surfaces->used[i] = NULL;
++        break;
++      }
++    }
++
++    av_frame->data[0] = NULL;
++    av_frame->data[1] = NULL;
++    av_frame->data[2] = NULL;
++    av_frame->data[3] = NULL;
++    return;
++  }
++
+   if (av_frame->type == FF_BUFFER_TYPE_USER) {
+     if ( av_frame->opaque ) {
+-      vo_frame_t *img = (vo_frame_t *)av_frame->opaque;
+-
++      vo_frame_t *img = (vo_frame_t *)av_frame->opaque;
++
+       img->free(img);
+     }
+ 
+@@ -283,13 +446,272 @@ static const int skip_loop_filter_enum_v
+   AVDISCARD_ALL
+ };
+ 
++static const char *VAProfile2string(VAProfile profile)
++{
++  switch(profile) {
++#define PROFILE(profile) \
++    case VAProfile##profile: return "VAProfile" #profile
++    PROFILE(MPEG2Simple);
++    PROFILE(MPEG2Main);
++    PROFILE(MPEG4Simple);
++    PROFILE(MPEG4AdvancedSimple);
++    PROFILE(MPEG4Main);
++    PROFILE(H264Baseline);
++    PROFILE(H264Main);
++    PROFILE(H264High);
++    PROFILE(VC1Simple);
++    PROFILE(VC1Main);
++    PROFILE(VC1Advanced);
++#undef PROFILE
++    default: break;
++  }
++  return "";
++}
++
++static const char *VAEntrypoint2string(VAEntrypoint entrypoint)
++{
++  switch(entrypoint)
++  {
++#define ENTRYPOINT(entrypoint) \
++    case VAEntrypoint##entrypoint: return "VAEntrypoint" #entrypoint
++    ENTRYPOINT(VLD);
++    ENTRYPOINT(IZZ);
++    ENTRYPOINT(IDCT);
++    ENTRYPOINT(MoComp);
++    ENTRYPOINT(Deblocking);
++#undef ENTRYPOINT
++    default: break;
++  }
++  return "";
++}
++
++static enum PixelFormat get_format(struct AVCodecContext *context, const enum PixelFormat *fmt)
++{
++  int i, profile;
++
++  for (i = 0; fmt[i] != PIX_FMT_NONE; i++) {
++    if (fmt[i] != PIX_FMT_VAAPI_VLD)
++      continue;
++
++    profile = get_profile(context->codec_id);
++
++    if (profile >= 0) {
++      VAStatus status;
++
++      status = init_vaapi(va_context, profile, context->width, context->height);
++
++      if( status == VA_STATUS_SUCCESS ) {
++
++        //context->draw_horiz_band = draw_slice;
++        context->draw_horiz_band = NULL;
++        context->slice_flags = SLICE_FLAG_CODED_ORDER | SLICE_FLAG_ALLOW_FIELD;
++        context->dsp_mask = FF_MM_FORCE | FF_MM_MMX | FF_MM_MMXEXT | FF_MM_SSE;
++
++        va_context->vaapi_context->config_id = va_context->va_config_id;
++        va_context->vaapi_context->context_id = va_context->va_context_id;
++        context->hwaccel_context = va_context->vaapi_context;
++        printf("init vaapi successfully\n");
++        va_context->use_vaapi = 1;
++        return fmt[i];
++      } else {
++        va_context->use_vaapi = 0;
++        printf("vaapi init error\n");
++      }
++    } else {
++      va_context->use_vaapi = 0;
++    }
++  }
++  return PIX_FMT_NONE;
++}
++
++static void close_vaapi(ff_vaapi_context_t *va_context) {
++  if (va_context->use_vaapi == 0)
++    return;
++
++  if(va_context->va_context_id) {
++    if(va_context->va_image.image_id != VA_INVALID_ID ) {
++      vaDestroyImage(va_context->va_display, va_context->va_image.image_id );
++    }
++    if(va_context->va_context_id != VA_INVALID_ID) {
++      vaDestroyContext(va_context->va_display, va_context->va_context_id);
++    }
++
++    int i = 0;
++    for( i = 0; i < va_context->va_surfaces.count; i++ ) {
++      if( va_context->va_surface_id[i] != VA_INVALID_SURFACE) {
++        vaDestroySurfaces(va_context->va_display, &va_context->va_surface_id[i], 1);
++      }
++    }
++
++    if(va_context->va_config_id)
++      vaDestroyConfig(va_context->va_display, va_context->va_config_id);
++
++    vaTerminate(va_context->va_display);
++    XCloseDisplay(va_context->display);
++
++    free(va_context->va_surfaces.free);
++    free(va_context->va_surfaces.used);
++    free(va_context->va_surface_id);
++    free(va_context->vaapi_context);
++    free(va_context);
++
++    va_context = NULL;
++  }
++}
++
++static VAStatus create_vaapi_image(struct ff_vaapi_context_s *va_context) {
++  int i;
++  int fmt_count = vaMaxNumImageFormats( va_context->va_display );
++  VAImageFormat *va_p_fmt = calloc( fmt_count, sizeof(*va_p_fmt) );
++  VAImageFormat va_fmt;
++
++  if( vaQueryImageFormats( va_context->va_display , va_p_fmt, &fmt_count ) ) {
++    free(va_p_fmt);
++    goto error;
++  }
++
++  for( i = 0; i < fmt_count; i++ ) {
++    if ( va_p_fmt[i].fourcc == VA_FOURCC( 'Y', 'V', '1', '2' ) ||
++         va_p_fmt[i].fourcc == VA_FOURCC( 'I', '4', '2', '0' ) ||
++         va_p_fmt[i].fourcc == VA_FOURCC( 'N', 'V', '1', '2' ) ) {
++
++      if( vaCreateImage( va_context->va_display, &va_p_fmt[i], va_context->width, va_context->height, &va_context->va_image) ) {
++        va_context->va_image.image_id = VA_INVALID_ID;
++        continue;
++      }
++      if( vaGetImage(va_context->va_display, va_context->va_surface_id[0], 0, 0,
++                     va_context->width, va_context->height, va_context->va_image.image_id) ) {
++        vaDestroyImage( va_context->va_display, va_context->va_image.image_id );
++        va_context->va_image.image_id = VA_INVALID_ID;
++        continue;
++      }
++      printf("found valid image format\n");
++      va_fmt = va_p_fmt[i];
++      break;
++    }
++  }
++  free(va_p_fmt);
++
++  if(va_context->va_image.image_id == VA_INVALID_ID)
++    goto error;
++
++  return VA_STATUS_SUCCESS;
++
++error:
++  return VA_STATUS_ERROR_UNKNOWN;
++}
++
++static VAStatus init_vaapi(struct ff_vaapi_context_s *va_context, int va_profile, int width, int height) {
++  size_t i;
++  VAStatus status = VA_STATUS_ERROR_UNKNOWN;
++  int surface_count = NUM_SURFACES;
++  VAConfigAttrib va_attrib;
++  int maj, min;
++  VAProfile *va_profiles = NULL;
++  VAProfile *va_profile_search = NULL;
++  int number_profiles;
++
++  va_context->width = width;
++  va_context->height = height;
++  va_context->va_profile = va_profile;
++
++  va_context->va_surfaces.free = NULL;
++  va_context->va_surfaces.used = NULL;
++
++  va_context->va_display = NULL;
++  va_context->va_config_id = VA_INVALID_ID;
++  va_context->va_context_id = VA_INVALID_ID;
++  va_context->va_image.image_id = VA_INVALID_ID;
++
++  va_context->display = XOpenDisplay(NULL);
++  if(!va_context->display)
++    goto error;
++
++  va_context->va_display = vaGetDisplay(va_context->display);
++  va_context->vaapi_context->display = va_context->va_display;
++
++  if(!va_context->va_display)
++    goto error;
++
++  if(vaInitialize(va_context->va_display, &maj, &min))
++    goto error;
++
++  printf("libva: %d.%d\n", maj, min);
++
++  printf("AvCodecContext w %d h %d\n", va_context->width, va_context->height);
++
++  memset( &va_attrib, 0, sizeof(va_attrib) );
++  va_attrib.type = VAConfigAttribRTFormat;
++
++  va_profiles = malloc(vaMaxNumProfiles(va_context->va_display) * sizeof(VAProfile));
++
++  if(vaQueryConfigProfiles(va_context->va_display, va_profiles, &number_profiles)) {
++    free(va_profiles);
++    goto error;
++  }
++
++  va_profile_search = find_profile(va_profiles, number_profiles, va_profile);
++
++  free(va_profiles);
++
++
++  if(!va_profile_search)
++    goto error;
++
++  printf("Profile: %d (%s) Entrypoint %d (%s)\n", va_context->va_profile, VAProfile2string(va_context->va_profile), VAEntrypointVLD, VAEntrypoint2string(VAEntrypointVLD));
++
++  if( vaGetConfigAttributes(va_context->va_display, va_context->va_profile, VAEntrypointVLD, &va_attrib, 1) )
++    goto error;
++
++  if( (va_attrib.value & VA_RT_FORMAT_YUV420) == 0 )
++    goto error;
++
++  if( vaCreateConfig(va_context->va_display, va_context->va_profile, VAEntrypointVLD, &va_attrib, 1, &va_context->va_config_id) ) {
++    va_context->va_config_id = VA_INVALID_ID;
++    goto error;
++  }
++
++  if(va_context->va_surface_id == NULL) {
++    va_context->va_surface_id = malloc(sizeof(VASurfaceID) * surface_count);
++    va_context->va_surfaces.free = malloc(sizeof(VASurfaceID*) * surface_count);
++    va_context->va_surfaces.used = malloc(sizeof(VASurfaceID*) * surface_count);
++  }
++
++  if( vaCreateSurfaces(va_context->va_display, va_context->width, va_context->height, VA_RT_FORMAT_YUV420, surface_count, va_context->va_surface_id) )
++    goto error;
++
++  for(i = 0; i < surface_count; i++) {
++    va_context->va_surfaces.free[i] = &va_context->va_surface_id[i];
++    va_context->va_surfaces.used[i] = NULL;
++  }
++
++  va_context->va_surfaces.count = surface_count;
++
++  if(vaCreateContext(va_context->va_display, va_context->va_config_id, va_context->width, va_context->height,
++                     VA_PROGRESSIVE, va_context->va_surface_id, surface_count, &va_context->va_context_id) ) {
++    va_context->va_context_id = VA_INVALID_ID;
++    goto error;
++  }
++
++  status = create_vaapi_image(va_context);
++  if(status != VA_STATUS_SUCCESS)
++    goto error;
++
++  va_context->use_vaapi = 1;
++  return VA_STATUS_SUCCESS;
++
++error:
++  va_context->use_vaapi = 0;
++  return VA_STATUS_ERROR_UNKNOWN;
++}
++
+ static void init_video_codec (ff_video_decoder_t *this, unsigned int codec_type) {
+   size_t i;
+ 
+   /* find the decoder */
+   this->codec = NULL;
+ 
+-  for(i = 0; i < sizeof(ff_video_lookup)/sizeof(ff_codec_t); i++)
++  for(i = 0; i < sizeof(ff_video_lookup)/sizeof(ff_codec_t); i++) {
+     if(ff_video_lookup[i].type == codec_type) {
+       pthread_mutex_lock(&ffmpeg_lock);
+       this->codec = avcodec_find_decoder(ff_video_lookup[i].id);
+@@ -298,6 +720,8 @@ static void init_video_codec (ff_video_d
+                ff_video_lookup[i].name);
+       break;
+     }
++  }
++
+ 
+   if (!this->codec) {
+     xprintf (this->stream->xine, XINE_VERBOSITY_LOG,
+@@ -311,9 +735,11 @@ static void init_video_codec (ff_video_d
+ 
+   this->context->width = this->bih.biWidth;
+   this->context->height = this->bih.biHeight;
++
+   this->context->stream_codec_tag = this->context->codec_tag =
+     _x_stream_info_get(this->stream, XINE_STREAM_INFO_VIDEO_FOURCC);
+ 
++  //av_init_packet(&this->av_pkt);
+ 
+   /* Some codecs (eg rv10) copy flags in init so it's necessary to set
+    * this flag here in case we are going to use direct rendering */
+@@ -323,7 +749,6 @@ static void init_video_codec (ff_video_d
+ 
+   if (this->class->choose_speed_over_accuracy)
+     this->context->flags2 |= CODEC_FLAG2_FAST;
+-
+   pthread_mutex_lock(&ffmpeg_lock);
+   if (avcodec_open (this->context, this->codec) < 0) {
+     pthread_mutex_unlock(&ffmpeg_lock);
+@@ -337,7 +762,6 @@ static void init_video_codec (ff_video_d
+ 
+   if (this->codec->id == CODEC_ID_VC1 &&
+       (!this->bih.biWidth || !this->bih.biHeight)) {
+-    /* VC1 codec must be re-opened with correct width and height. */
+     avcodec_close(this->context);
+ 
+     if (avcodec_open (this->context, this->codec) < 0) {
+@@ -390,6 +814,13 @@ static void init_video_codec (ff_video_d
+     xprintf(this->stream->xine, XINE_VERBOSITY_LOG,
+             _("ffmpeg_video_dec: direct rendering enabled\n"));
+   }
++  if( this->class->enable_vaapi && !this->is_mpeg12) {
++    this->context->get_buffer = get_buffer;
++    this->context->reget_buffer = get_buffer;
++    this->context->release_buffer = release_buffer;
++    this->context->get_format = get_format;
++    this->context->thread_count = 1;
++  }
+ #endif
+ 
+   /* flag for interlaced streams */
+@@ -413,7 +844,13 @@ static void init_video_codec (ff_video_d
+       this->frame_flags |= VO_INTERLACED_FLAG;
+       break;
+   }
++}
++
+ 
++static void enable_vaapi(void *user_data, xine_cfg_entry_t *entry) {
++  ff_video_class_t *class = (ff_video_class_t *) user_data;
++
++  class->enable_vaapi = entry->num_value;
+ }
+ 
+ static void choose_speed_over_accuracy_cb(void *user_data, xine_cfg_entry_t *entry) {
+@@ -542,6 +979,36 @@ static int ff_handle_mpeg_sequence(ff_vi
+   return 1;
+ }
+ 
++/*
++*/
++
++static int nv12_to_yv12(ff_video_decoder_t *this, uint8_t *data, int len, vo_frame_t *img) {
++  unsigned int Y_size  = img->width * this->bih.biHeight;
++  unsigned int UV_size = img->width * this->bih.biHeight / 4;
++  unsigned int idx;
++  unsigned char *dst_Y = img->base[0];
++  unsigned char *dst_U = img->base[1];
++  unsigned char *dst_V = img->base[2];
++  unsigned char *src   = data + Y_size;
++
++  // sanity check raw stream
++  if ( (len != (Y_size + (UV_size<<1))) ) {
++    printf("hmblck: Image size inconsistent with data size.\n");
++    return 0;
++  }
++
++  // luma data is easy, just copy it
++  xine_fast_memcpy(dst_Y, data, Y_size);
++
++  // chroma data is interlaced UVUV... so deinterlace it
++  for(idx=0; idx<UV_size; idx++ ) {
++      *(dst_U++) = *(src++);
++      *(dst_V++) = *(src++);
++  }
++
++  return 1;
++}
++
+     lprintf ("frame format == %08x\n", this->debug_fmt = this->context->pix_fmt);
+ #endif
++  if(this->context->pix_fmt == PIX_FMT_VAAPI_VLD && va_context->va_image.image_id != VA_INVALID_ID) {
++    void *p_base;
++    const uint32_t fourcc = va_context->va_image.format.fourcc;
++    VAStatus status;
++
++    if( !vaGetImage( va_context->va_display, (VASurfaceID)(size_t)this->av_frame->data[3],
++                     0, 0, img->width, this->bih.biHeight, va_context->va_image.image_id) ) {
++
++      status = vaMapBuffer( va_context->va_display, va_context->va_image.buf, &p_base ) ;
++      if ( status == VA_STATUS_SUCCESS ) {
++        if( fourcc == VA_FOURCC('Y','V','1','2') ||
++            fourcc == VA_FOURCC('I','4','2','0') ) {
++
++          lprintf("VAAPI YV12 image\n");
++
++          yv12_to_yv12(
++            /* Y */
++            (uint8_t*)p_base, img->width,
++            img->base[0], img->pitches[0],
++            /* U */
++            (uint8_t*)p_base + (img->width * img->height * 5/4), img->width/2,
++            img->base[1], img->pitches[1],
++            /* V */
++            (uint8_t*)p_base + (img->width * img->height), img->width/2,
++            img->base[2], img->pitches[2],
++            /* width x height */
++            img->width, img->height);
++
++        }
++        if( fourcc == VA_FOURCC('N','V','1','2') ) {
++
++          lprintf("VAAPI NV12 image\n");
++
++          nv12_to_yv12(this, (uint8_t*)p_base, va_context->va_image.data_size, img);
++
++        }
++
++        status = vaUnmapBuffer( va_context->va_display, va_context->va_image.buf );
++      }
++    }
++    return;
++  }
++
+   dy = img->base[0];
+   du = img->base[1];
+   dv = img->base[2];
+@@ -1022,6 +1532,11 @@ static void ff_handle_mpeg12_buffer (ff_
+ 
+   lprintf("handle_mpeg12_buffer\n");
+ 
++  if (buf->decoder_flags & BUF_FLAG_FRAME_START) {
++    lprintf("BUF_FLAG_FRAME_START\n");
++    this->size = 0;
++  }
++
+   while ((size > 0) || (flush == 1)) {
+ 
+     uint8_t *current;
+@@ -1307,10 +1822,19 @@ static void ff_handle_buffer (ff_video_d
+     this->context->hurry_up = (this->skipframes > 0);
+ 
+     lprintf("buffer size: %d\n", this->size);
++
++    //this->av_pkt.size = this->size;
++    //this->av_pkt.data = &chunk_buf[offset];
++    //this->av_pkt.data = this->buf;
++
++    //len = avcodec_decode_video2 (this->context, this->av_frame, &got_picture, &this->av_pkt);
++
+     len = avcodec_decode_video (this->context, this->av_frame,
+                                 &got_picture, &chunk_buf[offset],
+                                 this->size);
+ 
++    //av_free_packet(&this->av_pkt);
++
+ #ifdef AVCODEC_HAS_REORDERED_OPAQUE
+     /* reset consumed pts value */
+     this->context->reordered_opaque = ff_tag_pts(this, 0);
+@@ -1355,47 +1879,48 @@ static void ff_handle_buffer (ff_video_d
+ 
+     /* aspect ratio provided by ffmpeg, override previous setting */
+     if ((this->aspect_ratio_prio < 2) &&
+-        av_cmp_q(this->context->sample_aspect_ratio, avr00)) {
++        av_cmp_q(this->context->sample_aspect_ratio, avr00)) {
+ 
+       if (!this->bih.biWidth || !this->bih.biHeight) {
+         this->bih.biWidth  = this->context->width;
+         this->bih.biHeight = this->context->height;
+       }
+ 
+-      this->aspect_ratio = av_q2d(this->context->sample_aspect_ratio) *
+-        (double)this->bih.biWidth / (double)this->bih.biHeight;
+-      this->aspect_ratio_prio = 2;
+-      lprintf("ffmpeg aspect ratio: %f\n", this->aspect_ratio);
+-      set_stream_info(this);
++      this->aspect_ratio = av_q2d(this->context->sample_aspect_ratio) *
++        (double)this->bih.biWidth / (double)this->bih.biHeight;
++      this->aspect_ratio_prio = 2;
++      lprintf("ffmpeg aspect ratio: %f\n", this->aspect_ratio);
++      set_stream_info(this);
+     }
+ 
+     if (got_picture && this->av_frame->data[0]) {
+       /* got a picture, draw it */
+       got_one_picture = 1;
++
+       if(!this->av_frame->opaque) {
+-        /* indirect rendering */
++        /* indirect rendering */
+ 
+-        /* initialize the colorspace converter */
+-        if (!this->cs_convert_init) {
+-          if ((this->context->pix_fmt == PIX_FMT_RGB32) ||
+-              (this->context->pix_fmt == PIX_FMT_RGB565) ||
+-              (this->context->pix_fmt == PIX_FMT_RGB555) ||
+-              (this->context->pix_fmt == PIX_FMT_BGR24) ||
+-              (this->context->pix_fmt == PIX_FMT_RGB24) ||
+-              (this->context->pix_fmt == PIX_FMT_PAL8)) {
+-            this->output_format = XINE_IMGFMT_YUY2;
+-            init_yuv_planes(&this->yuv, (this->bih.biWidth + 15) & ~15, this->bih.biHeight);
+-            this->yuv_init = 1;
+-          }
+-          this->cs_convert_init = 1;
+-        }
+-
+-        if (this->aspect_ratio_prio == 0) {
+-          this->aspect_ratio = (double)this->bih.biWidth / (double)this->bih.biHeight;
+-          this->aspect_ratio_prio = 1;
+-          lprintf("default aspect ratio: %f\n", this->aspect_ratio);
+-          set_stream_info(this);
+-        }
++        /* initialize the colorspace converter */
++        if (!this->cs_convert_init) {
++          if ((this->context->pix_fmt == PIX_FMT_RGB32) ||
++              (this->context->pix_fmt == PIX_FMT_RGB565) ||
++              (this->context->pix_fmt == PIX_FMT_RGB555) ||
++              (this->context->pix_fmt == PIX_FMT_BGR24) ||
++              (this->context->pix_fmt == PIX_FMT_RGB24) ||
++              (this->context->pix_fmt == PIX_FMT_PAL8)) {
++            this->output_format = XINE_IMGFMT_YUY2;
++            init_yuv_planes(&this->yuv, (this->bih.biWidth + 15) & ~15, this->bih.biHeight);
++            this->yuv_init = 1;
++          }
++          this->cs_convert_init = 1;
++        }
++
++        if (this->aspect_ratio_prio == 0) {
++          this->aspect_ratio = (double)this->bih.biWidth / (double)this->bih.biHeight;
++          this->aspect_ratio_prio = 1;
++          lprintf("default aspect ratio: %f\n", this->aspect_ratio);
++          set_stream_info(this);
++        }
+ 
+         /* xine-lib expects the framesize to be a multiple of 16x16 (macroblock) */
+         img = this->stream->video_out->get_frame (this->stream->video_out,
+@@ -1436,7 +1961,7 @@ static void ff_handle_buffer (ff_video_d
+                                    this->av_frame->pict_type);
+ 
+       } else if (!this->av_frame->opaque) {
+-        /* colorspace conversion or copy */
++        /* colorspace conversion or copy */
+         ff_convert_frame(this, img);
+       }
+ 
+@@ -1524,6 +2049,15 @@ static void ff_decode_data (video_decode
+     _x_stream_info_set(this->stream, XINE_STREAM_INFO_FRAME_DURATION, (this->reported_video_step = this->video_step));
+   }
+ 
++  int codec_type = buf->type & 0xFFFF0000;
++  if (codec_type == BUF_VIDEO_MPEG) {
++    this->is_mpeg12 = 1;
++    if ( this->mpeg_parser == NULL ) {
++      this->mpeg_parser = calloc(1, sizeof(mpeg_parser_t));
++      mpeg_parser_init(this->mpeg_parser);
++    }
++  }
++
+   if (buf->decoder_flags & BUF_FLAG_PREVIEW) {
+ 
+     ff_handle_preview_buffer(this, buf);
+@@ -1541,24 +2075,24 @@ static void ff_decode_data (video_decode
+       ff_handle_header_buffer(this, buf);
+ 
+       if (buf->decoder_flags & BUF_FLAG_ASPECT) {
+-        if (this->aspect_ratio_prio < 3) {
+-          this->aspect_ratio = (double)buf->decoder_info[1] / (double)buf->decoder_info[2];
+-          this->aspect_ratio_prio = 3;
+-          lprintf("aspect ratio: %f\n", this->aspect_ratio);
+-          set_stream_info(this);
+-        }
++        if (this->aspect_ratio_prio < 3) {
++          this->aspect_ratio = (double)buf->decoder_info[1] / (double)buf->decoder_info[2];
++          this->aspect_ratio_prio = 3;
++          lprintf("aspect ratio: %f\n", this->aspect_ratio);
++          set_stream_info(this);
++        }
+       }
+ 
+     } else {
+ 
+       /* decode */
+       if (buf->pts)
+-        this->pts = buf->pts;
++        this->pts = buf->pts;
+ 
+       if (this->is_mpeg12) {
+-        ff_handle_mpeg12_buffer(this, buf);
++        ff_handle_mpeg12_buffer(this, buf);
+       } else {
+-        ff_handle_buffer(this, buf);
++        ff_handle_buffer(this, buf);
+       }
+ 
+     }
+@@ -1637,6 +2171,8 @@ static void ff_dispose (video_decoder_t
+ 
+   lprintf ("ff_dispose\n");
+ 
++  close_vaapi(va_context);
++
+   if (this->decoder_ok) {
+     xine_list_iterator_t it;
+     AVFrame *av_frame;
+@@ -1728,10 +2264,31 @@ static video_decoder_t *ff_video_open_pl
+ 
+   this->dr1_frames        = xine_list_new();
+ 
++  va_context = calloc(1, sizeof(ff_vaapi_context_t));
++  va_context->vaapi_context = calloc(1, sizeof(struct vaapi_context));
++
+ #ifdef LOG
+   this->debug_fmt = -1;
+ #endif
+ 
++  this->class->enable_vaapi = this->class->xine->config->register_bool(this->class->xine->config, "video.processing.ffmpeg_enable_vaapi", 1,
++    _("Enable usage of VAAPI"),
++    _("Let you enable the usage of VAAPI.\n"),
++    10, enable_vaapi, this->class);
++
++#ifdef INIT_ERROR_HACK
++  if ((init_vaapi(va_context, VAProfileH264High, 1280, 720) == VA_STATUS_SUCCESS )) {
++    close_vaapi(va_context);
++  } else {
++    this->class->enable_vaapi = 0;
++    xprintf(this->class->xine, XINE_VERBOSITY_LOG, _("ffmpeg_video_dec: VAAPI Enabled in config, but output video out driver does not support vaapi.\n"));
++    free(va_context->vaapi_context);
++    free(va_context);
++  }
++  va_context = calloc(1, sizeof(ff_vaapi_context_t));
++  va_context->vaapi_context = calloc(1, sizeof(struct vaapi_context));
++#endif
++
+   return &this->video_decoder;
+ }
+ 
+diff -Naur xine-lib-1.1.19.orig/src/combined/ffmpeg/Makefile.am xine-lib-1.1.19/src/combined/ffmpeg/Makefile.am
+--- xine-lib-1.1.19.orig/src/combined/ffmpeg/Makefile.am	2010-03-09 23:17:05.000000000 +0100
++++ xine-lib-1.1.19/src/combined/ffmpeg/Makefile.am	2010-10-14 14:49:01.000000000 +0200
+@@ -43,7 +43,7 @@
+ 
+ xineplug_decode_ff_la_CFLAGS = $(VISIBILITY_FLAG) $(AM_CFLAGS)
+ xineplug_decode_ff_la_LDFLAGS = $(xineplug_ldflags) $(IMPURE_TEXT_LDFLAGS)
+-xineplug_decode_ff_la_LIBADD = $(XINE_LIB) $(MLIB_LIBS) -lm $(ZLIB_LIBS) \
++xineplug_decode_ff_la_LIBADD = $(XINE_LIB) $(MLIB_LIBS) -lm -lva-x11 -lva -lX11 $(ZLIB_LIBS) \
+ 	$(link_ffmpeg) $(PTHREAD_LIBS) $(LTLIBINTL)
+ 
+ xineplug_decode_dvaudio_la_CFLAGS = $(VISIBILITY_FLAG) $(AM_CFLAGS)
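For review purposes it may help to see the general shape of the VA-API handshake that the ff_video_decoder.c changes implement against the libavcodec API of that era (avcodec_decode_video(), FF_BUFFER_TYPE_USER). The sketch below is illustrative only: my_vaapi, my_pick_surface(), my_get_format() and my_get_buffer() are invented names, and the surface pool and error handling of the real patch are omitted.

  #include <stdint.h>
  #include <libavcodec/avcodec.h>
  #include <libavcodec/vaapi.h>   /* struct vaapi_context */
  #include <va/va.h>

  static struct vaapi_context my_vaapi;     /* display/config_id/context_id filled after vaCreateContext() */
  static VASurfaceID my_pick_surface(void); /* hands out a free surface from a small pool (NUM_SURFACES) */

  /* Called by libavcodec to pick an output pixel format: selecting
   * PIX_FMT_VAAPI_VLD and attaching a struct vaapi_context switches the
   * decoder to hardware slice decoding. */
  static enum PixelFormat my_get_format(struct AVCodecContext *avctx,
                                        const enum PixelFormat *fmt)
  {
    int i;
    for (i = 0; fmt[i] != PIX_FMT_NONE; i++) {
      if (fmt[i] == PIX_FMT_VAAPI_VLD) {
        avctx->hwaccel_context = &my_vaapi;
        return fmt[i];
      }
    }
    return fmt[0];   /* no VA-API entry offered: fall back to a software format */
  }

  /* Called for every frame: instead of planar YUV memory, hand the decoder a
   * VA surface; by convention the surface id travels in data[3]. */
  static int my_get_buffer(struct AVCodecContext *avctx, AVFrame *frame)
  {
    VASurfaceID s = my_pick_surface();
    frame->type        = FF_BUFFER_TYPE_USER;
    frame->data[0]     = (uint8_t *)(uintptr_t)s;
    frame->data[3]     = (uint8_t *)(uintptr_t)s;
    frame->linesize[0] = 0;
    return 0;
  }

Note that the patch's own get_format() returns PIX_FMT_NONE when init_vaapi() fails instead of falling back to a software format; that is the behaviour the INIT_ERROR_HACK probe in ff_video_open_plugin() works around by testing VA-API once at plugin open and disabling the option when the probe fails.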