FFmpeg  4.4.5
hwcontext_qsv.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdint.h>
20 #include <string.h>
21 
22 #include <mfx/mfxvideo.h>
23 
24 #include "config.h"
25 
26 #if HAVE_PTHREADS
27 #include <pthread.h>
28 #endif
29 
30 #if CONFIG_VAAPI
31 #include "hwcontext_vaapi.h"
32 #endif
33 #if CONFIG_DXVA2
34 #include "hwcontext_dxva2.h"
35 #endif
36 
37 #include "buffer.h"
38 #include "common.h"
39 #include "hwcontext.h"
40 #include "hwcontext_internal.h"
41 #include "hwcontext_qsv.h"
42 #include "mem.h"
43 #include "pixfmt.h"
44 #include "pixdesc.h"
45 #include "time.h"
46 
/**
 * Evaluate to nonzero when the libmfx API version is at least MAJOR.MINOR.
 * The && operand is parenthesized explicitly so the ||/&& precedence is
 * unambiguous to readers (and silences -Wparentheses).
 */
#define QSV_VERSION_ATLEAST(MAJOR, MINOR)   \
    (MFX_VERSION_MAJOR > (MAJOR) ||         \
     (MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR)))
50 
51 typedef struct QSVDevicePriv {
54 
55 typedef struct QSVDeviceContext {
56  mfxHDL handle;
57  mfxHandleType handle_type;
58  mfxVersion ver;
59  mfxIMPL impl;
60 
64 
65 typedef struct QSVFramesContext {
66  mfxSession session_download;
68  mfxSession session_upload;
70 #if HAVE_PTHREADS
71  pthread_mutex_t session_lock;
72  pthread_cond_t session_cond;
73 #endif
74 
76  mfxFrameSurface1 *surfaces_internal;
78 
79  // used in the frame allocator for non-opaque surfaces
80  mfxMemId *mem_ids;
81  // used in the opaque alloc request for opaque surfaces
82  mfxFrameSurface1 **surface_ptrs;
83 
84  mfxExtOpaqueSurfaceAlloc opaque_alloc;
85  mfxExtBuffer *ext_buffers[1];
87 
88 static const struct {
89  mfxHandleType handle_type;
93 #if CONFIG_VAAPI
94  { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
95 #endif
96 #if CONFIG_DXVA2
97  { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
98 #endif
99  { 0 },
100 };
101 
102 static const struct {
103  enum AVPixelFormat pix_fmt;
104  uint32_t fourcc;
106  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
107  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
108  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
109  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
110 #if CONFIG_VAAPI
112  MFX_FOURCC_YUY2 },
113 #if QSV_VERSION_ATLEAST(1, 27)
114  { AV_PIX_FMT_Y210,
115  MFX_FOURCC_Y210 },
116 #endif
117 #endif
118 };
119 
121 {
122  int i;
123  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
125  return supported_pixel_formats[i].fourcc;
126  }
127  return 0;
128 }
129 
131 {
132  AVQSVDeviceContext *hwctx = ctx->hwctx;
133  QSVDeviceContext *s = ctx->internal->priv;
134 
135  mfxStatus err;
136  int i;
137 
138  for (i = 0; supported_handle_types[i].handle_type; i++) {
139  err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
140  &s->handle);
141  if (err == MFX_ERR_NONE) {
142  s->handle_type = supported_handle_types[i].handle_type;
143  s->child_device_type = supported_handle_types[i].device_type;
144  s->child_pix_fmt = supported_handle_types[i].pix_fmt;
145  break;
146  }
147  }
148  if (!s->handle) {
149  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
150  "from the session\n");
151  }
152 
153  err = MFXQueryIMPL(hwctx->session, &s->impl);
154  if (err == MFX_ERR_NONE)
155  err = MFXQueryVersion(hwctx->session, &s->ver);
156  if (err != MFX_ERR_NONE) {
157  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
158  return AVERROR_UNKNOWN;
159  }
160 
161  return 0;
162 }
163 
165 {
166  QSVFramesContext *s = ctx->internal->priv;
167 
168  if (s->session_download) {
169  MFXVideoVPP_Close(s->session_download);
170  MFXClose(s->session_download);
171  }
172  s->session_download = NULL;
173  s->session_download_init = 0;
174 
175  if (s->session_upload) {
176  MFXVideoVPP_Close(s->session_upload);
177  MFXClose(s->session_upload);
178  }
179  s->session_upload = NULL;
180  s->session_upload_init = 0;
181 
182 #if HAVE_PTHREADS
183  pthread_mutex_destroy(&s->session_lock);
184  pthread_cond_destroy(&s->session_cond);
185 #endif
186 
187  av_freep(&s->mem_ids);
188  av_freep(&s->surface_ptrs);
189  av_freep(&s->surfaces_internal);
190  av_buffer_unref(&s->child_frames_ref);
191 }
192 
/* Dummy AVBuffer release callback: the buffers handed out by the internal
 * pool point into the surfaces_internal array, which is owned by the frames
 * context and freed in bulk on uninit, so there is nothing to free here. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
196 
198 {
200  QSVFramesContext *s = ctx->internal->priv;
201  AVQSVFramesContext *hwctx = ctx->hwctx;
202 
203  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
204  s->nb_surfaces_used++;
205  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
206  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
207  }
208 
209  return NULL;
210 }
211 
213 {
214  AVQSVFramesContext *hwctx = ctx->hwctx;
215  QSVFramesContext *s = ctx->internal->priv;
216  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
217 
218  AVBufferRef *child_device_ref = NULL;
219  AVBufferRef *child_frames_ref = NULL;
220 
221  AVHWDeviceContext *child_device_ctx;
222  AVHWFramesContext *child_frames_ctx;
223 
224  int i, ret = 0;
225 
226  if (!device_priv->handle) {
228  "Cannot create a non-opaque internal surface pool without "
229  "a hardware handle\n");
230  return AVERROR(EINVAL);
231  }
232 
233  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
234  if (!child_device_ref)
235  return AVERROR(ENOMEM);
236  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
237 
238 #if CONFIG_VAAPI
239  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
240  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
241  child_device_hwctx->display = (VADisplay)device_priv->handle;
242  }
243 #endif
244 #if CONFIG_DXVA2
245  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
246  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
247  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
248  }
249 #endif
250 
251  ret = av_hwdevice_ctx_init(child_device_ref);
252  if (ret < 0) {
253  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
254  goto fail;
255  }
256 
257  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
258  if (!child_frames_ref) {
259  ret = AVERROR(ENOMEM);
260  goto fail;
261  }
262  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
263 
264  child_frames_ctx->format = device_priv->child_pix_fmt;
265  child_frames_ctx->sw_format = ctx->sw_format;
266  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
267  child_frames_ctx->width = FFALIGN(ctx->width, 16);
268  child_frames_ctx->height = FFALIGN(ctx->height, 16);
269 
270 #if CONFIG_DXVA2
271  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
272  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
273  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
274  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
275  else
276  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
277  }
278 #endif
279 
280  ret = av_hwframe_ctx_init(child_frames_ref);
281  if (ret < 0) {
282  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
283  goto fail;
284  }
285 
286 #if CONFIG_VAAPI
287  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
288  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
289  for (i = 0; i < ctx->initial_pool_size; i++)
290  s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
291  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
292  }
293 #endif
294 #if CONFIG_DXVA2
295  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
296  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
297  for (i = 0; i < ctx->initial_pool_size; i++)
298  s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
299  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
300  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
301  else
302  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
303  }
304 #endif
305 
306  s->child_frames_ref = child_frames_ref;
307  child_frames_ref = NULL;
308 
309 fail:
310  av_buffer_unref(&child_device_ref);
311  av_buffer_unref(&child_frames_ref);
312  return ret;
313 }
314 
315 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
316 {
317  const AVPixFmtDescriptor *desc;
318  uint32_t fourcc;
319 
320  desc = av_pix_fmt_desc_get(ctx->sw_format);
321  if (!desc)
322  return AVERROR(EINVAL);
323 
324  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
325  if (!fourcc)
326  return AVERROR(EINVAL);
327 
328  surf->Info.BitDepthLuma = desc->comp[0].depth;
329  surf->Info.BitDepthChroma = desc->comp[0].depth;
330  surf->Info.Shift = desc->comp[0].depth > 8;
331 
332  if (desc->log2_chroma_w && desc->log2_chroma_h)
333  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
334  else if (desc->log2_chroma_w)
335  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
336  else
337  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
338 
339  surf->Info.FourCC = fourcc;
340  surf->Info.Width = FFALIGN(ctx->width, 16);
341  surf->Info.CropW = ctx->width;
342  surf->Info.Height = FFALIGN(ctx->height, 16);
343  surf->Info.CropH = ctx->height;
344  surf->Info.FrameRateExtN = 25;
345  surf->Info.FrameRateExtD = 1;
346  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
347 
348  return 0;
349 }
350 
352 {
353  QSVFramesContext *s = ctx->internal->priv;
354  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
355 
356  int i, ret = 0;
357 
358  if (ctx->initial_pool_size <= 0) {
359  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
360  return AVERROR(EINVAL);
361  }
362 
363  s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
364  sizeof(*s->surfaces_internal));
365  if (!s->surfaces_internal)
366  return AVERROR(ENOMEM);
367 
368  for (i = 0; i < ctx->initial_pool_size; i++) {
369  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
370  if (ret < 0)
371  return ret;
372  }
373 
374  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
375  ret = qsv_init_child_ctx(ctx);
376  if (ret < 0)
377  return ret;
378  }
379 
380  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
382  if (!ctx->internal->pool_internal)
383  return AVERROR(ENOMEM);
384 
385  frames_hwctx->surfaces = s->surfaces_internal;
386  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
387 
388  return 0;
389 }
390 
391 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
392  mfxFrameAllocResponse *resp)
393 {
394  AVHWFramesContext *ctx = pthis;
395  QSVFramesContext *s = ctx->internal->priv;
396  AVQSVFramesContext *hwctx = ctx->hwctx;
397  mfxFrameInfo *i = &req->Info;
398  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
399 
400  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
401  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
402  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
403  return MFX_ERR_UNSUPPORTED;
404  if (i->Width > i1->Width || i->Height > i1->Height ||
405  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
406  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
407  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
408  i->Width, i->Height, i->FourCC, i->ChromaFormat,
409  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
410  return MFX_ERR_UNSUPPORTED;
411  }
412 
413  resp->mids = s->mem_ids;
414  resp->NumFrameActual = hwctx->nb_surfaces;
415 
416  return MFX_ERR_NONE;
417 }
418 
/* libmfx frame allocator "Free" callback.  No-op: the mem ids handed out by
 * frame_alloc() are owned by the frames context and freed on its uninit. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
423 
/* libmfx frame allocator "Lock" callback.  CPU mapping of these surfaces is
 * not implemented, so locking is reported as unsupported. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
428 
/* libmfx frame allocator "Unlock" callback.  Unsupported, matching
 * frame_lock() above. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
433 
/* libmfx frame allocator "GetHDL" callback.  The mem ids we expose are
 * already the native surface handles (set from Data.MemId at frames init),
 * so they can be returned directly. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
439 
441  mfxSession *session, int upload)
442 {
443  QSVFramesContext *s = ctx->internal->priv;
444  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
445  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
446  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
447 
448  mfxFrameAllocator frame_allocator = {
449  .pthis = ctx,
450  .Alloc = frame_alloc,
451  .Lock = frame_lock,
452  .Unlock = frame_unlock,
453  .GetHDL = frame_get_hdl,
454  .Free = frame_free,
455  };
456 
457  mfxVideoParam par;
458  mfxStatus err;
459 
460  err = MFXInit(device_priv->impl, &device_priv->ver, session);
461  if (err != MFX_ERR_NONE) {
462  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
463  return AVERROR_UNKNOWN;
464  }
465 
466  if (device_priv->handle) {
467  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
468  device_priv->handle);
469  if (err != MFX_ERR_NONE)
470  return AVERROR_UNKNOWN;
471  }
472 
473  if (!opaque) {
474  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
475  if (err != MFX_ERR_NONE)
476  return AVERROR_UNKNOWN;
477  }
478 
479  memset(&par, 0, sizeof(par));
480 
481  if (opaque) {
482  par.ExtParam = s->ext_buffers;
483  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
484  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
485  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
486  } else {
487  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
488  MFX_IOPATTERN_IN_VIDEO_MEMORY;
489  }
490 
491  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
492  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
493  par.AsyncDepth = 1;
494 
495  par.vpp.In = frames_hwctx->surfaces[0].Info;
496 
497  /* Apparently VPP requires the frame rate to be set to some value, otherwise
498  * init will fail (probably for the framerate conversion filter). Since we
499  * are only doing data upload/download here, we just invent an arbitrary
500  * value */
501  par.vpp.In.FrameRateExtN = 25;
502  par.vpp.In.FrameRateExtD = 1;
503  par.vpp.Out = par.vpp.In;
504 
505  err = MFXVideoVPP_Init(*session, &par);
506  if (err != MFX_ERR_NONE) {
507  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
508  "Surface upload/download will not be possible\n");
509  MFXClose(*session);
510  *session = NULL;
511  }
512 
513  return 0;
514 }
515 
517 {
518  QSVFramesContext *s = ctx->internal->priv;
519  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
520 
521  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
522 
523  uint32_t fourcc;
524  int i, ret;
525 
526  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
527  if (!fourcc) {
528  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
529  return AVERROR(ENOSYS);
530  }
531 
532  if (!ctx->pool) {
533  ret = qsv_init_pool(ctx, fourcc);
534  if (ret < 0) {
535  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
536  return ret;
537  }
538  }
539 
540  if (opaque) {
541  s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
542  sizeof(*s->surface_ptrs));
543  if (!s->surface_ptrs)
544  return AVERROR(ENOMEM);
545 
546  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
547  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
548 
549  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
550  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
551  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
552 
553  s->opaque_alloc.Out = s->opaque_alloc.In;
554 
555  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
556  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
557 
558  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
559  } else {
560  s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
561  if (!s->mem_ids)
562  return AVERROR(ENOMEM);
563 
564  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
565  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
566  }
567 
568  s->session_download = NULL;
569  s->session_upload = NULL;
570 
571  s->session_download_init = 0;
572  s->session_upload_init = 0;
573 
574 #if HAVE_PTHREADS
575  pthread_mutex_init(&s->session_lock, NULL);
576  pthread_cond_init(&s->session_cond, NULL);
577 #endif
578 
579  return 0;
580 }
581 
583 {
584  frame->buf[0] = av_buffer_pool_get(ctx->pool);
585  if (!frame->buf[0])
586  return AVERROR(ENOMEM);
587 
588  frame->data[3] = frame->buf[0]->data;
590  frame->width = ctx->width;
591  frame->height = ctx->height;
592 
593  return 0;
594 }
595 
598  enum AVPixelFormat **formats)
599 {
600  enum AVPixelFormat *fmts;
601 
602  fmts = av_malloc_array(2, sizeof(*fmts));
603  if (!fmts)
604  return AVERROR(ENOMEM);
605 
606  fmts[0] = ctx->sw_format;
607  fmts[1] = AV_PIX_FMT_NONE;
608 
609  *formats = fmts;
610 
611  return 0;
612 }
613 
615  AVHWFramesContext *src_ctx, int flags)
616 {
617  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
618  int i;
619 
620  switch (dst_ctx->device_ctx->type) {
621 #if CONFIG_VAAPI
623  {
624  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
625  dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
626  sizeof(*dst_hwctx->surface_ids));
627  if (!dst_hwctx->surface_ids)
628  return AVERROR(ENOMEM);
629  for (i = 0; i < src_hwctx->nb_surfaces; i++)
630  dst_hwctx->surface_ids[i] =
631  *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
632  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
633  }
634  break;
635 #endif
636 #if CONFIG_DXVA2
638  {
639  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
640  dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
641  sizeof(*dst_hwctx->surfaces));
642  if (!dst_hwctx->surfaces)
643  return AVERROR(ENOMEM);
644  for (i = 0; i < src_hwctx->nb_surfaces; i++)
645  dst_hwctx->surfaces[i] =
646  (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
647  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
648  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
649  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
650  else
651  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
652  }
653  break;
654 #endif
655  default:
656  return AVERROR(ENOSYS);
657  }
658 
659  return 0;
660 }
661 
663  AVFrame *dst, const AVFrame *src, int flags)
664 {
665  QSVFramesContext *s = ctx->internal->priv;
666  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
667  AVHWFramesContext *child_frames_ctx;
668  const AVPixFmtDescriptor *desc;
669  uint8_t *child_data;
670  AVFrame *dummy;
671  int ret = 0;
672 
673  if (!s->child_frames_ref)
674  return AVERROR(ENOSYS);
675  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
676 
677  switch (child_frames_ctx->device_ctx->type) {
678 #if CONFIG_VAAPI
680  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
681  break;
682 #endif
683 #if CONFIG_DXVA2
685  child_data = surf->Data.MemId;
686  break;
687 #endif
688  default:
689  return AVERROR(ENOSYS);
690  }
691 
692  if (dst->format == child_frames_ctx->format) {
693  ret = ff_hwframe_map_create(s->child_frames_ref,
694  dst, src, NULL, NULL);
695  if (ret < 0)
696  return ret;
697 
698  dst->width = src->width;
699  dst->height = src->height;
700  dst->data[3] = child_data;
701 
702  return 0;
703  }
704 
706  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
707  // This only supports mapping to software.
708  return AVERROR(ENOSYS);
709  }
710 
711  dummy = av_frame_alloc();
712  if (!dummy)
713  return AVERROR(ENOMEM);
714 
715  dummy->buf[0] = av_buffer_ref(src->buf[0]);
716  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
717  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
718  goto fail;
719 
720  dummy->format = child_frames_ctx->format;
721  dummy->width = src->width;
722  dummy->height = src->height;
723  dummy->data[3] = child_data;
724 
725  ret = av_hwframe_map(dst, dummy, flags);
726 
727 fail:
729 
730  return ret;
731 }
732 
734  const AVFrame *src)
735 {
736  QSVFramesContext *s = ctx->internal->priv;
737  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
738  int download = !!src->hw_frames_ctx;
739  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
740 
741  AVFrame *dummy;
742  int ret;
743 
744  dummy = av_frame_alloc();
745  if (!dummy)
746  return AVERROR(ENOMEM);
747 
748  dummy->format = child_frames_ctx->format;
749  dummy->width = src->width;
750  dummy->height = src->height;
751  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
752  dummy->data[3] = surf->Data.MemId;
753  dummy->hw_frames_ctx = s->child_frames_ref;
754 
755  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
757 
758  dummy->buf[0] = NULL;
759  dummy->data[3] = NULL;
760  dummy->hw_frames_ctx = NULL;
761 
763 
764  return ret;
765 }
766 
767 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
768 {
769  switch (frame->format) {
770  case AV_PIX_FMT_NV12:
771  case AV_PIX_FMT_P010:
772  surface->Data.Y = frame->data[0];
773  surface->Data.UV = frame->data[1];
774  break;
775 
776  case AV_PIX_FMT_YUV420P:
777  surface->Data.Y = frame->data[0];
778  surface->Data.U = frame->data[1];
779  surface->Data.V = frame->data[2];
780  break;
781 
782  case AV_PIX_FMT_BGRA:
783  surface->Data.B = frame->data[0];
784  surface->Data.G = frame->data[0] + 1;
785  surface->Data.R = frame->data[0] + 2;
786  surface->Data.A = frame->data[0] + 3;
787  break;
788 #if CONFIG_VAAPI
789  case AV_PIX_FMT_YUYV422:
790  surface->Data.Y = frame->data[0];
791  surface->Data.U = frame->data[0] + 1;
792  surface->Data.V = frame->data[0] + 3;
793  break;
794 
795  case AV_PIX_FMT_Y210:
796  surface->Data.Y16 = (mfxU16 *)frame->data[0];
797  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
798  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
799  break;
800 #endif
801  default:
802  return MFX_ERR_UNSUPPORTED;
803  }
804  surface->Data.Pitch = frame->linesize[0];
805  surface->Data.TimeStamp = frame->pts;
806 
807  return 0;
808 }
809 
811  const AVFrame *src)
812 {
813  QSVFramesContext *s = ctx->internal->priv;
814  mfxFrameSurface1 out = {{ 0 }};
815  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
816 
817  mfxSyncPoint sync = NULL;
818  mfxStatus err;
819  int ret = 0;
820 
821  while (!s->session_download_init && !s->session_download && !ret) {
822 #if HAVE_PTHREADS
823  if (pthread_mutex_trylock(&s->session_lock) == 0) {
824 #endif
825  if (!s->session_download_init) {
826  ret = qsv_init_internal_session(ctx, &s->session_download, 0);
827  if (s->session_download)
828  s->session_download_init = 1;
829  }
830 #if HAVE_PTHREADS
831  pthread_mutex_unlock(&s->session_lock);
832  pthread_cond_signal(&s->session_cond);
833  } else {
834  pthread_mutex_lock(&s->session_lock);
835  while (!s->session_download_init && !s->session_download) {
836  pthread_cond_wait(&s->session_cond, &s->session_lock);
837  }
838  pthread_mutex_unlock(&s->session_lock);
839  }
840 #endif
841  }
842 
843  if (ret < 0)
844  return ret;
845 
846  if (!s->session_download) {
847  if (s->child_frames_ref)
848  return qsv_transfer_data_child(ctx, dst, src);
849 
850  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
851  return AVERROR(ENOSYS);
852  }
853 
854  out.Info = in->Info;
855  map_frame_to_surface(dst, &out);
856 
857  do {
858  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
859  if (err == MFX_WRN_DEVICE_BUSY)
860  av_usleep(1);
861  } while (err == MFX_WRN_DEVICE_BUSY);
862 
863  if (err < 0 || !sync) {
864  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
865  return AVERROR_UNKNOWN;
866  }
867 
868  do {
869  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
870  } while (err == MFX_WRN_IN_EXECUTION);
871  if (err < 0) {
872  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
873  return AVERROR_UNKNOWN;
874  }
875 
876  return 0;
877 }
878 
880  const AVFrame *src)
881 {
882  QSVFramesContext *s = ctx->internal->priv;
883  mfxFrameSurface1 in = {{ 0 }};
884  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
885 
886  mfxSyncPoint sync = NULL;
887  mfxStatus err;
888  int ret = 0;
889  /* make a copy if the input is not padded as libmfx requires */
890  AVFrame tmp_frame;
891  const AVFrame *src_frame;
892  int realigned = 0;
893 
894 
895  while (!s->session_upload_init && !s->session_upload && !ret) {
896 #if HAVE_PTHREADS
897  if (pthread_mutex_trylock(&s->session_lock) == 0) {
898 #endif
899  if (!s->session_upload_init) {
900  ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
901  if (s->session_upload)
902  s->session_upload_init = 1;
903  }
904 #if HAVE_PTHREADS
905  pthread_mutex_unlock(&s->session_lock);
906  pthread_cond_signal(&s->session_cond);
907  } else {
908  pthread_mutex_lock(&s->session_lock);
909  while (!s->session_upload_init && !s->session_upload) {
910  pthread_cond_wait(&s->session_cond, &s->session_lock);
911  }
912  pthread_mutex_unlock(&s->session_lock);
913  }
914 #endif
915  }
916  if (ret < 0)
917  return ret;
918 
919  if (src->height & 15 || src->linesize[0] & 15) {
920  realigned = 1;
921  memset(&tmp_frame, 0, sizeof(tmp_frame));
922  tmp_frame.format = src->format;
923  tmp_frame.width = FFALIGN(src->width, 16);
924  tmp_frame.height = FFALIGN(src->height, 16);
925  ret = av_frame_get_buffer(&tmp_frame, 0);
926  if (ret < 0)
927  return ret;
928 
929  ret = av_frame_copy(&tmp_frame, src);
930  if (ret < 0) {
931  av_frame_unref(&tmp_frame);
932  return ret;
933  }
934  }
935 
936  src_frame = realigned ? &tmp_frame : src;
937 
938  if (!s->session_upload) {
939  if (s->child_frames_ref)
940  return qsv_transfer_data_child(ctx, dst, src_frame);
941 
942  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
943  return AVERROR(ENOSYS);
944  }
945 
946  in.Info = out->Info;
947  map_frame_to_surface(src_frame, &in);
948 
949  do {
950  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
951  if (err == MFX_WRN_DEVICE_BUSY)
952  av_usleep(1);
953  } while (err == MFX_WRN_DEVICE_BUSY);
954 
955  if (err < 0 || !sync) {
956  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
957  return AVERROR_UNKNOWN;
958  }
959 
960  do {
961  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
962  } while (err == MFX_WRN_IN_EXECUTION);
963  if (err < 0) {
964  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
965  return AVERROR_UNKNOWN;
966  }
967 
968  if (realigned)
969  av_frame_unref(&tmp_frame);
970 
971  return 0;
972 }
973 
975  AVHWFramesContext *src_ctx, int flags)
976 {
977  QSVFramesContext *s = dst_ctx->internal->priv;
978  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
979  int i;
980 
981  switch (src_ctx->device_ctx->type) {
982 #if CONFIG_VAAPI
984  {
985  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
986  s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
987  sizeof(*s->surfaces_internal));
988  if (!s->surfaces_internal)
989  return AVERROR(ENOMEM);
990  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
991  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
992  s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
993  }
994  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
995  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
996  }
997  break;
998 #endif
999 #if CONFIG_DXVA2
1001  {
1002  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1003  s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
1004  sizeof(*s->surfaces_internal));
1005  if (!s->surfaces_internal)
1006  return AVERROR(ENOMEM);
1007  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1008  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1009  s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
1010  }
1011  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1012  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1013  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1014  else
1015  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1016  }
1017  break;
1018 #endif
1019  default:
1020  return AVERROR(ENOSYS);
1021  }
1022 
1023  dst_hwctx->surfaces = s->surfaces_internal;
1024 
1025  return 0;
1026 }
1027 
1028 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1029  AVFrame *dst, const AVFrame *src, int flags)
1030 {
1031  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1032  int i, err;
1033 
1034  for (i = 0; i < hwctx->nb_surfaces; i++) {
1035 #if CONFIG_VAAPI
1036  if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
1037  (VASurfaceID)(uintptr_t)src->data[3])
1038  break;
1039 #endif
1040 #if CONFIG_DXVA2
1041  if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
1042  (IDirect3DSurface9*)(uintptr_t)src->data[3])
1043  break;
1044 #endif
1045  }
1046  if (i >= hwctx->nb_surfaces) {
1047  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1048  "is not in the mapped frames context.\n");
1049  return AVERROR(EINVAL);
1050  }
1051 
1053  dst, src, NULL, NULL);
1054  if (err)
1055  return err;
1056 
1057  dst->width = src->width;
1058  dst->height = src->height;
1059  dst->data[3] = (uint8_t*)&hwctx->surfaces[i];
1060 
1061  return 0;
1062 }
1063 
1065  const void *hwconfig,
1066  AVHWFramesConstraints *constraints)
1067 {
1068  int i;
1069 
1071  sizeof(*constraints->valid_sw_formats));
1072  if (!constraints->valid_sw_formats)
1073  return AVERROR(ENOMEM);
1074 
1075  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1076  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1078 
1079  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1080  if (!constraints->valid_hw_formats)
1081  return AVERROR(ENOMEM);
1082 
1083  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1084  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1085 
1086  return 0;
1087 }
1088 
1090 {
1091  AVQSVDeviceContext *hwctx = ctx->hwctx;
1092  QSVDevicePriv *priv = ctx->user_opaque;
1093 
1094  if (hwctx->session)
1095  MFXClose(hwctx->session);
1096 
1098  av_freep(&priv);
1099 }
1100 
1101 static mfxIMPL choose_implementation(const char *device)
1102 {
1103  static const struct {
1104  const char *name;
1105  mfxIMPL impl;
1106  } impl_map[] = {
1107  { "auto", MFX_IMPL_AUTO },
1108  { "sw", MFX_IMPL_SOFTWARE },
1109  { "hw", MFX_IMPL_HARDWARE },
1110  { "auto_any", MFX_IMPL_AUTO_ANY },
1111  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1112  { "hw2", MFX_IMPL_HARDWARE2 },
1113  { "hw3", MFX_IMPL_HARDWARE3 },
1114  { "hw4", MFX_IMPL_HARDWARE4 },
1115  };
1116 
1117  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1118  int i;
1119 
1120  if (device) {
1121  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1122  if (!strcmp(device, impl_map[i].name)) {
1123  impl = impl_map[i].impl;
1124  break;
1125  }
1126  if (i == FF_ARRAY_ELEMS(impl_map))
1127  impl = strtol(device, NULL, 0);
1128  }
1129 
1130  return impl;
1131 }
1132 
1134  mfxIMPL implementation,
1135  AVHWDeviceContext *child_device_ctx,
1136  int flags)
1137 {
1138  AVQSVDeviceContext *hwctx = ctx->hwctx;
1139 
1140  mfxVersion ver = { { 3, 1 } };
1141  mfxHDL handle;
1142  mfxHandleType handle_type;
1143  mfxStatus err;
1144  int ret;
1145 
1146  switch (child_device_ctx->type) {
1147 #if CONFIG_VAAPI
1149  {
1150  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1151  handle_type = MFX_HANDLE_VA_DISPLAY;
1152  handle = (mfxHDL)child_device_hwctx->display;
1153  }
1154  break;
1155 #endif
1156 #if CONFIG_DXVA2
1158  {
1159  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1160  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1161  handle = (mfxHDL)child_device_hwctx->devmgr;
1162  }
1163  break;
1164 #endif
1165  default:
1166  ret = AVERROR(ENOSYS);
1167  goto fail;
1168  }
1169 
1170  err = MFXInit(implementation, &ver, &hwctx->session);
1171  if (err != MFX_ERR_NONE) {
1172  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1173  "%d.\n", err);
1174  ret = AVERROR_UNKNOWN;
1175  goto fail;
1176  }
1177 
1178  err = MFXQueryVersion(hwctx->session, &ver);
1179  if (err != MFX_ERR_NONE) {
1180  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1181  ret = AVERROR_UNKNOWN;
1182  goto fail;
1183  }
1184 
1186  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1187  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1188 
1189  MFXClose(hwctx->session);
1190 
1191  err = MFXInit(implementation, &ver, &hwctx->session);
1192  if (err != MFX_ERR_NONE) {
1194  "Error initializing an MFX session: %d.\n", err);
1195  ret = AVERROR_UNKNOWN;
1196  goto fail;
1197  }
1198 
1199  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1200  if (err != MFX_ERR_NONE) {
1201  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1202  "%d\n", err);
1203  ret = AVERROR_UNKNOWN;
1204  goto fail;
1205  }
1206 
1207  return 0;
1208 
1209 fail:
1210  if (hwctx->session)
1211  MFXClose(hwctx->session);
1212  return ret;
1213 }
1214 
1216  AVHWDeviceContext *child_device_ctx,
1217  AVDictionary *opts, int flags)
1218 {
1219  return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
1220  child_device_ctx, flags);
1221 }
1222 
1223 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1224  AVDictionary *opts, int flags)
1225 {
1226  QSVDevicePriv *priv;
1227  enum AVHWDeviceType child_device_type;
1228  AVHWDeviceContext *child_device;
1229  AVDictionary *child_device_opts;
1230  AVDictionaryEntry *e;
1231 
1232  mfxIMPL impl;
1233  int ret;
1234 
1235  priv = av_mallocz(sizeof(*priv));
1236  if (!priv)
1237  return AVERROR(ENOMEM);
1238 
1239  ctx->user_opaque = priv;
1240  ctx->free = qsv_device_free;
1241 
1242  e = av_dict_get(opts, "child_device", NULL, 0);
1243 
1244  child_device_opts = NULL;
1245  if (CONFIG_VAAPI) {
1246  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1247  // libmfx does not actually implement VAAPI properly, rather it
1248  // depends on the specific behaviour of a matching iHD driver when
1249  // used on recent Intel hardware. Set options to the VAAPI device
1250  // creation so that we should pick a usable setup by default if
1251  // possible, even when multiple devices and drivers are available.
1252  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1253  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1254  } else if (CONFIG_DXVA2)
1255  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1256  else {
1257  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1258  return AVERROR(ENOSYS);
1259  }
1260 
1261  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1262  e ? e->value : NULL, child_device_opts, 0);
1263 
1264  av_dict_free(&child_device_opts);
1265  if (ret < 0)
1266  return ret;
1267 
1268  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1269 
1270  impl = choose_implementation(device);
1271 
1272  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1273 }
1274 
1277  .name = "QSV",
1278 
1279  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1280  .device_priv_size = sizeof(QSVDeviceContext),
1281  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1282  .frames_priv_size = sizeof(QSVFramesContext),
1283 
1284  .device_create = qsv_device_create,
1285  .device_derive = qsv_device_derive,
1286  .device_init = qsv_device_init,
1287  .frames_get_constraints = qsv_frames_get_constraints,
1288  .frames_init = qsv_frames_init,
1289  .frames_uninit = qsv_frames_uninit,
1290  .frames_get_buffer = qsv_get_buffer,
1291  .transfer_get_formats = qsv_transfer_get_formats,
1292  .transfer_data_to = qsv_transfer_data_to,
1293  .transfer_data_from = qsv_transfer_data_from,
1294  .map_to = qsv_map_to,
1295  .map_from = qsv_map_from,
1296  .frames_derive_to = qsv_frames_derive_to,
1297  .frames_derive_from = qsv_frames_derive_from,
1298 
1299  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1300 };
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! 
*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
uint8_t
refcounted data buffer API
#define flags(name, subs,...)
Definition: cbs_av1.c:572
#define s(width, name)
Definition: cbs_vp9.c:257
#define fail()
Definition: checkasm.h:133
common internal and external API header
#define CONFIG_VAAPI
Definition: config.h:550
#define CONFIG_DXVA2
Definition: config.h:546
#define NULL
Definition: coverity.c:32
static AVFrame * frame
#define pthread_mutex_lock(a)
Definition: ffprobe.c:63
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:67
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:125
AVBufferRef * av_buffer_create(uint8_t *data, buffer_size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:29
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:379
AVBufferPool * av_buffer_pool_init2(buffer_size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, buffer_size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:245
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:71
#define AVERROR(e)
Definition: error.h:43
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:553
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:337
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:799
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
void * av_mallocz_array(size_t nmemb, size_t size)
Allocate a memory block for an array with av_mallocz().
Definition: mem.c:190
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:200
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:142
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:610
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:737
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:789
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:443
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWDeviceType
Definition: hwcontext.h:27
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
An API-specific header for AV_HWDEVICE_TYPE_DXVA2.
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
static const struct @305 supported_pixel_formats[]
static int qsv_device_init(AVHWDeviceContext *ctx)
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
mfxHandleType handle_type
Definition: hwcontext_qsv.c:89
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
static void qsv_device_free(AVHWDeviceContext *ctx)
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
const HWContextType ff_hwcontext_type_qsv
static int qsv_frames_init(AVHWFramesContext *ctx)
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:91
enum AVHWDeviceType device_type
Definition: hwcontext_qsv.c:90
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
static const struct @304 supported_handle_types[]
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
static mfxIMPL choose_implementation(const char *device)
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
uint32_t fourcc
static void qsv_frames_uninit(AVHWFramesContext *ctx)
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
static AVBufferRef * qsv_pool_alloc(void *opaque, buffer_size_t size)
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
An API-specific header for AV_HWDEVICE_TYPE_QSV.
API-specific header for AV_HWDEVICE_TYPE_VAAPI.
int i
Definition: input.c:407
int buffer_size_t
Definition: internal.h:306
const char * desc
Definition: libsvtav1.c:79
#define FFALIGN(x, a)
Definition: macros.h:48
Memory handling functions.
int dummy
Definition: motion.c:64
const char data[16]
Definition: mxf.c:142
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:152
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:144
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:133
_fmutex pthread_mutex_t
Definition: os2threads.h:53
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:192
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2573
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:140
pixel format definitions
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:451
#define AV_PIX_FMT_P010
Definition: pixfmt.h:448
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:137
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
@ AV_PIX_FMT_VAAPI
Definition: pixfmt.h:122
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:222
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
const char * name
Definition: qsvenc.c:46
formats
Definition: signature.h:48
#define FF_ARRAY_ELEMS(a)
A reference to a data buffer.
Definition: buffer.h:84
uint8_t * data
The data buffer.
Definition: buffer.h:92
This struct is allocated as AVHWDeviceContext.hwctx.
IDirect3DDeviceManager9 * devmgr
This struct is allocated as AVHWFramesContext.hwctx.
IDirect3DSurface9 ** surfaces
The surface pool.
DWORD surface_type
The surface type (e.g.
char * value
Definition: dict.h:83
AVFormatInternal * internal
An opaque field for libavformat internal usage.
Definition: avformat.h:1699
This structure describes decoded (raw) audio or video data.
Definition: frame.h:318
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:411
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:332
int width
Definition: frame.h:376
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:657
int height
Definition: frame.h:376
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:509
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:349
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:391
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
mfxSession session
Definition: hwcontext_qsv.h:36
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
VAAPI connection details.
VADisplay display
The VADisplay handle, to be filled by the user.
VAAPI-specific data associated with a frame pool.
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
enum AVHWDeviceType type
mfxVersion ver
Definition: hwcontext_qsv.c:58
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:61
mfxHandleType handle_type
Definition: hwcontext_qsv.c:57
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:62
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:52
mfxSession session_upload
Definition: hwcontext_qsv.c:68
mfxSession session_download
Definition: hwcontext_qsv.c:66
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:80
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:85
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:75
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:84
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:76
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:82
#define av_malloc_array(a, b)
#define av_freep(p)
#define av_log(a,...)
#define src
Definition: vp8dsp.c:255
FILE * out
Definition: movenc.c:54
AVFormatContext * ctx
Definition: movenc.c:48
AVDictionary * opts
Definition: movenc.c:50
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
int size
if(ret< 0)
Definition: vf_mcdeint.c:282