Libav
hwcontext_qsv.c
/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdint.h>
#include <string.h>

#include <mfx/mfxvideo.h>

#include "config.h"

#if CONFIG_VAAPI
#include "hwcontext_vaapi.h"
#endif
#if CONFIG_DXVA2
#include "hwcontext_dxva2.h"
#endif

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_qsv.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"
#include "time.h"

typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;

typedef struct QSVDeviceContext {
    mfxHDL        handle;
    mfxHandleType handle_type;
    mfxVersion    ver;
    mfxIMPL       impl;

    enum AVHWDeviceType child_device_type;
    enum AVPixelFormat  child_pix_fmt;
} QSVDeviceContext;

typedef struct QSVFramesContext {
    mfxSession session_download;
    mfxSession session_upload;

    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;

static const struct {
    mfxHandleType       handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat  pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY,          AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};

static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t           fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
};

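/* qsv_device_init() probes the user-supplied mfxSession for a native device
 * handle (a VADisplay or an IDirect3DDeviceManager9), using the
 * supported_handle_types table above, and records the session's
 * implementation and API version for the internal sessions created later. */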
static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext       *s = ctx->internal->priv;

    mfxStatus err;
    int i;

    for (i = 0; supported_handle_types[i].handle_type; i++) {
        err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
                                     &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type       = supported_handle_types[i].handle_type;
            s->child_device_type = supported_handle_types[i].device_type;
            s->child_pix_fmt     = supported_handle_types[i].pix_fmt;
            break;
        }
    }
    if (!s->handle) {
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_buffer_unref(&s->child_frames_ref);
}

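/* Buffer pool callbacks for the internal frame pool: qsv_pool_alloc() hands
 * out pointers into the preallocated surfaces_internal array one by one, and
 * qsv_pool_release_dummy() is intentionally a no-op, because the surfaces are
 * owned by the frames context and only freed in qsv_frames_uninit(). */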
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}

static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext    *ctx = (AVHWFramesContext*)opaque;
    QSVFramesContext       *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;

    if (s->nb_surfaces_used < hwctx->nb_surfaces) {
        s->nb_surfaces_used++;
        return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
                                sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
    }

    return NULL;
}

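/* For non-opaque pools, the actual surface memory comes from a "child" VAAPI
 * or DXVA2 frames context created here; each mfxFrameSurface1 in
 * surfaces_internal has its Data.MemId pointed at the corresponding child
 * surface, so libmfx can use them directly through the frame allocator. */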
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext     *hwctx = ctx->hwctx;
    QSVFramesContext           *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    child_frames_ctx->width             = ctx->width;
    child_frames_ctx->height            = ctx->height;

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref    = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}

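/* Allocate and describe the fixed pool of initial_pool_size surfaces: the
 * mfxFrameInfo of every surface is filled from the software pixel format
 * descriptor, with an arbitrary 25/1 frame rate (see the note in
 * qsv_init_internal_session()). */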
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    const AVPixFmtDescriptor *desc;

    int i, ret = 0;

    desc = av_pix_fmt_desc_get(ctx->sw_format);
    if (!desc)
        return AVERROR_BUG;

    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        mfxFrameSurface1 *surf = &s->surfaces_internal[i];

        surf->Info.BitDepthLuma   = desc->comp[0].depth;
        surf->Info.BitDepthChroma = desc->comp[0].depth;
        surf->Info.Shift          = desc->comp[0].depth > 8;

        if (desc->log2_chroma_w && desc->log2_chroma_h)
            surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;

        surf->Info.FourCC         = fourcc;
        surf->Info.Width          = ctx->width;
        surf->Info.CropW          = ctx->width;
        surf->Info.Height         = ctx->height;
        surf->Info.CropH          = ctx->height;
        surf->Info.FrameRateExtN  = 25;
        surf->Info.FrameRateExtD  = 1;
    }

    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}

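/* mfxFrameAllocator callbacks installed on the internal upload/download
 * sessions. Alloc() does not allocate anything: after checking that the
 * request matches the pool, it returns the MemIds of the pool's existing
 * surfaces. Lock()/Unlock() are unsupported because these surfaces are never
 * mapped to system memory here, and GetHDL() returns the MemId itself as the
 * native handle. */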
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext    *ctx = pthis;
    QSVFramesContext       *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i  = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;
    if (i->Width  != i1->Width  || i->Height       != i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids           = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

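/* Create a private MFX session running a VPP instance used purely for
 * copying: one side of the VPP is system memory, the other side is the
 * pool's video (or opaque) memory, with the direction selected through the
 * IOPattern by the 'upload' flag. */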
static int qsv_init_internal_session(AVHWFramesContext *ctx,
                                     mfxSession *session, int upload)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext    *device_priv = ctx->device_ctx->internal->priv;
    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis  = ctx,
        .Alloc  = frame_alloc,
        .Lock   = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free   = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (!opaque) {
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        par.ExtParam    = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern   = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                   MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error opening the internal VPP session\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

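/* Frames-context initialization: map sw_format to an MFX FourCC, create the
 * internal pool if the caller did not provide one, prepare either the
 * opaque-allocation ext buffer (opaque surfaces) or the mem_ids array
 * (external video memory), and finally open the download and upload
 * sessions. */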
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc = 0;
    int i, ret;

    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
        if (supported_pixel_formats[i].pix_fmt == ctx->sw_format) {
            fourcc = supported_pixel_formats[i].fourcc;
            break;
        }
    }
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type       = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    ret = qsv_init_internal_session(ctx, &s->session_download, 0);
    if (ret < 0)
        return ret;

    ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
    if (ret < 0)
        return ret;

    return 0;
}

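/* av_hwframe_get_buffer() callback: the returned AVFrame carries the
 * mfxFrameSurface1 pointer in data[3], as defined for AV_PIX_FMT_QSV. */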
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_QSV;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
                                    enum AVHWFrameTransferDirection dir,
                                    enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

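/* Download/upload between system memory and QSV surfaces: the AVFrame planes
 * are wrapped in a system-memory mfxFrameSurface1 and pushed through the
 * internal VPP session, busy-waiting on MFX_WRN_DEVICE_BUSY and then blocking
 * in MFXVideoCORE_SyncOperation() until the copy completes. */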
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    QSVFramesContext  *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    out.Info          = in->Info;
    out.Data.PitchLow = dst->linesize[0];
    out.Data.Y        = dst->data[0];
    out.Data.U        = dst->data[1];
    out.Data.V        = dst->data[2];
    out.Data.A        = dst->data[3];

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext   *s = ctx->internal->priv;
    mfxFrameSurface1   in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    in.Info          = out->Info;
    in.Data.PitchLow = src->linesize[0];
    in.Data.Y        = src->data[0];
    in.Data.U        = src->data[1];
    in.Data.V        = src->data[2];
    in.Data.A        = src->data[3];

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
                                      const void *hwconfig,
                                      AVHWFramesConstraints *constraints)
{
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
        constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
    constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void qsv_device_free(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv       *priv = ctx->user_opaque;

    if (hwctx->session)
        MFXClose(hwctx->session);

    av_buffer_unref(&priv->child_device_ctx);
    av_freep(&priv);
}

static mfxIMPL choose_implementation(const char *device)
{
    static const struct {
        const char *name;
        mfxIMPL     impl;
    } impl_map[] = {
        { "auto",     MFX_IMPL_AUTO         },
        { "sw",       MFX_IMPL_SOFTWARE     },
        { "hw",       MFX_IMPL_HARDWARE     },
        { "auto_any", MFX_IMPL_AUTO_ANY     },
        { "hw_any",   MFX_IMPL_HARDWARE_ANY },
        { "hw2",      MFX_IMPL_HARDWARE2    },
        { "hw3",      MFX_IMPL_HARDWARE3    },
        { "hw4",      MFX_IMPL_HARDWARE4    },
    };

    mfxIMPL impl = MFX_IMPL_AUTO_ANY;
    int i;

    if (device) {
        for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
            if (!strcmp(device, impl_map[i].name)) {
                impl = impl_map[i].impl;
                break;
            }
        if (i == FF_ARRAY_ELEMS(impl_map))
            impl = strtol(device, NULL, 0);
    }

    return impl;
}

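/* Device creation for av_hwdevice_ctx_create(): a child VAAPI or DXVA2 device
 * is created first (its name can be set with the "child_device" option), its
 * native handle is extracted, and a new MFX session is initialized with that
 * handle so hardware surfaces can be shared with the child API. */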
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
                             AVDictionary *opts, int flags)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv *priv;
    enum AVHWDeviceType child_device_type;
    AVDictionaryEntry *e;

    mfxVersion    ver = { { 3, 1 } };
    mfxIMPL       impl;
    mfxHDL        handle;
    mfxHandleType handle_type;
    mfxStatus     err;
    int ret;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = qsv_device_free;

    e = av_dict_get(opts, "child_device", NULL, 0);

    if (CONFIG_VAAPI)
        child_device_type = AV_HWDEVICE_TYPE_VAAPI;
    else if (CONFIG_DXVA2)
        child_device_type = AV_HWDEVICE_TYPE_DXVA2;
    else {
        av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
        return AVERROR(ENOSYS);
    }

    ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
                                 e ? e->value : NULL, NULL, 0);
    if (ret < 0)
        return ret;

    {
        AVHWDeviceContext *child_device_ctx = (AVHWDeviceContext*)priv->child_device_ctx->data;
#if CONFIG_VAAPI
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        handle_type = MFX_HANDLE_VA_DISPLAY;
        handle      = (mfxHDL)child_device_hwctx->display;
#elif CONFIG_DXVA2
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        handle      = (mfxHDL)child_device_hwctx->devmgr;
#endif
    }

    impl = choose_implementation(device);

    err = MFXInit(impl, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return AVERROR_UNKNOWN;
    }

    err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
    if (err != MFX_ERR_NONE)
        return AVERROR_UNKNOWN;

    return 0;
}

const HWContextType ff_hwcontext_type_qsv = {
    .type                   = AV_HWDEVICE_TYPE_QSV,
    .name                   = "QSV",

    .device_hwctx_size      = sizeof(AVQSVDeviceContext),
    .device_priv_size       = sizeof(QSVDeviceContext),
    .frames_hwctx_size      = sizeof(AVQSVFramesContext),
    .frames_priv_size       = sizeof(QSVFramesContext),

    .device_create          = qsv_device_create,
    .device_init            = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init            = qsv_frames_init,
    .frames_uninit          = qsv_frames_uninit,
    .frames_get_buffer      = qsv_get_buffer,
    .transfer_get_formats   = qsv_transfer_get_formats,
    .transfer_data_to       = qsv_transfer_data_to,
    .transfer_data_from     = qsv_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};
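
/*
 * Usage sketch (not part of this file, kept inside #if 0): a minimal example
 * of how a caller might drive this backend through the public libavutil
 * hwcontext API. The function name, pool size, dimensions and chosen formats
 * below are illustrative assumptions only, and error handling is reduced to
 * the basics.
 */
#if 0
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_qsv.h>
#include <libavutil/frame.h>

static int upload_one_nv12_frame(const AVFrame *sw_frame)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVHWFramesContext *frames_ctx;
    AVFrame *hw_frame = NULL;
    int ret;

    /* Creates the MFX session via qsv_device_create(); the "hw" string is
     * mapped to MFX_IMPL_HARDWARE by choose_implementation(). */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV, "hw", NULL, 0);
    if (ret < 0)
        return ret;

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        ret = AVERROR(ENOMEM);
        goto end;
    }
    frames_ctx = (AVHWFramesContext*)frames_ref->data;
    frames_ctx->format            = AV_PIX_FMT_QSV;
    frames_ctx->sw_format         = AV_PIX_FMT_NV12;
    frames_ctx->width             = sw_frame->width;
    frames_ctx->height            = sw_frame->height;
    frames_ctx->initial_pool_size = 4; /* QSV requires a fixed pool size */

    ret = av_hwframe_ctx_init(frames_ref); /* runs qsv_frames_init() */
    if (ret < 0)
        goto end;

    hw_frame = av_frame_alloc();
    if (!hw_frame) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    ret = av_hwframe_get_buffer(frames_ref, hw_frame, 0); /* qsv_get_buffer() */
    if (ret < 0)
        goto end;

    ret = av_hwframe_transfer_data(hw_frame, sw_frame, 0); /* qsv_transfer_data_to() */

end:
    av_frame_free(&hw_frame);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return ret;
}
#endif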