[libav-devel] [PATCH 2/5] hwframe: add a VDPAU implementation

Rémi Denis-Courmont remi at remlab.net
Mon Dec 21 18:07:24 CET 2015


Le 2015-12-20 21:59, Anton Khirnov a écrit :
> diff --git a/libavutil/hwframe_vdpau.c b/libavutil/hwframe_vdpau.c
> new file mode 100644
> index 0000000..3cbea4e3
> --- /dev/null
> +++ b/libavutil/hwframe_vdpau.c
> @@ -0,0 +1,264 @@
> +/*
> + * This file is part of Libav.
> + *
> + * Libav is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * Libav is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with Libav; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include <stdint.h>
> +#include <string.h>
> +
> +#include <vdpau/vdpau.h>
> +
> +#include "common.h"
> +#include "hwframe.h"
> +#include "hwframe_internal.h"
> +#include "hwframe_vdpau.h"
> +#include "mem.h"
> +#include "pixfmt.h"
> +#include "pixdesc.h"
> +
> +typedef struct VDPAUFramesContext {
> +    VdpVideoSurfaceGetParameters                    *get_surf_parameters;
> +    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
> +    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
> +
> +    enum AVPixelFormat *pix_fmts[3];
> +    int              nb_pix_fmts[3];
> +} VDPAUFramesContext;
> +
> +typedef struct VDPAUPixFmtMap {
> +    VdpYCbCrFormat vdpau_fmt;
> +    enum AVPixelFormat pix_fmt;
> +} VDPAUPixFmtMap;
> +
> +static const VDPAUPixFmtMap pix_fmts_420[] = {
> +    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
> +    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
> +    { 0,                     AV_PIX_FMT_NONE,   },
> +};
> +
> +static const VDPAUPixFmtMap pix_fmts_422[] = {
> +    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
> +    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
> +    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
> +    { 0,                     AV_PIX_FMT_NONE,   },

A VDP_YCBCR_FORMAT_NV12 entry is also possible here; read from a 4:2:2 
surface it really means NV16.
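
A sketch of what that could look like, assuming lavu has (or gains) a 
semi-planar 4:2:2 pixel format named AV_PIX_FMT_NV16 (untested):

    static const VDPAUPixFmtMap pix_fmts_422[] = {
        /* VDP_YCBCR_FORMAT_NV12 read from a 4:2:2 surface is effectively NV16 */
        { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
        { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
        { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
        { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
        { 0,                     AV_PIX_FMT_NONE,   },
    };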

> +};
> +
> +static const VDPAUPixFmtMap pix_fmts_444[] = {
> +    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
> +    { 0,                     AV_PIX_FMT_NONE,   },
> +};

Similarly here: NV12 read from a 4:4:4 surface really means NV24.
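
Same idea as above, assuming a semi-planar 4:4:4 format (AV_PIX_FMT_NV24 is 
hypothetical here and would have to exist in pixfmt.h first):

    static const VDPAUPixFmtMap pix_fmts_444[] = {
        /* NV12 layout on a 4:4:4 surface is effectively NV24 */
        { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV24    },
        { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
        { 0,                     AV_PIX_FMT_NONE,   },
    };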

> +
> +static const struct {
> +    VdpChromaType chroma_type;
> +    const VDPAUPixFmtMap *map;
> +} vdpau_pix_fmts[] = {
> +    { VDP_CHROMA_TYPE_420, pix_fmts_420 },
> +    { VDP_CHROMA_TYPE_422, pix_fmts_422 },
> +    { VDP_CHROMA_TYPE_444, pix_fmts_444 },
> +};
> +
> +static int count_pixfmts(const VDPAUPixFmtMap *map)
> +{
> +    int count = 0;
> +    while (map->pix_fmt != AV_PIX_FMT_NONE) {
> +        map++;
> +        count++;
> +    }
> +    return count;
> +}
> +
> +static int vdpau_init_pixmfts(AVHWFramesContext *ctx)
> +{
> +    AVVDPAUFramesContext *hwctx = ctx->hwctx;
> +    VDPAUFramesContext    *priv = ctx->internal->priv;
> +    int i;
> +
> +    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
> +        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
> +        int nb_pix_fmts;
> +
> +        nb_pix_fmts = count_pixfmts(map);
> +        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1,
> +                                            sizeof(*priv->pix_fmts[i]));
> +        if (!priv->pix_fmts[i])
> +            return AVERROR(ENOMEM);
> +
> +        nb_pix_fmts = 0;
> +        while (map->pix_fmt != AV_PIX_FMT_NONE) {
> +            VdpBool supported;
> +            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
> +                                                    map->vdpau_fmt, &supported);
> +            if (err != VDP_STATUS_OK)
> +                return AVERROR_UNKNOWN;

I'd just skip the format instead of failing.
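
Roughly, i.e. treat a failed capability query the same as "not supported" 
and keep going (untested sketch of the loop above):

    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        VdpBool supported;
        VdpStatus err = priv->get_transfer_caps(hwctx->device,
                                                vdpau_pix_fmts[i].chroma_type,
                                                map->vdpau_fmt, &supported);
        /* on query failure, just leave this format out of the list */
        if (err == VDP_STATUS_OK && supported)
            priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
        map++;
    }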

> +            if (supported)
> +                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
> +            map++;
> +        }
> +        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
> +        priv->nb_pix_fmts[i]             = nb_pix_fmts;
> +    }
> +
> +    return 0;
> +}
> +
> +static int vdpau_init(AVHWFramesContext *ctx)
> +{
> +    AVVDPAUFramesContext *hwctx = ctx->hwctx;
> +    VDPAUFramesContext   *priv  = ctx->internal->priv;
> +    VdpStatus             err;
> +    int                   ret;
> +
> +#define GET_CALLBACK(id, result)                                             \
> +do {                                                                         \
> +    void *tmp;                                                               \
> +    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                  \
> +    if (err != VDP_STATUS_OK) {                                              \
> +        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");  \
> +        return AVERROR_UNKNOWN;                                              \
> +    }                                                                        \
> +    priv->result = tmp;                                                      \
> +} while (0)
> +
> +    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS, get_surf_parameters);
> +    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
> +                 get_transfer_caps);
> +    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, get_data);
> +
> +    ret = vdpau_init_pixmfts(ctx);
> +    if (ret < 0) {
> +        av_log(ctx, AV_LOG_ERROR, "Error querying the supported
> pixel formats\n");
> +        return ret;
> +    }
> +
> +    return 0;
> +}
> +
> +static void vdpau_uninit(AVHWFramesContext *ctx)
> +{
> +    VDPAUFramesContext *priv = ctx->internal->priv;
> +    int i;
> +
> +    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
> +        av_freep(&priv->pix_fmts[i]);
> +}
> +
> +static int vdpau_get_target_formats(AVHWFramesContext *ctx, const
> AVFrame *frame,
> +                                    enum AVPixelFormat **formats)
> +{
> +    VDPAUFramesContext   *priv  = ctx->internal->priv;
> +
> +    VdpChromaType chroma_type;
> +    VdpStatus     err;
> +    uint32_t      width, height;
> +
> +    enum AVPixelFormat *fmts;
> +    int i, idx = -1;
> +
> +    err = priv->get_surf_parameters((VdpVideoSurface)(uintptr_t)frame->data[3], &chroma_type,
> +                                    &width, &height);
> +    if (err != VDP_STATUS_OK) {
> +        av_log(ctx, AV_LOG_ERROR, "Error querying the surface
> parameters\n");
> +        return AVERROR_UNKNOWN;
> +    }
> +
> +    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
> +        if (vdpau_pix_fmts[i].chroma_type == chroma_type) {
> +            idx = i;
> +            break;
> +        }
> +    }
> +    if (idx < 0) {
> +        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n",
> chroma_type);
> +        return AVERROR(ENOSYS);
> +    }
> +    if (priv->nb_pix_fmts[idx] == 1) {
> +        av_log(ctx, AV_LOG_ERROR,
> +               "No target formats are supported for chroma type %d\n", chroma_type);
> +        return AVERROR(ENOSYS);
> +    }
> +
> +    fmts = av_malloc_array(priv->nb_pix_fmts[idx], sizeof(*fmts));
> +    if (!fmts)
> +        return AVERROR(ENOMEM);
> +
> +    memcpy(fmts, priv->pix_fmts[idx], sizeof(*fmts) * (priv->nb_pix_fmts[idx]));
> +    *formats = fmts;
> +
> +    return 0;
> +}
> +
> +static int vdpau_retrieve_data(AVHWFramesContext *ctx, AVFrame *dst,
> +                               const AVFrame *src)
> +{
> +    VDPAUFramesContext   *priv  = ctx->internal->priv;
> +    VdpVideoSurface       surf  = (VdpVideoSurface)(uintptr_t)src->data[3];
> +
> +    const VDPAUPixFmtMap *map;
> +    VdpChromaType chroma_type;
> +    VdpYCbCrFormat vdpau_format;
> +    VdpStatus err;
> +    uint32_t width, height;
> +    int i, idx;
> +
> +    err = priv->get_surf_parameters(surf, &chroma_type, &width, &height);
> +    if (err != VDP_STATUS_OK) {
> +        av_log(ctx, AV_LOG_ERROR, "Error querying the surface
> parameters\n");
> +        return AVERROR_UNKNOWN;
> +    }
> +    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
> +        if (vdpau_pix_fmts[i].chroma_type == chroma_type) {
> +            idx = i;
> +            break;
> +        }
> +    }
> +    if (idx < 0) {
> +        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n",
> chroma_type);
> +        return AVERROR(ENOSYS);
> +    }
> +    map = vdpau_pix_fmts[idx].map;
> +
> +    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
> +        if (map[i].pix_fmt == dst->format) {
> +            vdpau_format = map[i].vdpau_fmt;
> +            break;
> +        }
> +    }
> +    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
> +        av_log(ctx, AV_LOG_ERROR,
> +               "Unsupported target pixel format: %s\n",
> +               av_get_pix_fmt_name(dst->format));
> +        return AVERROR(EINVAL);
> +    }
> +
> +    err = priv->get_data(surf, vdpau_format, (void * const *)dst->data,

Suspicious cast; dst->data is an array of uint8_t *, not of void * (see the 
sketch below).

> +                         dst->linesize);

I am not sure but that might be a bit naive w.r.t. potential alignment 
requirements.
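
One way to sidestep the cast (and the int linesize vs. uint32_t pitch 
mismatch, since IIRC VdpVideoSurfaceGetBitsYCbCr takes void * const * and 
uint32_t const *) would be to copy into properly typed locals, something 
like this (untested, reusing the function's i):

    void     *data[3];
    uint32_t  pitches[3];

    for (i = 0; i < 3; i++) {
        data[i]    = dst->data[i];
        pitches[i] = dst->linesize[i];
    }

    err = priv->get_data(surf, vdpau_format, data, pitches);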

> +    if (err != VDP_STATUS_OK) {
> +        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a
> VDPAU surface\n");
> +        return AVERROR_UNKNOWN;
> +    }
> +
> +    return 0;
> +}
> +
> +const HWFrameType ff_hwframe_type_vdpau = {
> +    .format             = AV_PIX_FMT_VDPAU,
> +    .name               = "VDPAU",
> +    .hwctx_size         = sizeof(AVVDPAUFramesContext),
> +    .priv_size          = sizeof(VDPAUFramesContext),
> +    .init               = vdpau_init,
> +    .uninit             = vdpau_uninit,
> +    .get_target_formats = vdpau_get_target_formats,
> +    .retrieve_data      = vdpau_retrieve_data,
> +};

-- 
Rémi Denis-Courmont
http://www.remlab.net/
