early-access version 2864
1
externals/ffmpeg/ffmpeg/libavdevice/Makefile
vendored
@@ -15,7 +15,6 @@ OBJS-$(CONFIG_SHARED) += reverse.o
OBJS-$(CONFIG_ALSA_INDEV) += alsa_dec.o alsa.o timefilter.o
OBJS-$(CONFIG_ALSA_OUTDEV) += alsa_enc.o alsa.o
OBJS-$(CONFIG_ANDROID_CAMERA_INDEV) += android_camera.o
OBJS-$(CONFIG_AUDIOTOOLBOX_OUTDEV) += audiotoolbox.o
OBJS-$(CONFIG_AVFOUNDATION_INDEV) += avfoundation.o
OBJS-$(CONFIG_BKTR_INDEV) += bktr.o
OBJS-$(CONFIG_CACA_OUTDEV) += caca.o
73
externals/ffmpeg/ffmpeg/libavdevice/alldevices.c
vendored
@@ -27,7 +27,6 @@
extern AVInputFormat ff_alsa_demuxer;
extern AVOutputFormat ff_alsa_muxer;
extern AVInputFormat ff_android_camera_demuxer;
extern AVOutputFormat ff_audiotoolbox_muxer;
extern AVInputFormat ff_avfoundation_demuxer;
extern AVInputFormat ff_bktr_demuxer;
extern AVOutputFormat ff_caca_muxer;
@@ -67,75 +66,3 @@ void avdevice_register_all(void)
{
    avpriv_register_devices(outdev_list, indev_list);
}

static void *next_input(const AVInputFormat *prev, AVClassCategory c2)
{
    const AVClass *pc;
    const AVClassCategory c1 = AV_CLASS_CATEGORY_DEVICE_INPUT;
    AVClassCategory category = AV_CLASS_CATEGORY_NA;
    const AVInputFormat *fmt = NULL;
    int i = 0;

    while (prev && (fmt = indev_list[i])) {
        i++;
        if (prev == fmt)
            break;
    }

    do {
        fmt = indev_list[i++];
        if (!fmt)
            break;
        pc = fmt->priv_class;
        if (!pc)
            continue;
        category = pc->category;
    } while (category != c1 && category != c2);
    return (AVInputFormat *)fmt;
}

static void *next_output(const AVOutputFormat *prev, AVClassCategory c2)
{
    const AVClass *pc;
    const AVClassCategory c1 = AV_CLASS_CATEGORY_DEVICE_OUTPUT;
    AVClassCategory category = AV_CLASS_CATEGORY_NA;
    const AVOutputFormat *fmt = NULL;
    int i = 0;

    while (prev && (fmt = outdev_list[i])) {
        i++;
        if (prev == fmt)
            break;
    }

    do {
        fmt = outdev_list[i++];
        if (!fmt)
            break;
        pc = fmt->priv_class;
        if (!pc)
            continue;
        category = pc->category;
    } while (category != c1 && category != c2);
    return (AVOutputFormat *)fmt;
}

AVInputFormat *av_input_audio_device_next(AVInputFormat *d)
{
    return next_input(d, AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT);
}

AVInputFormat *av_input_video_device_next(AVInputFormat *d)
{
    return next_input(d, AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT);
}

AVOutputFormat *av_output_audio_device_next(AVOutputFormat *d)
{
    return next_output(d, AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT);
}

AVOutputFormat *av_output_video_device_next(AVOutputFormat *d)
{
    return next_output(d, AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT);
}
5
externals/ffmpeg/ffmpeg/libavdevice/alsa.c
vendored
@@ -286,10 +286,6 @@ av_cold int ff_alsa_open(AVFormatContext *ctx, snd_pcm_stream_t mode,
        }
    }

    s->pkt = av_packet_alloc();
    if (!s->pkt)
        goto fail1;

    s->h = h;
    return 0;

@@ -312,7 +308,6 @@ av_cold int ff_alsa_close(AVFormatContext *s1)
    if (CONFIG_ALSA_INDEV)
        ff_timefilter_destroy(s->timefilter);
    snd_pcm_close(s->h);
    av_packet_free(&s->pkt);
    return 0;
}
1
externals/ffmpeg/ffmpeg/libavdevice/alsa.h
vendored
@@ -58,7 +58,6 @@ typedef struct AlsaData {
    void *reorder_buf;
    int reorder_buf_size; ///< in frames
    int64_t timestamp;    ///< current timestamp, without latency applied.
    AVPacket *pkt;
} AlsaData;

/**
22
externals/ffmpeg/ffmpeg/libavdevice/alsa_dec.c
vendored
@@ -105,36 +105,34 @@ static int audio_read_packet(AVFormatContext *s1, AVPacket *pkt)
    int64_t dts;
    snd_pcm_sframes_t delay = 0;

    if (!s->pkt->data) {
        int ret = av_new_packet(s->pkt, s->period_size * s->frame_size);
        if (ret < 0)
            return ret;
        s->pkt->size = 0;
    if (av_new_packet(pkt, s->period_size * s->frame_size) < 0) {
        return AVERROR(EIO);
    }

    do {
        while ((res = snd_pcm_readi(s->h, s->pkt->data + s->pkt->size, s->period_size - s->pkt->size / s->frame_size)) < 0) {
    while ((res = snd_pcm_readi(s->h, pkt->data, s->period_size)) < 0) {
        if (res == -EAGAIN) {
            av_packet_unref(pkt);

            return AVERROR(EAGAIN);
        }
            s->pkt->size = 0;
        if (ff_alsa_xrun_recover(s1, res) < 0) {
            av_log(s1, AV_LOG_ERROR, "ALSA read error: %s\n",
                   snd_strerror(res));
            av_packet_unref(pkt);

            return AVERROR(EIO);
        }
        ff_timefilter_reset(s->timefilter);
    }
        s->pkt->size += res * s->frame_size;
    } while (s->pkt->size < s->period_size * s->frame_size);
    }

    av_packet_move_ref(pkt, s->pkt);
    dts = av_gettime();
    snd_pcm_delay(s->h, &delay);
    dts -= av_rescale(delay + res, 1000000, s->sample_rate);
    pkt->pts = ff_timefilter_update(s->timefilter, dts, s->last_period);
    s->last_period = res;

    pkt->size = res * s->frame_size;

    return 0;
}
117
externals/ffmpeg/ffmpeg/libavdevice/avdevice.c
vendored
@@ -27,11 +27,39 @@
#include "libavutil/ffversion.h"
const char av_device_ffversion[] = "FFmpeg version " FFMPEG_VERSION;

#if FF_API_DEVICE_CAPABILITIES
#define E AV_OPT_FLAG_ENCODING_PARAM
#define D AV_OPT_FLAG_DECODING_PARAM
#define A AV_OPT_FLAG_AUDIO_PARAM
#define V AV_OPT_FLAG_VIDEO_PARAM
#define OFFSET(x) offsetof(AVDeviceCapabilitiesQuery, x)

const AVOption av_device_capabilities[] = {
    { "codec", "codec", OFFSET(codec), AV_OPT_TYPE_INT,
        {.i64 = AV_CODEC_ID_NONE}, AV_CODEC_ID_NONE, INT_MAX, E|D|A|V },
    { "sample_format", "sample format", OFFSET(sample_format), AV_OPT_TYPE_SAMPLE_FMT,
        {.i64 = AV_SAMPLE_FMT_NONE}, AV_SAMPLE_FMT_NONE, INT_MAX, E|D|A },
    { "sample_rate", "sample rate", OFFSET(sample_rate), AV_OPT_TYPE_INT,
        {.i64 = -1}, -1, INT_MAX, E|D|A },
    { "channels", "channels", OFFSET(channels), AV_OPT_TYPE_INT,
        {.i64 = -1}, -1, INT_MAX, E|D|A },
    { "channel_layout", "channel layout", OFFSET(channel_layout), AV_OPT_TYPE_CHANNEL_LAYOUT,
        {.i64 = -1}, -1, INT_MAX, E|D|A },
    { "pixel_format", "pixel format", OFFSET(pixel_format), AV_OPT_TYPE_PIXEL_FMT,
        {.i64 = AV_PIX_FMT_NONE}, AV_PIX_FMT_NONE, INT_MAX, E|D|V },
    { "window_size", "window size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE,
        {.str = NULL}, -1, INT_MAX, E|D|V },
    { "frame_size", "frame size", OFFSET(frame_width), AV_OPT_TYPE_IMAGE_SIZE,
        {.str = NULL}, -1, INT_MAX, E|D|V },
    { "fps", "fps", OFFSET(fps), AV_OPT_TYPE_RATIONAL,
        {.dbl = -1}, -1, INT_MAX, E|D|V },
    { NULL }
};
#endif

#undef E
#undef D
#undef A
#undef V
#undef OFFSET

unsigned avdevice_version(void)
{
@@ -50,6 +78,52 @@ const char * avdevice_license(void)
    return &LICENSE_PREFIX FFMPEG_LICENSE[sizeof(LICENSE_PREFIX) - 1];
}

static void *device_next(void *prev, int output,
                         AVClassCategory c1, AVClassCategory c2)
{
    const AVClass *pc;
    AVClassCategory category = AV_CLASS_CATEGORY_NA;
    do {
        if (output) {
            if (!(prev = av_oformat_next(prev)))
                break;
            pc = ((AVOutputFormat *)prev)->priv_class;
        } else {
            if (!(prev = av_iformat_next(prev)))
                break;
            pc = ((AVInputFormat *)prev)->priv_class;
        }
        if (!pc)
            continue;
        category = pc->category;
    } while (category != c1 && category != c2);
    return prev;
}

AVInputFormat *av_input_audio_device_next(AVInputFormat *d)
{
    return device_next(d, 0, AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT,
                       AV_CLASS_CATEGORY_DEVICE_INPUT);
}

AVInputFormat *av_input_video_device_next(AVInputFormat *d)
{
    return device_next(d, 0, AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
                       AV_CLASS_CATEGORY_DEVICE_INPUT);
}

AVOutputFormat *av_output_audio_device_next(AVOutputFormat *d)
{
    return device_next(d, 1, AV_CLASS_CATEGORY_DEVICE_AUDIO_OUTPUT,
                       AV_CLASS_CATEGORY_DEVICE_OUTPUT);
}

AVOutputFormat *av_output_video_device_next(AVOutputFormat *d)
{
    return device_next(d, 1, AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
                       AV_CLASS_CATEGORY_DEVICE_OUTPUT);
}

int avdevice_app_to_dev_control_message(struct AVFormatContext *s, enum AVAppToDevMessageType type,
                                        void *data, size_t data_size)
{
@@ -66,18 +140,49 @@ int avdevice_dev_to_app_control_message(struct AVFormatContext *s, enum AVDevToA
    return s->control_message_cb(s, type, data, data_size);
}

#if FF_API_DEVICE_CAPABILITIES
int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s,
                                 AVDictionary **device_options)
{
    return AVERROR(ENOSYS);
    int ret;
    av_assert0(s && caps);
    av_assert0(s->iformat || s->oformat);
    if ((s->oformat && !s->oformat->create_device_capabilities) ||
        (s->iformat && !s->iformat->create_device_capabilities))
        return AVERROR(ENOSYS);
    *caps = av_mallocz(sizeof(**caps));
    if (!(*caps))
        return AVERROR(ENOMEM);
    (*caps)->device_context = s;
    if (((ret = av_opt_set_dict(s->priv_data, device_options)) < 0))
        goto fail;
    if (s->iformat) {
        if ((ret = s->iformat->create_device_capabilities(s, *caps)) < 0)
            goto fail;
    } else {
        if ((ret = s->oformat->create_device_capabilities(s, *caps)) < 0)
            goto fail;
    }
    av_opt_set_defaults(*caps);
    return 0;
fail:
    av_freep(caps);
    return ret;
}

void avdevice_capabilities_free(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s)
{
    return;
    if (!s || !caps || !(*caps))
        return;
    av_assert0(s->iformat || s->oformat);
    if (s->iformat) {
        if (s->iformat->free_device_capabilities)
            s->iformat->free_device_capabilities(s, *caps);
    } else {
        if (s->oformat->free_device_capabilities)
            s->oformat->free_device_capabilities(s, *caps);
    }
    av_freep(caps);
}
#endif

int avdevice_list_devices(AVFormatContext *s, AVDeviceInfoList **device_list)
{
@@ -321,7 +321,6 @@ int avdevice_dev_to_app_control_message(struct AVFormatContext *s,
                                        enum AVDevToAppMessageType type,
                                        void *data, size_t data_size);

#if FF_API_DEVICE_CAPABILITIES
/**
 * Following API allows user to probe device capabilities (supported codecs,
 * pixel formats, sample formats, resolutions, channel counts, etc).
@@ -417,7 +416,6 @@ typedef struct AVDeviceCapabilitiesQuery {
/**
 * AVOption table used by devices to implement device capabilities API. Should not be used by a user.
 */
attribute_deprecated
extern const AVOption av_device_capabilities[];

/**
@@ -437,7 +435,6 @@ extern const AVOption av_device_capabilities[];
 *
 * @return >= 0 on success, negative otherwise.
 */
attribute_deprecated
int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s,
                                 AVDictionary **device_options);

@@ -447,9 +444,7 @@ int avdevice_capabilities_create(AVDeviceCapabilitiesQuery **caps, AVFormatConte
 * @param caps Device capabilities data to be freed.
 * @param s Context of the device.
 */
attribute_deprecated
void avdevice_capabilities_free(AVDeviceCapabilitiesQuery **caps, AVFormatContext *s);
#endif

/**
 * Structure describes basic parameters of the device.
4
externals/ffmpeg/ffmpeg/libavdevice/bktr.c
vendored
@@ -225,14 +225,14 @@ static void bktr_getframe(uint64_t per_frame)
{
    uint64_t curtime;

    curtime = av_gettime_relative();
    curtime = av_gettime();
    if (!last_frame_time
        || ((last_frame_time + per_frame) > curtime)) {
        if (!usleep(last_frame_time + per_frame + per_frame / 8 - curtime)) {
            if (!nsignals)
                av_log(NULL, AV_LOG_INFO,
                       "SLEPT NO signals - %d microseconds late\n",
                       (int)(av_gettime_relative() - last_frame_time - per_frame));
                       (int)(av_gettime() - last_frame_time - per_frame));
        }
    }
    nsignals = 0;
2
externals/ffmpeg/ffmpeg/libavdevice/caca.c
vendored
@@ -180,7 +180,7 @@ static int caca_write_header(AVFormatContext *s)
    if (!c->window_title)
        c->window_title = av_strdup(s->url);
    caca_set_display_title(c->display, c->window_title);
    caca_set_display_time(c->display, av_rescale_q(1, st->time_base, AV_TIME_BASE_Q));
    caca_set_display_time(c->display, av_rescale_q(1, st->codec->time_base, AV_TIME_BASE_Q));

    return 0;
@@ -70,30 +70,9 @@ static IDeckLinkIterator *decklink_create_iterator(AVFormatContext *avctx)
#else
    iter = CreateDeckLinkIteratorInstance();
#endif
    if (!iter) {
    if (!iter)
        av_log(avctx, AV_LOG_ERROR, "Could not create DeckLink iterator. "
               "Make sure you have DeckLink drivers " BLACKMAGIC_DECKLINK_API_VERSION_STRING " or newer installed.\n");
    } else {
        IDeckLinkAPIInformation *api;
        int64_t version;
#ifdef _WIN32
        if (CoCreateInstance(CLSID_CDeckLinkAPIInformation, NULL, CLSCTX_ALL,
                             IID_IDeckLinkAPIInformation, (void**) &api) != S_OK) {
            api = NULL;
        }
#else
        api = CreateDeckLinkAPIInformationInstance();
#endif
        if (api && api->GetInt(BMDDeckLinkAPIVersion, &version) == S_OK) {
            if (version < BLACKMAGIC_DECKLINK_API_VERSION)
                av_log(avctx, AV_LOG_WARNING, "Installed DeckLink drivers are too old and may be incompatible with the SDK this module was built against. "
                       "Make sure you have DeckLink drivers " BLACKMAGIC_DECKLINK_API_VERSION_STRING " or newer installed.\n");
        } else {
            av_log(avctx, AV_LOG_ERROR, "Failed to check installed DeckLink API version.\n");
        }
        if (api)
            api->Release();
    }

    return iter;
}
@@ -293,7 +272,7 @@ int ff_decklink_set_format(AVFormatContext *avctx,
#if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b050000
    if (direction == DIRECTION_IN) {
        BMDDisplayMode actualMode = ctx->bmd_mode;
        if (ctx->dli->DoesSupportVideoMode(ctx->video_input, ctx->bmd_mode, ctx->raw_format,
        if (ctx->dli->DoesSupportVideoMode(ctx->video_input, ctx->bmd_mode, (BMDPixelFormat) cctx->raw_format,
                                           bmdNoVideoInputConversion, bmdSupportedVideoModeDefault,
                                           &actualMode, &support) != S_OK || !support || ctx->bmd_mode != actualMode)
            return -1;
@@ -307,7 +286,7 @@ int ff_decklink_set_format(AVFormatContext *avctx,
    return 0;
#elif BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b000000
    if (direction == DIRECTION_IN) {
        if (ctx->dli->DoesSupportVideoMode(ctx->video_input, ctx->bmd_mode, ctx->raw_format,
        if (ctx->dli->DoesSupportVideoMode(ctx->video_input, ctx->bmd_mode, (BMDPixelFormat) cctx->raw_format,
                                           bmdSupportedVideoModeDefault,
                                           &support) != S_OK)
            return -1;
@@ -324,7 +303,7 @@ int ff_decklink_set_format(AVFormatContext *avctx,
    return 0;
#else
    if (direction == DIRECTION_IN) {
        if (ctx->dli->DoesSupportVideoMode(ctx->bmd_mode, ctx->raw_format,
        if (ctx->dli->DoesSupportVideoMode(ctx->bmd_mode, (BMDPixelFormat) cctx->raw_format,
                                           bmdVideoOutputFlagDefault,
                                           &support, NULL) != S_OK)
            return -1;
@@ -75,7 +75,7 @@ class decklink_output_callback;
class decklink_input_callback;

typedef struct AVPacketQueue {
    PacketList *first_pkt, *last_pkt;
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    unsigned long long size;
    int abort_request;
@@ -120,14 +120,12 @@ struct decklink_ctx {
    unsigned int dropped;
    AVStream *audio_st;
    AVStream *video_st;
    AVStream *klv_st;
    AVStream *teletext_st;
    uint16_t cdp_sequence_num;

    /* Options */
    int list_devices;
    int list_formats;
    int enable_klv;
    int64_t teletext_lines;
    double preroll;
    int duplex_mode;
@@ -155,14 +153,16 @@ struct decklink_ctx {

typedef enum { DIRECTION_IN, DIRECTION_OUT} decklink_direction_t;

static const BMDPixelFormat decklink_raw_format_map[] = {
    (BMDPixelFormat)0,
    bmdFormat8BitYUV,
    bmdFormat10BitYUV,
    bmdFormat8BitARGB,
    bmdFormat8BitBGRA,
    bmdFormat10BitRGB,
};
#ifdef _WIN32
#if BLACKMAGIC_DECKLINK_API_VERSION < 0x0a040000
typedef unsigned long buffercount_type;
#else
typedef unsigned int buffercount_type;
#endif
IDeckLinkIterator *CreateDeckLinkIteratorInstance(void);
#else
typedef uint32_t buffercount_type;
#endif

static const BMDAudioConnection decklink_audio_connection_map[] = {
    (BMDAudioConnection)0,
@@ -193,11 +193,6 @@ static const BMDTimecodeFormat decklink_timecode_format_map[] = {
    bmdTimecodeVITC,
    bmdTimecodeVITCField2,
    bmdTimecodeSerial,
#if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b000000
    bmdTimecodeRP188HighFrameRate,
#else
    (BMDTimecodeFormat)0,
#endif
};

int ff_decklink_set_configs(AVFormatContext *avctx, decklink_direction_t direction);
@@ -23,8 +23,6 @@
#ifndef AVDEVICE_DECKLINK_COMMON_C_H
#define AVDEVICE_DECKLINK_COMMON_C_H

#include <DeckLinkAPIVersion.h>

typedef enum DecklinkPtsSource {
    PTS_SRC_AUDIO = 1,
    PTS_SRC_VIDEO = 2,
@@ -42,7 +40,6 @@ struct decklink_cctx {
    /* Options */
    int list_devices;
    int list_formats;
    int enable_klv;
    int64_t teletext_lines;
    double preroll;
    int audio_channels;
248
externals/ffmpeg/ffmpeg/libavdevice/decklink_dec.cpp
vendored
@@ -22,7 +22,6 @@
 */

#include <atomic>
#include <vector>
using std::atomic;

/* Include internal.h first to avoid conflict between winsock.h (used by
@@ -35,16 +34,13 @@ extern "C" {

extern "C" {
#include "config.h"
#include "libavcodec/packet_internal.h"
#include "libavformat/avformat.h"
#include "libavutil/avassert.h"
#include "libavutil/avutil.h"
#include "libavutil/common.h"
#include "libavutil/internal.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/time.h"
#include "libavutil/timecode.h"
#include "libavutil/mathematics.h"
#include "libavutil/reverse.h"
#include "avdevice.h"
@@ -483,7 +479,7 @@ static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)

static void avpacket_queue_flush(AVPacketQueue *q)
{
    PacketList *pkt, *pkt1;
    AVPacketList *pkt, *pkt1;

    pthread_mutex_lock(&q->mutex);
    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
@@ -516,7 +512,7 @@ static unsigned long long avpacket_queue_size(AVPacketQueue *q)

static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
{
    PacketList *pkt1;
    AVPacketList *pkt1;

    // Drop Packet if queue size is > maximum queue size
    if (avpacket_queue_size(q) > (uint64_t)q->max_q_size) {
@@ -530,7 +526,7 @@ static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
        return -1;
    }

    pkt1 = (PacketList *)av_malloc(sizeof(PacketList));
    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
    if (!pkt1) {
        av_packet_unref(pkt);
        return -1;
@@ -558,7 +554,7 @@ static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)

static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
{
    PacketList *pkt1;
    AVPacketList *pkt1;
    int ret;

    pthread_mutex_lock(&q->mutex);
@@ -587,108 +583,6 @@ static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
    return ret;
}

static void handle_klv(AVFormatContext *avctx, decklink_ctx *ctx, IDeckLinkVideoInputFrame *videoFrame, int64_t pts)
{
    const uint8_t KLV_DID = 0x44;
    const uint8_t KLV_IN_VANC_SDID = 0x04;

    struct KLVPacket
    {
        uint16_t sequence_counter;
        std::vector<uint8_t> data;
    };

    size_t total_size = 0;
    std::vector<std::vector<KLVPacket>> klv_packets(256);

    IDeckLinkVideoFrameAncillaryPackets *packets = nullptr;
    if (videoFrame->QueryInterface(IID_IDeckLinkVideoFrameAncillaryPackets, (void**)&packets) != S_OK)
        return;

    IDeckLinkAncillaryPacketIterator *it = nullptr;
    if (packets->GetPacketIterator(&it) != S_OK) {
        packets->Release();
        return;
    }

    IDeckLinkAncillaryPacket *packet = nullptr;
    while (it->Next(&packet) == S_OK) {
        uint8_t *data = nullptr;
        uint32_t size = 0;

        if (packet->GetDID() == KLV_DID && packet->GetSDID() == KLV_IN_VANC_SDID) {
            av_log(avctx, AV_LOG_DEBUG, "Found KLV VANC packet on line: %d\n", packet->GetLineNumber());

            if (packet->GetBytes(bmdAncillaryPacketFormatUInt8, (const void**) &data, &size) == S_OK) {
                // MID and PSC
                if (size > 3) {
                    uint8_t mid = data[0];
                    uint16_t psc = data[1] << 8 | data[2];

                    av_log(avctx, AV_LOG_DEBUG, "KLV with MID: %d and PSC: %d\n", mid, psc);

                    auto& list = klv_packets[mid];
                    uint16_t expected_psc = list.size() + 1;

                    if (psc == expected_psc) {
                        uint32_t data_len = size - 3;
                        total_size += data_len;

                        KLVPacket packet{ psc };
                        packet.data.resize(data_len);
                        memcpy(packet.data.data(), data + 3, data_len);

                        list.push_back(std::move(packet));
                    } else {
                        av_log(avctx, AV_LOG_WARNING, "Out of order PSC: %d for MID: %d\n", psc, mid);

                        if (!list.empty()) {
                            for (auto& klv : list)
                                total_size -= klv.data.size();

                            list.clear();
                        }
                    }
                }
            }
        }

        packet->Release();
    }

    it->Release();
    packets->Release();

    if (total_size > 0) {
        std::vector<uint8_t> klv;
        klv.reserve(total_size);

        for (size_t i = 0; i < klv_packets.size(); ++i) {
            auto& list = klv_packets[i];

            if (list.empty())
                continue;

            av_log(avctx, AV_LOG_DEBUG, "Joining MID: %d\n", (int)i);

            for (auto& packet : list)
                klv.insert(klv.end(), packet.data.begin(), packet.data.end());
        }

        AVPacket klv_packet = { 0 };
        klv_packet.pts = pts;
        klv_packet.dts = pts;
        klv_packet.flags |= AV_PKT_FLAG_KEY;
        klv_packet.stream_index = ctx->klv_st->index;
        klv_packet.data = klv.data();
        klv_packet.size = klv.size();

        if (avpacket_queue_put(&ctx->queue, &klv_packet) < 0) {
            ++ctx->dropped;
        }
    }
}

class decklink_input_callback : public IDeckLinkInputCallback
{
public:
@@ -790,52 +684,6 @@ static int64_t get_pkt_pts(IDeckLinkVideoInputFrame *videoFrame,
    return pts;
}

static int get_bmd_timecode(AVFormatContext *avctx, AVTimecode *tc, AVRational frame_rate, BMDTimecodeFormat tc_format, IDeckLinkVideoInputFrame *videoFrame)
{
    IDeckLinkTimecode *timecode;
    int ret = AVERROR(ENOENT);
#if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b000000
    int hfr = (tc_format == bmdTimecodeRP188HighFrameRate);
#else
    int hfr = 0;
#endif
    if (videoFrame->GetTimecode(tc_format, &timecode) == S_OK) {
        uint8_t hh, mm, ss, ff;
        if (timecode->GetComponents(&hh, &mm, &ss, &ff) == S_OK) {
            int flags = (timecode->GetFlags() & bmdTimecodeIsDropFrame) ? AV_TIMECODE_FLAG_DROPFRAME : 0;
            if (!hfr && av_cmp_q(frame_rate, av_make_q(30, 1)) == 1)
                ff = ff << 1 | !!(timecode->GetFlags() & bmdTimecodeFieldMark);
            ret = av_timecode_init_from_components(tc, frame_rate, flags, hh, mm, ss, ff, avctx);
        }
        timecode->Release();
    }
    return ret;
}

static int get_frame_timecode(AVFormatContext *avctx, decklink_ctx *ctx, AVTimecode *tc, IDeckLinkVideoInputFrame *videoFrame)
{
    AVRational frame_rate = ctx->video_st->r_frame_rate;
    int ret;
    /* 50/60 fps content has alternating VITC1 and VITC2 timecode (see SMPTE ST
     * 12-2, section 7), so the native ordering of RP188Any (HFR, VITC1, LTC,
     * VITC2) would not work because LTC might not contain the field flag.
     * Therefore we query the types manually. */
    if (ctx->tc_format == bmdTimecodeRP188Any && av_cmp_q(frame_rate, av_make_q(30, 1)) == 1) {
#if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b000000
        ret = get_bmd_timecode(avctx, tc, frame_rate, bmdTimecodeRP188HighFrameRate, videoFrame);
        if (ret == AVERROR(ENOENT))
#endif
        ret = get_bmd_timecode(avctx, tc, frame_rate, bmdTimecodeRP188VITC1, videoFrame);
        if (ret == AVERROR(ENOENT))
            ret = get_bmd_timecode(avctx, tc, frame_rate, bmdTimecodeRP188VITC2, videoFrame);
        if (ret == AVERROR(ENOENT))
            ret = get_bmd_timecode(avctx, tc, frame_rate, bmdTimecodeRP188LTC, videoFrame);
    } else {
        ret = get_bmd_timecode(avctx, tc, frame_rate, ctx->tc_format, videoFrame);
    }
    return ret;
}

HRESULT decklink_input_callback::VideoInputFrameArrived(
    IDeckLinkVideoInputFrame *videoFrame, IDeckLinkAudioInputPacket *audioFrame)
{
@@ -873,7 +721,8 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(

    // Handle Video Frame
    if (videoFrame) {
        AVPacket pkt = { 0 };
        AVPacket pkt;
        av_init_packet(&pkt);
        if (ctx->frameCount % 25 == 0) {
            unsigned long long qsize = avpacket_queue_size(&ctx->queue);
            av_log(avctx, AV_LOG_DEBUG,
@@ -916,27 +765,20 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(

        // Handle Timecode (if requested)
        if (ctx->tc_format) {
            AVTimecode tcr;
            if (get_frame_timecode(avctx, ctx, &tcr, videoFrame) >= 0) {
                char tcstr[AV_TIMECODE_STR_SIZE];
                const char *tc = av_timecode_make_string(&tcr, tcstr, 0);
            IDeckLinkTimecode *timecode;
            if (videoFrame->GetTimecode(ctx->tc_format, &timecode) == S_OK) {
                const char *tc = NULL;
                DECKLINK_STR decklink_tc;
                if (timecode->GetString(&decklink_tc) == S_OK) {
                    tc = DECKLINK_STRDUP(decklink_tc);
                    DECKLINK_FREE(decklink_tc);
                }
                timecode->Release();
                if (tc) {
                    AVDictionary* metadata_dict = NULL;
                    int metadata_len;
                    uint8_t* packed_metadata;

                    if (av_cmp_q(ctx->video_st->r_frame_rate, av_make_q(60, 1)) < 1) {
                        uint32_t tc_data = av_timecode_get_smpte_from_framenum(&tcr, 0);
                        int size = sizeof(uint32_t) * 4;
                        uint32_t *sd = (uint32_t *)av_packet_new_side_data(&pkt, AV_PKT_DATA_S12M_TIMECODE, size);

                        if (sd) {
                            *sd = 1;       // one TC
                            *(sd + 1) = tc_data; // TC
                        }
                    }

                    if (av_dict_set(&metadata_dict, "timecode", tc, 0) >= 0) {
                        buffer_size_t metadata_len;
                    if (av_dict_set(&metadata_dict, "timecode", tc, AV_DICT_DONT_STRDUP_VAL) >= 0) {
                        packed_metadata = av_packet_pack_dictionary(metadata_dict, &metadata_len);
                        av_dict_free(&metadata_dict);
                        if (packed_metadata) {
@@ -975,23 +817,19 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(

        if (!no_video) {
            IDeckLinkVideoFrameAncillary *vanc;
            AVPacket txt_pkt = { 0 };
            AVPacket txt_pkt;
            uint8_t txt_buf0[3531]; // 35 * 46 bytes decoded teletext lines + 1 byte data_identifier + 1920 bytes OP47 decode buffer
            uint8_t *txt_buf = txt_buf0;

            if (ctx->enable_klv) {
                handle_klv(avctx, ctx, videoFrame, pkt.pts);
            }

            if (videoFrame->GetAncillaryData(&vanc) == S_OK) {
                int i;
                int64_t line_mask = 1;
                BMDPixelFormat vanc_format = vanc->GetPixelFormat();
                txt_buf[0] = 0x10;    // data_identifier - EBU_data
                txt_buf++;
#if CONFIG_LIBZVBI
                if (ctx->bmd_mode == bmdModePAL && ctx->teletext_lines &&
                    (vanc_format == bmdFormat8BitYUV || vanc_format == bmdFormat10BitYUV)) {
                    int64_t line_mask = 1;
                    av_assert0(videoFrame->GetWidth() == 720);
                    for (i = 6; i < 336; i++, line_mask <<= 1) {
                        uint8_t *buf;
@@ -1034,6 +872,7 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(
                    txt_buf[1] = 0x2c; // data_unit_length
                    txt_buf += 46;
                }
                av_init_packet(&txt_pkt);
                txt_pkt.pts = pkt.pts;
                txt_pkt.dts = pkt.dts;
                txt_pkt.stream_index = ctx->teletext_st->index;
@@ -1057,8 +896,9 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(

    // Handle Audio Frame
    if (audioFrame) {
        AVPacket pkt = { 0 };
        AVPacket pkt;
        BMDTimeValue audio_pts;
        av_init_packet(&pkt);

        //hack among hacks
        pkt.size = audioFrame->GetSampleFrameCount() * ctx->audio_st->codecpar->channels * (ctx->audio_depth / 8);
@@ -1082,13 +922,9 @@ HRESULT decklink_input_callback::VideoInputFrameArrived(

HRESULT decklink_input_callback::VideoInputFormatChanged(
    BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode,
    BMDDetectedVideoInputFormatFlags formatFlags)
    BMDDetectedVideoInputFormatFlags)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *) avctx->priv_data;
    ctx->bmd_mode = mode->GetDisplayMode();
    // check the C context member to make sure we set both raw_format and bmd_mode with data from the same format change callback
    if (!cctx->raw_format)
        ctx->raw_format = (formatFlags & bmdDetectedVideoInputRGB444) ? bmdFormat8BitARGB : bmdFormat8BitYUV;
    return S_OK;
}

@@ -1176,7 +1012,6 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)
        return AVERROR(ENOMEM);
    ctx->list_devices = cctx->list_devices;
    ctx->list_formats = cctx->list_formats;
    ctx->enable_klv = cctx->enable_klv;
    ctx->teletext_lines = cctx->teletext_lines;
    ctx->preroll = cctx->preroll;
    ctx->duplex_mode = cctx->duplex_mode;
@@ -1190,8 +1025,6 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)
    ctx->video_pts_source = cctx->video_pts_source;
    ctx->draw_bars = cctx->draw_bars;
    ctx->audio_depth = cctx->audio_depth;
    if (cctx->raw_format > 0 && (unsigned int)cctx->raw_format < FF_ARRAY_ELEMS(decklink_raw_format_map))
        ctx->raw_format = decklink_raw_format_map[cctx->raw_format];
    cctx->ctx = ctx;

    /* Check audio channel option for valid values: 2, 8 or 16 */
@@ -1217,6 +1050,7 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)

    /* List available devices. */
    if (ctx->list_devices) {
        av_log(avctx, AV_LOG_WARNING, "The -list_devices option is deprecated and will be removed. Please use ffmpeg -sources decklink instead.\n");
        ff_decklink_list_devices_legacy(avctx, 1, 0);
        return AVERROR_EXIT;
    }
@@ -1270,8 +1104,6 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)
        }
        av_log(avctx, AV_LOG_INFO, "Autodetected the input mode\n");
    }
    if (ctx->raw_format == (BMDPixelFormat)0)
        ctx->raw_format = bmdFormat8BitYUV;
    if (ff_decklink_set_format(avctx, DIRECTION_IN) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not set format code %s for %s\n",
            cctx->format_code ? cctx->format_code : "(unset)", avctx->url);
@@ -1315,34 +1147,40 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)
    st->time_base.num = ctx->bmd_tb_num;
    st->r_frame_rate = av_make_q(st->time_base.den, st->time_base.num);

    switch(ctx->raw_format) {
    switch((BMDPixelFormat)cctx->raw_format) {
    case bmdFormat8BitYUV:
        st->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
        st->codecpar->codec_tag = MKTAG('U', 'Y', 'V', 'Y');
        st->codecpar->format = AV_PIX_FMT_UYVY422;
        st->codecpar->bit_rate = av_rescale(ctx->bmd_width * ctx->bmd_height * 16, st->time_base.den, st->time_base.num);
        break;
    case bmdFormat10BitYUV:
        st->codecpar->codec_id = AV_CODEC_ID_V210;
        st->codecpar->codec_tag = MKTAG('V','2','1','0');
        st->codecpar->bit_rate = av_rescale(ctx->bmd_width * ctx->bmd_height * 64, st->time_base.den, st->time_base.num * 3);
        st->codecpar->bits_per_coded_sample = 10;
        break;
    case bmdFormat8BitARGB:
        st->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
        st->codecpar->format = AV_PIX_FMT_0RGB;
        st->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag((enum AVPixelFormat)st->codecpar->format);
        st->codecpar->bit_rate = av_rescale(ctx->bmd_width * ctx->bmd_height * 32, st->time_base.den, st->time_base.num);
        break;
    case bmdFormat8BitBGRA:
        st->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
        st->codecpar->format = AV_PIX_FMT_BGR0;
        st->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag((enum AVPixelFormat)st->codecpar->format);
        st->codecpar->bit_rate = av_rescale(ctx->bmd_width * ctx->bmd_height * 32, st->time_base.den, st->time_base.num);
        break;
    case bmdFormat10BitRGB:
        st->codecpar->codec_id = AV_CODEC_ID_R210;
        st->codecpar->codec_tag = MKTAG('R','2','1','0');
        st->codecpar->format = AV_PIX_FMT_RGB48LE;
        st->codecpar->bit_rate = av_rescale(ctx->bmd_width * ctx->bmd_height * 30, st->time_base.den, st->time_base.num);
        st->codecpar->bits_per_coded_sample = 10;
        break;
    default:
        char fourcc_str[AV_FOURCC_MAX_STRING_SIZE] = {0};
        av_fourcc_make_string(fourcc_str, ctx->raw_format);
        av_log(avctx, AV_LOG_ERROR, "Raw Format %s not supported\n", fourcc_str);
        av_log(avctx, AV_LOG_ERROR, "Raw Format %.4s not supported\n", (char*) &cctx->raw_format);
        ret = AVERROR(EINVAL);
        goto error;
    }
@@ -1364,20 +1202,6 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)

    ctx->video_st=st;

    if (ctx->enable_klv) {
        st = avformat_new_stream(avctx, NULL);
        if (!st) {
            ret = AVERROR(ENOMEM);
            goto error;
        }
        st->codecpar->codec_type = AVMEDIA_TYPE_DATA;
        st->time_base.den = ctx->bmd_tb_den;
        st->time_base.num = ctx->bmd_tb_num;
        st->codecpar->codec_id = AV_CODEC_ID_SMPTE_KLV;
        avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
        ctx->klv_st = st;
    }

    if (ctx->teletext_lines) {
        st = avformat_new_stream(avctx, NULL);
        if (!st) {
@@ -1403,7 +1227,7 @@ av_cold int ff_decklink_read_header(AVFormatContext *avctx)
    }

    result = ctx->dli->EnableVideoInput(ctx->bmd_mode,
                                        ctx->raw_format,
                                        (BMDPixelFormat) cctx->raw_format,
                                        bmdVideoInputFlagDefault);

    if (result != S_OK) {
@@ -1435,7 +1259,7 @@ int ff_decklink_read_packet(AVFormatContext *avctx, AVPacket *pkt)
    avpacket_queue_get(&ctx->queue, pkt, 1);

    if (ctx->tc_format && !(av_dict_get(ctx->video_st->metadata, "timecode", NULL, 0))) {
        buffer_size_t size;
        int size;
        const uint8_t *side_metadata = av_packet_get_side_data(pkt, AV_PKT_DATA_STRINGS_METADATA, &size);
        if (side_metadata) {
            if (av_packet_unpack_dictionary(side_metadata, size, &ctx->video_st->metadata) < 0)
@@ -30,17 +30,15 @@
#define DEC AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "list_devices", "use ffmpeg -sources decklink instead", OFFSET(list_devices), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, DEC | AV_OPT_FLAG_DEPRECATED},
    { "list_devices", "list available devices" , OFFSET(list_devices), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, DEC },
    { "list_formats", "list supported formats" , OFFSET(list_formats), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, DEC },
    { "format_code", "set format by fourcc" , OFFSET(format_code), AV_OPT_TYPE_STRING, { .str = NULL}, 0, 0, DEC },
    { "raw_format", "pixel format to be returned by the card when capturing" , OFFSET(raw_format), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 5, DEC, "raw_format" },
    { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, 0, 0, DEC, "raw_format"},
    { "uyvy422", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, DEC, "raw_format"},
    { "yuv422p10", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, DEC, "raw_format"},
    { "argb", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 3 }, 0, 0, DEC, "raw_format"},
    { "bgra", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 4 }, 0, 0, DEC, "raw_format"},
    { "rgb10", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 5 }, 0, 0, DEC, "raw_format"},
    { "enable_klv", "output klv if present in vanc", OFFSET(enable_klv), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, DEC },
    { "raw_format", "pixel format to be returned by the card when capturing" , OFFSET(raw_format), AV_OPT_TYPE_INT, { .i64 = MKBETAG('2','v','u','y')}, 0, UINT_MAX, DEC, "raw_format" },
    { "uyvy422", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MKBETAG('2','v','u','y') }, 0, 0, DEC, "raw_format"},
    { "yuv422p10", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MKBETAG('v','2','1','0') }, 0, 0, DEC, "raw_format"},
    { "argb", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 32 }, 0, 0, DEC, "raw_format"},
    { "bgra", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MKBETAG('B','G','R','A') }, 0, 0, DEC, "raw_format"},
    { "rgb10", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MKBETAG('r','2','1','0') }, 0, 0, DEC, "raw_format"},
    { "teletext_lines", "teletext lines bitmask", OFFSET(teletext_lines), AV_OPT_TYPE_INT64, { .i64 = 0 }, 0, 0x7ffffffffLL, DEC, "teletext_lines"},
    { "standard", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0x7fff9fffeLL}, 0, 0, DEC, "teletext_lines"},
    { "all", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0x7ffffffffLL}, 0, 0, DEC, "teletext_lines"},
@@ -49,7 +47,7 @@ static const AVOption options[] = {
    { "unset", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0}, 0, 0, DEC, "duplex_mode"},
    { "half", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1}, 0, 0, DEC, "duplex_mode"},
    { "full", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 2}, 0, 0, DEC, "duplex_mode"},
    { "timecode_format", "timecode format", OFFSET(tc_format), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 8, DEC, "tc_format"},
    { "timecode_format", "timecode format", OFFSET(tc_format), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 7, DEC, "tc_format"},
    { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0}, 0, 0, DEC, "tc_format"},
    { "rp188vitc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1}, 0, 0, DEC, "tc_format"},
    { "rp188vitc2", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 2}, 0, 0, DEC, "tc_format"},
@@ -58,9 +56,6 @@ static const AVOption options[] = {
    { "vitc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 5}, 0, 0, DEC, "tc_format"},
    { "vitc2", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 6}, 0, 0, DEC, "tc_format"},
    { "serial", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 7}, 0, 0, DEC, "tc_format"},
#if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0b000000
    { "rp188hfr", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 8}, 0, 0, DEC, "tc_format"},
#endif
    { "video_input", "video input", OFFSET(video_input), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 6, DEC, "video_input"},
    { "unset", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0}, 0, 0, DEC, "video_input"},
    { "sdi", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1}, 0, 0, DEC, "video_input"},
@@ -32,7 +32,6 @@ extern "C" {

extern "C" {
#include "libavformat/avformat.h"
#include "libavutil/internal.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"
}
@@ -313,8 +312,7 @@ static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
    uint16_t *cdp_words;
    uint16_t len;
    uint8_t cc_count;
    buffer_size_t size;
    int ret, i;
    int size, ret, i;

    const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
    if (!data)
@@ -438,7 +436,7 @@ static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
    AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
    AVPacket *avpacket = NULL;
    decklink_frame *frame;
    uint32_t buffered;
    buffercount_type buffered;
    HRESULT hr;

    if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
@@ -527,7 +525,7 @@ static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    int sample_count = pkt->size / (ctx->channels << 1);
    uint32_t buffered;
    buffercount_type buffered;

    ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
    if (pkt->pts > 1 && !buffered)
@@ -570,6 +568,7 @@ av_cold int ff_decklink_write_header(AVFormatContext *avctx)

    /* List available devices and exit. */
    if (ctx->list_devices) {
        av_log(avctx, AV_LOG_WARNING, "The -list_devices option is deprecated and will be removed. Please use ffmpeg -sinks decklink instead.\n");
        ff_decklink_list_devices_legacy(avctx, 0, 1);
        return AVERROR_EXIT;
    }
@@ -28,7 +28,7 @@
#define OFFSET(x) offsetof(struct decklink_cctx, x)
#define ENC AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    { "list_devices", "use ffmpeg -sinks decklink instead", OFFSET(list_devices), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, ENC | AV_OPT_FLAG_DEPRECATED},
    { "list_devices", "list available devices" , OFFSET(list_devices), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, ENC },
    { "list_formats", "list supported formats" , OFFSET(list_formats), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 1, ENC },
    { "preroll" , "video preroll in seconds", OFFSET(preroll ), AV_OPT_TYPE_DOUBLE, { .dbl = 0.5 }, 0, 5, ENC },
    { "duplex_mode" , "duplex mode" , OFFSET(duplex_mode ), AV_OPT_TYPE_INT , { .i64 = 0 }, 0, 2, ENC, "duplex_mode"},
34
externals/ffmpeg/ffmpeg/libavdevice/dshow.c
vendored
@@ -58,7 +58,7 @@ static int
dshow_read_close(AVFormatContext *s)
{
    struct dshow_ctx *ctx = s->priv_data;
    PacketList *pktl;
    AVPacketList *pktl;

    if (ctx->control) {
        IMediaControl_Stop(ctx->control);
@@ -87,13 +87,13 @@ dshow_read_close(AVFormatContext *s)
    }

    if (ctx->capture_pin[VideoDevice])
        ff_dshow_pin_Release(ctx->capture_pin[VideoDevice]);
        libAVPin_Release(ctx->capture_pin[VideoDevice]);
    if (ctx->capture_pin[AudioDevice])
        ff_dshow_pin_Release(ctx->capture_pin[AudioDevice]);
        libAVPin_Release(ctx->capture_pin[AudioDevice]);
    if (ctx->capture_filter[VideoDevice])
        ff_dshow_filter_Release(ctx->capture_filter[VideoDevice]);
        libAVFilter_Release(ctx->capture_filter[VideoDevice]);
    if (ctx->capture_filter[AudioDevice])
        ff_dshow_filter_Release(ctx->capture_filter[AudioDevice]);
        libAVFilter_Release(ctx->capture_filter[AudioDevice]);

    if (ctx->device_pin[VideoDevice])
        IPin_Release(ctx->device_pin[VideoDevice]);
@@ -118,7 +118,7 @@ dshow_read_close(AVFormatContext *s)

    pktl = ctx->pktl;
    while (pktl) {
        PacketList *next = pktl->next;
        AVPacketList *next = pktl->next;
        av_packet_unref(&pktl->pkt);
        av_free(pktl);
        pktl = next;
@@ -162,7 +162,7 @@ callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, e
{
    AVFormatContext *s = priv_data;
    struct dshow_ctx *ctx = s->priv_data;
    PacketList **ppktl, *pktl_next;
    AVPacketList **ppktl, *pktl_next;

//  dump_videohdr(s, vdhdr);

@@ -171,7 +171,7 @@ callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, e
    if(shall_we_drop(s, index, devtype))
        goto fail;

    pktl_next = av_mallocz(sizeof(PacketList));
    pktl_next = av_mallocz(sizeof(AVPacketList));
    if(!pktl_next)
        goto fail;

@@ -510,7 +510,7 @@ end:
 * Pops up a user dialog allowing them to adjust properties for the given filter, if possible.
 */
void
ff_dshow_show_filter_properties(IBaseFilter *device_filter, AVFormatContext *avctx) {
dshow_show_filter_properties(IBaseFilter *device_filter, AVFormatContext *avctx) {
    ISpecifyPropertyPages *property_pages = NULL;
    IUnknown *device_filter_iunknown = NULL;
    HRESULT hr;
@@ -582,7 +582,7 @@ dshow_cycle_pins(AVFormatContext *avctx, enum dshowDeviceType devtype,
    int should_show_properties = (devtype == VideoDevice) ? ctx->show_video_device_dialog : ctx->show_audio_device_dialog;

    if (should_show_properties)
        ff_dshow_show_filter_properties(device_filter, avctx);
        dshow_show_filter_properties(device_filter, avctx);

    r = IBaseFilter_EnumPins(device_filter, &pins);
    if (r != S_OK) {
@@ -731,8 +731,8 @@ dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
    char *device_filter_unique_name = NULL;
    IGraphBuilder *graph = ctx->graph;
    IPin *device_pin = NULL;
    DShowPin *capture_pin = NULL;
    DShowFilter *capture_filter = NULL;
    libAVPin *capture_pin = NULL;
    libAVFilter *capture_filter = NULL;
    ICaptureGraphBuilder2 *graph_builder2 = NULL;
    int ret = AVERROR(EIO);
    int r;
@@ -807,7 +807,7 @@ dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,

    ctx->device_pin[devtype] = device_pin;

    capture_filter = ff_dshow_filter_Create(avctx, callback, devtype);
    capture_filter = libAVFilter_Create(avctx, callback, devtype);
    if (!capture_filter) {
        av_log(avctx, AV_LOG_ERROR, "Could not create grabber filter.\n");
        goto error;
@@ -863,7 +863,7 @@ dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
        goto error;
    }

    ff_dshow_pin_AddRef(capture_filter->pin);
    libAVPin_AddRef(capture_filter->pin);
    capture_pin = capture_filter->pin;
    ctx->capture_pin[devtype] = capture_pin;

@@ -887,7 +887,7 @@ dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
        goto error;
    }

    r = ff_dshow_try_setup_crossbar_options(graph_builder2, device_filter, devtype, avctx);
    r = dshow_try_setup_crossbar_options(graph_builder2, device_filter, devtype, avctx);

    if (r != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not setup CrossBar\n");
@@ -953,7 +953,7 @@ dshow_add_device(AVFormatContext *avctx,

    ctx->capture_filter[devtype]->stream_index = st->index;

    ff_dshow_pin_ConnectionMediaType(ctx->capture_pin[devtype], &type);
    libAVPin_ConnectionMediaType(ctx->capture_pin[devtype], &type);

    par = st->codecpar;
    if (devtype == VideoDevice) {
@@ -1262,7 +1262,7 @@ static int dshow_check_event_queue(IMediaEvent *media_event)
static int dshow_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    struct dshow_ctx *ctx = s->priv_data;
    PacketList *pktl = NULL;
    AVPacketList *pktl = NULL;

    while (!ctx->eof && !pktl) {
        WaitForSingleObject(ctx->mutex, INFINITE);
225
externals/ffmpeg/ffmpeg/libavdevice/dshow_capture.h
vendored
225
externals/ffmpeg/ffmpeg/libavdevice/dshow_capture.h
vendored
@@ -34,7 +34,6 @@
|
||||
#include <dvdmedia.h>
|
||||
|
||||
#include "libavcodec/internal.h"
|
||||
#include "libavcodec/packet_internal.h"
|
||||
|
||||
/* EC_DEVICE_LOST is not defined in MinGW dshow headers. */
|
||||
#ifndef EC_DEVICE_LOST
|
||||
@@ -69,20 +68,20 @@ enum dshowSourceFilterType {
|
||||
AudioSourceDevice = 1,
|
||||
};
|
||||
|
||||
#define DECLARE_QUERYINTERFACE(prefix, class, ...) \
|
||||
long \
|
||||
ff_dshow_##prefix##_QueryInterface(class *this, const GUID *riid, void **ppvObject) \
|
||||
#define DECLARE_QUERYINTERFACE(class, ...) \
|
||||
long WINAPI \
|
||||
class##_QueryInterface(class *this, const GUID *riid, void **ppvObject) \
|
||||
{ \
|
||||
struct GUIDoffset ifaces[] = __VA_ARGS__; \
|
||||
int i; \
|
||||
dshowdebug("ff_dshow_"AV_STRINGIFY(prefix)"_QueryInterface(%p, %p, %p)\n", this, riid, ppvObject); \
|
||||
dshowdebug(AV_STRINGIFY(class)"_QueryInterface(%p, %p, %p)\n", this, riid, ppvObject); \
|
||||
ff_printGUID(riid); \
|
||||
if (!ppvObject) \
|
||||
return E_POINTER; \
|
||||
for (i = 0; i < sizeof(ifaces)/sizeof(ifaces[0]); i++) { \
|
||||
if (IsEqualGUID(riid, ifaces[i].iid)) { \
|
||||
void *obj = (void *) ((uint8_t *) this + ifaces[i].offset); \
|
||||
ff_dshow_##prefix##_AddRef(this); \
|
||||
class##_AddRef(this); \
|
||||
dshowdebug("\tfound %d with offset %d\n", i, ifaces[i].offset); \
|
||||
*ppvObject = (void *) obj; \
|
||||
return S_OK; \
|
||||
@@ -92,28 +91,28 @@ ff_dshow_##prefix##_QueryInterface(class *this, const GUID *riid, void **ppvObje
|
||||
*ppvObject = NULL; \
|
||||
return E_NOINTERFACE; \
|
||||
}
|
||||
#define DECLARE_ADDREF(prefix, class) \
|
||||
unsigned long \
|
||||
ff_dshow_##prefix##_AddRef(class *this) \
|
||||
#define DECLARE_ADDREF(class) \
|
||||
unsigned long WINAPI \
|
||||
class##_AddRef(class *this) \
|
||||
{ \
|
||||
dshowdebug("ff_dshow_"AV_STRINGIFY(prefix)"_AddRef(%p)\t%ld\n", this, this->ref+1); \
|
||||
dshowdebug(AV_STRINGIFY(class)"_AddRef(%p)\t%ld\n", this, this->ref+1); \
|
||||
return InterlockedIncrement(&this->ref); \
|
||||
}
|
||||
#define DECLARE_RELEASE(prefix, class) \
|
||||
unsigned long \
|
||||
ff_dshow_##prefix##_Release(class *this) \
|
||||
#define DECLARE_RELEASE(class) \
|
||||
unsigned long WINAPI \
|
||||
class##_Release(class *this) \
|
||||
{ \
|
||||
long ref = InterlockedDecrement(&this->ref); \
|
||||
dshowdebug("ff_dshow_"AV_STRINGIFY(prefix)"_Release(%p)\t%ld\n", this, ref); \
|
||||
dshowdebug(AV_STRINGIFY(class)"_Release(%p)\t%ld\n", this, ref); \
|
||||
if (!ref) \
|
||||
ff_dshow_##prefix##_Destroy(this); \
|
||||
class##_Destroy(this); \
|
||||
return ref; \
|
||||
}
|
||||
|
||||
#define DECLARE_DESTROY(prefix, class, func) \
|
||||
void ff_dshow_##prefix##_Destroy(class *this) \
|
||||
#define DECLARE_DESTROY(class, func) \
|
||||
void class##_Destroy(class *this) \
|
||||
{ \
|
||||
dshowdebug("ff_dshow_"AV_STRINGIFY(prefix)"_Destroy(%p)\n", this); \
|
||||
dshowdebug(AV_STRINGIFY(class)"_Destroy(%p)\n", this); \
|
||||
func(this); \
|
||||
if (this) { \
|
||||
if (this->vtbl) \
|
||||
@@ -121,12 +120,12 @@ void ff_dshow_##prefix##_Destroy(class *this) \
|
||||
CoTaskMemFree(this); \
|
||||
} \
|
||||
}
|
||||
#define DECLARE_CREATE(prefix, class, setup, ...) \
|
||||
class *ff_dshow_##prefix##_Create(__VA_ARGS__) \
|
||||
#define DECLARE_CREATE(class, setup, ...) \
|
||||
class *class##_Create(__VA_ARGS__) \
|
||||
{ \
|
||||
class *this = CoTaskMemAlloc(sizeof(class)); \
|
||||
void *vtbl = CoTaskMemAlloc(sizeof(*this->vtbl)); \
|
||||
dshowdebug("ff_dshow_"AV_STRINGIFY(prefix)"_Create(%p)\n", this); \
|
||||
dshowdebug(AV_STRINGIFY(class)"_Create(%p)\n", this); \
|
||||
if (!this || !vtbl) \
|
||||
goto fail; \
|
||||
ZeroMemory(this, sizeof(class)); \
|
||||
@@ -135,123 +134,123 @@ class *ff_dshow_##prefix##_Create(__VA_ARGS__) \
|
||||
this->vtbl = vtbl; \
|
||||
if (!setup) \
|
||||
goto fail; \
|
||||
dshowdebug("created ff_dshow_"AV_STRINGIFY(prefix)" %p\n", this); \
|
||||
dshowdebug("created "AV_STRINGIFY(class)" %p\n", this); \
|
||||
return this; \
|
||||
fail: \
|
||||
ff_dshow_##prefix##_Destroy(this); \
|
||||
dshowdebug("could not create ff_dshow_"AV_STRINGIFY(prefix)"\n"); \
|
||||
class##_Destroy(this); \
|
||||
dshowdebug("could not create "AV_STRINGIFY(class)"\n"); \
|
||||
return NULL; \
|
||||
}
|
||||
|
||||
#define SETVTBL(vtbl, prefix, fn) \
|
||||
do { (vtbl)->fn = (void *) ff_dshow_##prefix##_##fn; } while(0)
|
||||
#define SETVTBL(vtbl, class, fn) \
|
||||
do { (vtbl)->fn = (void *) class##_##fn; } while(0)
|
||||
|
||||
/*****************************************************************************
|
||||
* Forward Declarations
|
||||
****************************************************************************/
|
||||
typedef struct DShowPin DShowPin;
|
||||
typedef struct DShowMemInputPin DShowMemInputPin;
|
||||
typedef struct DShowEnumPins DShowEnumPins;
|
||||
typedef struct DShowEnumMediaTypes DShowEnumMediaTypes;
|
||||
typedef struct DShowFilter DShowFilter;
|
||||
typedef struct libAVPin libAVPin;
|
||||
typedef struct libAVMemInputPin libAVMemInputPin;
|
||||
typedef struct libAVEnumPins libAVEnumPins;
|
||||
typedef struct libAVEnumMediaTypes libAVEnumMediaTypes;
|
||||
typedef struct libAVFilter libAVFilter;
|
||||
|
||||
/*****************************************************************************
|
||||
* DShowPin
|
||||
* libAVPin
|
||||
****************************************************************************/
|
||||
struct DShowPin {
|
||||
struct libAVPin {
|
||||
IPinVtbl *vtbl;
|
||||
long ref;
|
||||
DShowFilter *filter;
|
||||
libAVFilter *filter;
|
||||
IPin *connectedto;
|
||||
AM_MEDIA_TYPE type;
|
||||
IMemInputPinVtbl *imemvtbl;
|
||||
};
|
||||
|
||||
long ff_dshow_pin_QueryInterface (DShowPin *, const GUID *, void **);
|
||||
unsigned long ff_dshow_pin_AddRef (DShowPin *);
|
||||
unsigned long ff_dshow_pin_Release (DShowPin *);
|
||||
long ff_dshow_pin_Connect (DShowPin *, IPin *, const AM_MEDIA_TYPE *);
|
||||
long ff_dshow_pin_ReceiveConnection (DShowPin *, IPin *, const AM_MEDIA_TYPE *);
|
||||
long ff_dshow_pin_Disconnect (DShowPin *);
|
||||
long ff_dshow_pin_ConnectedTo (DShowPin *, IPin **);
|
||||
long ff_dshow_pin_ConnectionMediaType (DShowPin *, AM_MEDIA_TYPE *);
|
||||
long ff_dshow_pin_QueryPinInfo (DShowPin *, PIN_INFO *);
|
||||
long ff_dshow_pin_QueryDirection (DShowPin *, PIN_DIRECTION *);
|
||||
long ff_dshow_pin_QueryId (DShowPin *, wchar_t **);
|
||||
long ff_dshow_pin_QueryAccept (DShowPin *, const AM_MEDIA_TYPE *);
|
||||
long ff_dshow_pin_EnumMediaTypes (DShowPin *, IEnumMediaTypes **);
|
||||
long ff_dshow_pin_QueryInternalConnections(DShowPin *, IPin **, unsigned long *);
|
||||
long ff_dshow_pin_EndOfStream (DShowPin *);
|
||||
long ff_dshow_pin_BeginFlush (DShowPin *);
|
||||
long ff_dshow_pin_EndFlush (DShowPin *);
|
||||
long ff_dshow_pin_NewSegment (DShowPin *, REFERENCE_TIME, REFERENCE_TIME, double);
|
||||
long WINAPI libAVPin_QueryInterface (libAVPin *, const GUID *, void **);
|
||||
unsigned long WINAPI libAVPin_AddRef (libAVPin *);
|
||||
unsigned long WINAPI libAVPin_Release (libAVPin *);
|
||||
long WINAPI libAVPin_Connect (libAVPin *, IPin *, const AM_MEDIA_TYPE *);
|
||||
long WINAPI libAVPin_ReceiveConnection (libAVPin *, IPin *, const AM_MEDIA_TYPE *);
|
||||
long WINAPI libAVPin_Disconnect (libAVPin *);
|
||||
long WINAPI libAVPin_ConnectedTo (libAVPin *, IPin **);
|
||||
long WINAPI libAVPin_ConnectionMediaType (libAVPin *, AM_MEDIA_TYPE *);
|
||||
long WINAPI libAVPin_QueryPinInfo (libAVPin *, PIN_INFO *);
|
||||
long WINAPI libAVPin_QueryDirection (libAVPin *, PIN_DIRECTION *);
|
||||
long WINAPI libAVPin_QueryId (libAVPin *, wchar_t **);
|
||||
long WINAPI libAVPin_QueryAccept (libAVPin *, const AM_MEDIA_TYPE *);
|
||||
long WINAPI libAVPin_EnumMediaTypes (libAVPin *, IEnumMediaTypes **);
|
||||
long WINAPI libAVPin_QueryInternalConnections(libAVPin *, IPin **, unsigned long *);
|
||||
long WINAPI libAVPin_EndOfStream (libAVPin *);
|
||||
long WINAPI libAVPin_BeginFlush (libAVPin *);
|
||||
long WINAPI libAVPin_EndFlush (libAVPin *);
|
||||
long WINAPI libAVPin_NewSegment (libAVPin *, REFERENCE_TIME, REFERENCE_TIME, double);
|
||||
|
||||
long ff_dshow_meminputpin_QueryInterface (DShowMemInputPin *, const GUID *, void **);
|
||||
unsigned long ff_dshow_meminputpin_AddRef (DShowMemInputPin *);
|
||||
unsigned long ff_dshow_meminputpin_Release (DShowMemInputPin *);
|
||||
long ff_dshow_meminputpin_GetAllocator (DShowMemInputPin *, IMemAllocator **);
|
||||
long ff_dshow_meminputpin_NotifyAllocator (DShowMemInputPin *, IMemAllocator *, BOOL);
|
||||
long ff_dshow_meminputpin_GetAllocatorRequirements(DShowMemInputPin *, ALLOCATOR_PROPERTIES *);
|
||||
long ff_dshow_meminputpin_Receive (DShowMemInputPin *, IMediaSample *);
|
||||
long ff_dshow_meminputpin_ReceiveMultiple (DShowMemInputPin *, IMediaSample **, long, long *);
|
||||
long ff_dshow_meminputpin_ReceiveCanBlock (DShowMemInputPin *);
|
||||
long WINAPI libAVMemInputPin_QueryInterface (libAVMemInputPin *, const GUID *, void **);
|
||||
unsigned long WINAPI libAVMemInputPin_AddRef (libAVMemInputPin *);
|
||||
unsigned long WINAPI libAVMemInputPin_Release (libAVMemInputPin *);
|
||||
long WINAPI libAVMemInputPin_GetAllocator (libAVMemInputPin *, IMemAllocator **);
|
||||
long WINAPI libAVMemInputPin_NotifyAllocator (libAVMemInputPin *, IMemAllocator *, BOOL);
|
||||
long WINAPI libAVMemInputPin_GetAllocatorRequirements(libAVMemInputPin *, ALLOCATOR_PROPERTIES *);
|
||||
long WINAPI libAVMemInputPin_Receive (libAVMemInputPin *, IMediaSample *);
|
||||
long WINAPI libAVMemInputPin_ReceiveMultiple (libAVMemInputPin *, IMediaSample **, long, long *);
|
||||
long WINAPI libAVMemInputPin_ReceiveCanBlock (libAVMemInputPin *);
|
||||
|
||||
void ff_dshow_pin_Destroy(DShowPin *);
|
||||
DShowPin *ff_dshow_pin_Create (DShowFilter *filter);
|
||||
void libAVPin_Destroy(libAVPin *);
|
||||
libAVPin *libAVPin_Create (libAVFilter *filter);
|
||||
|
||||
void ff_dshow_meminputpin_Destroy(DShowMemInputPin *);
|
||||
void libAVMemInputPin_Destroy(libAVMemInputPin *);
|
||||
|
||||
/*****************************************************************************
* DShowEnumPins
* libAVEnumPins
****************************************************************************/
struct DShowEnumPins {
struct libAVEnumPins {
IEnumPinsVtbl *vtbl;
long ref;
int pos;
DShowPin *pin;
DShowFilter *filter;
libAVPin *pin;
libAVFilter *filter;
};

long ff_dshow_enumpins_QueryInterface(DShowEnumPins *, const GUID *, void **);
|
||||
unsigned long ff_dshow_enumpins_AddRef (DShowEnumPins *);
|
||||
unsigned long ff_dshow_enumpins_Release (DShowEnumPins *);
|
||||
long ff_dshow_enumpins_Next (DShowEnumPins *, unsigned long, IPin **, unsigned long *);
|
||||
long ff_dshow_enumpins_Skip (DShowEnumPins *, unsigned long);
|
||||
long ff_dshow_enumpins_Reset (DShowEnumPins *);
|
||||
long ff_dshow_enumpins_Clone (DShowEnumPins *, DShowEnumPins **);
|
||||
long WINAPI libAVEnumPins_QueryInterface(libAVEnumPins *, const GUID *, void **);
|
||||
unsigned long WINAPI libAVEnumPins_AddRef (libAVEnumPins *);
|
||||
unsigned long WINAPI libAVEnumPins_Release (libAVEnumPins *);
|
||||
long WINAPI libAVEnumPins_Next (libAVEnumPins *, unsigned long, IPin **, unsigned long *);
|
||||
long WINAPI libAVEnumPins_Skip (libAVEnumPins *, unsigned long);
|
||||
long WINAPI libAVEnumPins_Reset (libAVEnumPins *);
|
||||
long WINAPI libAVEnumPins_Clone (libAVEnumPins *, libAVEnumPins **);
|
||||
|
||||
void ff_dshow_enumpins_Destroy(DShowEnumPins *);
|
||||
DShowEnumPins *ff_dshow_enumpins_Create (DShowPin *pin, DShowFilter *filter);
|
||||
void libAVEnumPins_Destroy(libAVEnumPins *);
|
||||
libAVEnumPins *libAVEnumPins_Create (libAVPin *pin, libAVFilter *filter);
|
||||
|
||||
/*****************************************************************************
* DShowEnumMediaTypes
* libAVEnumMediaTypes
****************************************************************************/
struct DShowEnumMediaTypes {
struct libAVEnumMediaTypes {
IEnumMediaTypesVtbl *vtbl;
long ref;
int pos;
AM_MEDIA_TYPE type;
};

long ff_dshow_enummediatypes_QueryInterface(DShowEnumMediaTypes *, const GUID *, void **);
|
||||
unsigned long ff_dshow_enummediatypes_AddRef (DShowEnumMediaTypes *);
|
||||
unsigned long ff_dshow_enummediatypes_Release (DShowEnumMediaTypes *);
|
||||
long ff_dshow_enummediatypes_Next (DShowEnumMediaTypes *, unsigned long, AM_MEDIA_TYPE **, unsigned long *);
|
||||
long ff_dshow_enummediatypes_Skip (DShowEnumMediaTypes *, unsigned long);
|
||||
long ff_dshow_enummediatypes_Reset (DShowEnumMediaTypes *);
|
||||
long ff_dshow_enummediatypes_Clone (DShowEnumMediaTypes *, DShowEnumMediaTypes **);
|
||||
long WINAPI libAVEnumMediaTypes_QueryInterface(libAVEnumMediaTypes *, const GUID *, void **);
|
||||
unsigned long WINAPI libAVEnumMediaTypes_AddRef (libAVEnumMediaTypes *);
|
||||
unsigned long WINAPI libAVEnumMediaTypes_Release (libAVEnumMediaTypes *);
|
||||
long WINAPI libAVEnumMediaTypes_Next (libAVEnumMediaTypes *, unsigned long, AM_MEDIA_TYPE **, unsigned long *);
|
||||
long WINAPI libAVEnumMediaTypes_Skip (libAVEnumMediaTypes *, unsigned long);
|
||||
long WINAPI libAVEnumMediaTypes_Reset (libAVEnumMediaTypes *);
|
||||
long WINAPI libAVEnumMediaTypes_Clone (libAVEnumMediaTypes *, libAVEnumMediaTypes **);
|
||||
|
||||
void ff_dshow_enummediatypes_Destroy(DShowEnumMediaTypes *);
|
||||
DShowEnumMediaTypes *ff_dshow_enummediatypes_Create(const AM_MEDIA_TYPE *type);
|
||||
void libAVEnumMediaTypes_Destroy(libAVEnumMediaTypes *);
|
||||
libAVEnumMediaTypes *libAVEnumMediaTypes_Create(const AM_MEDIA_TYPE *type);
|
||||
|
||||
/*****************************************************************************
* DShowFilter
* libAVFilter
****************************************************************************/
struct DShowFilter {
struct libAVFilter {
IBaseFilterVtbl *vtbl;
long ref;
const wchar_t *name;
DShowPin *pin;
libAVPin *pin;
FILTER_INFO info;
FILTER_STATE state;
IReferenceClock *clock;
@@ -262,24 +261,24 @@ struct DShowFilter {
|
||||
void (*callback)(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType type);
|
||||
};
|
||||
|
||||
long ff_dshow_filter_QueryInterface (DShowFilter *, const GUID *, void **);
|
||||
unsigned long ff_dshow_filter_AddRef (DShowFilter *);
|
||||
unsigned long ff_dshow_filter_Release (DShowFilter *);
|
||||
long ff_dshow_filter_GetClassID (DShowFilter *, CLSID *);
|
||||
long ff_dshow_filter_Stop (DShowFilter *);
|
||||
long ff_dshow_filter_Pause (DShowFilter *);
|
||||
long ff_dshow_filter_Run (DShowFilter *, REFERENCE_TIME);
|
||||
long ff_dshow_filter_GetState (DShowFilter *, DWORD, FILTER_STATE *);
|
||||
long ff_dshow_filter_SetSyncSource (DShowFilter *, IReferenceClock *);
|
||||
long ff_dshow_filter_GetSyncSource (DShowFilter *, IReferenceClock **);
|
||||
long ff_dshow_filter_EnumPins (DShowFilter *, IEnumPins **);
|
||||
long ff_dshow_filter_FindPin (DShowFilter *, const wchar_t *, IPin **);
|
||||
long ff_dshow_filter_QueryFilterInfo(DShowFilter *, FILTER_INFO *);
|
||||
long ff_dshow_filter_JoinFilterGraph(DShowFilter *, IFilterGraph *, const wchar_t *);
|
||||
long ff_dshow_filter_QueryVendorInfo(DShowFilter *, wchar_t **);
|
||||
long WINAPI libAVFilter_QueryInterface (libAVFilter *, const GUID *, void **);
|
||||
unsigned long WINAPI libAVFilter_AddRef (libAVFilter *);
|
||||
unsigned long WINAPI libAVFilter_Release (libAVFilter *);
|
||||
long WINAPI libAVFilter_GetClassID (libAVFilter *, CLSID *);
|
||||
long WINAPI libAVFilter_Stop (libAVFilter *);
|
||||
long WINAPI libAVFilter_Pause (libAVFilter *);
|
||||
long WINAPI libAVFilter_Run (libAVFilter *, REFERENCE_TIME);
|
||||
long WINAPI libAVFilter_GetState (libAVFilter *, DWORD, FILTER_STATE *);
|
||||
long WINAPI libAVFilter_SetSyncSource (libAVFilter *, IReferenceClock *);
|
||||
long WINAPI libAVFilter_GetSyncSource (libAVFilter *, IReferenceClock **);
|
||||
long WINAPI libAVFilter_EnumPins (libAVFilter *, IEnumPins **);
|
||||
long WINAPI libAVFilter_FindPin (libAVFilter *, const wchar_t *, IPin **);
|
||||
long WINAPI libAVFilter_QueryFilterInfo(libAVFilter *, FILTER_INFO *);
|
||||
long WINAPI libAVFilter_JoinFilterGraph(libAVFilter *, IFilterGraph *, const wchar_t *);
|
||||
long WINAPI libAVFilter_QueryVendorInfo(libAVFilter *, wchar_t **);
|
||||
|
||||
void ff_dshow_filter_Destroy(DShowFilter *);
|
||||
DShowFilter *ff_dshow_filter_Create (void *, void *, enum dshowDeviceType);
|
||||
void libAVFilter_Destroy(libAVFilter *);
|
||||
libAVFilter *libAVFilter_Create (void *, void *, enum dshowDeviceType);
|
||||
|
||||
/*****************************************************************************
* dshow_ctx
@@ -315,13 +314,13 @@ struct dshow_ctx {

IBaseFilter *device_filter[2];
IPin *device_pin[2];
DShowFilter *capture_filter[2];
DShowPin *capture_pin[2];
libAVFilter *capture_filter[2];
libAVPin *capture_pin[2];

HANDLE mutex;
HANDLE event[2]; /* event[0] is set by DirectShow
* event[1] is set by callback() */
PacketList *pktl;
AVPacketList *pktl;

int eof;

@@ -347,9 +346,9 @@ struct dshow_ctx {
/*****************************************************************************
* CrossBar
****************************************************************************/
HRESULT ff_dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
HRESULT dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
IBaseFilter *device_filter, enum dshowDeviceType devtype, AVFormatContext *avctx);

void ff_dshow_show_filter_properties(IBaseFilter *pFilter, AVFormatContext *avctx);
void dshow_show_filter_properties(IBaseFilter *pFilter, AVFormatContext *avctx);

#endif /* AVDEVICE_DSHOW_CAPTURE_H */

@@ -137,7 +137,7 @@ setup_crossbar_options(IAMCrossbar *cross_bar, enum dshowDeviceType devtype, AVF
* Given a fully constructed graph, check if there is a cross bar filter, and configure its pins if so.
*/
HRESULT
ff_dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
IBaseFilter *device_filter, enum dshowDeviceType devtype, AVFormatContext *avctx)
{
struct dshow_ctx *ctx = avctx->priv_data;
@@ -163,7 +163,7 @@ ff_dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
hr = IAMCrossbar_QueryInterface(cross_bar, &IID_IBaseFilter, (void **) &cross_bar_base_filter);
if (hr != S_OK)
goto end;
ff_dshow_show_filter_properties(cross_bar_base_filter, avctx);
dshow_show_filter_properties(cross_bar_base_filter, avctx);
}

if (devtype == VideoDevice && ctx->show_analog_tv_tuner_dialog) {
@@ -173,7 +173,7 @@ ff_dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
hr = IAMCrossbar_QueryInterface(tv_tuner_filter, &IID_IBaseFilter, (void **) &tv_tuner_base_filter);
if (hr != S_OK)
goto end;
ff_dshow_show_filter_properties(tv_tuner_base_filter, avctx);
dshow_show_filter_properties(tv_tuner_base_filter, avctx);
} else {
av_log(avctx, AV_LOG_WARNING, "unable to find a tv tuner to display dialog for!");
}
@@ -185,7 +185,7 @@ ff_dshow_try_setup_crossbar_options(ICaptureGraphBuilder2 *graph_builder2,
hr = IAMCrossbar_QueryInterface(tv_audio_filter, &IID_IBaseFilter, (void **) &tv_audio_base_filter);
if (hr != S_OK)
goto end;
ff_dshow_show_filter_properties(tv_audio_base_filter, avctx);
dshow_show_filter_properties(tv_audio_base_filter, avctx);
} else {
av_log(avctx, AV_LOG_WARNING, "unable to find a tv audio tuner to display dialog for!");
}

@@ -21,16 +21,17 @@
|
||||
|
||||
#include "dshow_capture.h"
|
||||
|
||||
DECLARE_QUERYINTERFACE(enummediatypes, DShowEnumMediaTypes,
|
||||
DECLARE_QUERYINTERFACE(libAVEnumMediaTypes,
|
||||
{ {&IID_IUnknown,0}, {&IID_IEnumMediaTypes,0} })
|
||||
DECLARE_ADDREF(enummediatypes, DShowEnumMediaTypes)
|
||||
DECLARE_RELEASE(enummediatypes, DShowEnumMediaTypes)
|
||||
DECLARE_ADDREF(libAVEnumMediaTypes)
|
||||
DECLARE_RELEASE(libAVEnumMediaTypes)
|
||||
|
||||
long ff_dshow_enummediatypes_Next(DShowEnumMediaTypes *this, unsigned long n,
|
||||
long WINAPI
|
||||
libAVEnumMediaTypes_Next(libAVEnumMediaTypes *this, unsigned long n,
|
||||
AM_MEDIA_TYPE **types, unsigned long *fetched)
|
||||
{
|
||||
int count = 0;
|
||||
dshowdebug("ff_dshow_enummediatypes_Next(%p)\n", this);
|
||||
dshowdebug("libAVEnumMediaTypes_Next(%p)\n", this);
|
||||
if (!types)
|
||||
return E_POINTER;
|
||||
if (!this->pos && n == 1) {
|
||||
@@ -50,26 +51,29 @@ long ff_dshow_enummediatypes_Next(DShowEnumMediaTypes *this, unsigned long n,
|
||||
return S_FALSE;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enummediatypes_Skip(DShowEnumMediaTypes *this, unsigned long n)
|
||||
long WINAPI
|
||||
libAVEnumMediaTypes_Skip(libAVEnumMediaTypes *this, unsigned long n)
|
||||
{
|
||||
dshowdebug("ff_dshow_enummediatypes_Skip(%p)\n", this);
|
||||
dshowdebug("libAVEnumMediaTypes_Skip(%p)\n", this);
|
||||
if (n) /* Any skip will always fall outside of the only valid type. */
|
||||
return S_FALSE;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enummediatypes_Reset(DShowEnumMediaTypes *this)
|
||||
long WINAPI
|
||||
libAVEnumMediaTypes_Reset(libAVEnumMediaTypes *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_enummediatypes_Reset(%p)\n", this);
|
||||
dshowdebug("libAVEnumMediaTypes_Reset(%p)\n", this);
|
||||
this->pos = 0;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enummediatypes_Clone(DShowEnumMediaTypes *this, DShowEnumMediaTypes **enums)
|
||||
long WINAPI
|
||||
libAVEnumMediaTypes_Clone(libAVEnumMediaTypes *this, libAVEnumMediaTypes **enums)
|
||||
{
|
||||
DShowEnumMediaTypes *new;
|
||||
dshowdebug("ff_dshow_enummediatypes_Clone(%p)\n", this);
|
||||
libAVEnumMediaTypes *new;
|
||||
dshowdebug("libAVEnumMediaTypes_Clone(%p)\n", this);
|
||||
if (!enums)
|
||||
return E_POINTER;
|
||||
new = ff_dshow_enummediatypes_Create(&this->type);
|
||||
new = libAVEnumMediaTypes_Create(&this->type);
|
||||
if (!new)
|
||||
return E_OUTOFMEMORY;
|
||||
new->pos = this->pos;
|
||||
@@ -77,16 +81,17 @@ long ff_dshow_enummediatypes_Clone(DShowEnumMediaTypes *this, DShowEnumMediaType
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
static int ff_dshow_enummediatypes_Setup(DShowEnumMediaTypes *this, const AM_MEDIA_TYPE *type)
|
||||
static int
|
||||
libAVEnumMediaTypes_Setup(libAVEnumMediaTypes *this, const AM_MEDIA_TYPE *type)
|
||||
{
|
||||
IEnumMediaTypesVtbl *vtbl = this->vtbl;
|
||||
SETVTBL(vtbl, enummediatypes, QueryInterface);
|
||||
SETVTBL(vtbl, enummediatypes, AddRef);
|
||||
SETVTBL(vtbl, enummediatypes, Release);
|
||||
SETVTBL(vtbl, enummediatypes, Next);
|
||||
SETVTBL(vtbl, enummediatypes, Skip);
|
||||
SETVTBL(vtbl, enummediatypes, Reset);
|
||||
SETVTBL(vtbl, enummediatypes, Clone);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, QueryInterface);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, AddRef);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, Release);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, Next);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, Skip);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, Reset);
|
||||
SETVTBL(vtbl, libAVEnumMediaTypes, Clone);
|
||||
|
||||
if (!type) {
|
||||
this->type.majortype = GUID_NULL;
|
||||
@@ -96,5 +101,5 @@ static int ff_dshow_enummediatypes_Setup(DShowEnumMediaTypes *this, const AM_MED
|
||||
|
||||
return 1;
|
||||
}
|
||||
DECLARE_CREATE(enummediatypes, DShowEnumMediaTypes, ff_dshow_enummediatypes_Setup(this, type), const AM_MEDIA_TYPE *type)
|
||||
DECLARE_DESTROY(enummediatypes, DShowEnumMediaTypes, nothing)
|
||||
DECLARE_CREATE(libAVEnumMediaTypes, libAVEnumMediaTypes_Setup(this, type), const AM_MEDIA_TYPE *type)
|
||||
DECLARE_DESTROY(libAVEnumMediaTypes, nothing)
|
||||
|
@@ -21,20 +21,21 @@
|
||||
|
||||
#include "dshow_capture.h"
|
||||
|
||||
DECLARE_QUERYINTERFACE(enumpins, DShowEnumPins,
|
||||
DECLARE_QUERYINTERFACE(libAVEnumPins,
|
||||
{ {&IID_IUnknown,0}, {&IID_IEnumPins,0} })
|
||||
DECLARE_ADDREF(enumpins, DShowEnumPins)
|
||||
DECLARE_RELEASE(enumpins, DShowEnumPins)
|
||||
DECLARE_ADDREF(libAVEnumPins)
|
||||
DECLARE_RELEASE(libAVEnumPins)
|
||||
|
||||
long ff_dshow_enumpins_Next(DShowEnumPins *this, unsigned long n, IPin **pins,
|
||||
long WINAPI
|
||||
libAVEnumPins_Next(libAVEnumPins *this, unsigned long n, IPin **pins,
|
||||
unsigned long *fetched)
|
||||
{
|
||||
int count = 0;
|
||||
dshowdebug("ff_dshow_enumpins_Next(%p)\n", this);
|
||||
dshowdebug("libAVEnumPins_Next(%p)\n", this);
|
||||
if (!pins)
|
||||
return E_POINTER;
|
||||
if (!this->pos && n == 1) {
|
||||
ff_dshow_pin_AddRef(this->pin);
|
||||
libAVPin_AddRef(this->pin);
|
||||
*pins = (IPin *) this->pin;
|
||||
count = 1;
|
||||
this->pos = 1;
|
||||
@@ -45,26 +46,29 @@ long ff_dshow_enumpins_Next(DShowEnumPins *this, unsigned long n, IPin **pins,
|
||||
return S_FALSE;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enumpins_Skip(DShowEnumPins *this, unsigned long n)
|
||||
long WINAPI
|
||||
libAVEnumPins_Skip(libAVEnumPins *this, unsigned long n)
|
||||
{
|
||||
dshowdebug("ff_dshow_enumpins_Skip(%p)\n", this);
|
||||
dshowdebug("libAVEnumPins_Skip(%p)\n", this);
|
||||
if (n) /* Any skip will always fall outside of the only valid pin. */
|
||||
return S_FALSE;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enumpins_Reset(DShowEnumPins *this)
|
||||
long WINAPI
|
||||
libAVEnumPins_Reset(libAVEnumPins *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_enumpins_Reset(%p)\n", this);
|
||||
dshowdebug("libAVEnumPins_Reset(%p)\n", this);
|
||||
this->pos = 0;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_enumpins_Clone(DShowEnumPins *this, DShowEnumPins **pins)
|
||||
long WINAPI
|
||||
libAVEnumPins_Clone(libAVEnumPins *this, libAVEnumPins **pins)
|
||||
{
|
||||
DShowEnumPins *new;
|
||||
dshowdebug("ff_dshow_enumpins_Clone(%p)\n", this);
|
||||
libAVEnumPins *new;
|
||||
dshowdebug("libAVEnumPins_Clone(%p)\n", this);
|
||||
if (!pins)
|
||||
return E_POINTER;
|
||||
new = ff_dshow_enumpins_Create(this->pin, this->filter);
|
||||
new = libAVEnumPins_Create(this->pin, this->filter);
|
||||
if (!new)
|
||||
return E_OUTOFMEMORY;
|
||||
new->pos = this->pos;
|
||||
@@ -72,28 +76,30 @@ long ff_dshow_enumpins_Clone(DShowEnumPins *this, DShowEnumPins **pins)
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
static int ff_dshow_enumpins_Setup(DShowEnumPins *this, DShowPin *pin, DShowFilter *filter)
|
||||
static int
|
||||
libAVEnumPins_Setup(libAVEnumPins *this, libAVPin *pin, libAVFilter *filter)
|
||||
{
|
||||
IEnumPinsVtbl *vtbl = this->vtbl;
|
||||
SETVTBL(vtbl, enumpins, QueryInterface);
|
||||
SETVTBL(vtbl, enumpins, AddRef);
|
||||
SETVTBL(vtbl, enumpins, Release);
|
||||
SETVTBL(vtbl, enumpins, Next);
|
||||
SETVTBL(vtbl, enumpins, Skip);
|
||||
SETVTBL(vtbl, enumpins, Reset);
|
||||
SETVTBL(vtbl, enumpins, Clone);
|
||||
SETVTBL(vtbl, libAVEnumPins, QueryInterface);
|
||||
SETVTBL(vtbl, libAVEnumPins, AddRef);
|
||||
SETVTBL(vtbl, libAVEnumPins, Release);
|
||||
SETVTBL(vtbl, libAVEnumPins, Next);
|
||||
SETVTBL(vtbl, libAVEnumPins, Skip);
|
||||
SETVTBL(vtbl, libAVEnumPins, Reset);
|
||||
SETVTBL(vtbl, libAVEnumPins, Clone);
|
||||
|
||||
this->pin = pin;
|
||||
this->filter = filter;
|
||||
ff_dshow_filter_AddRef(this->filter);
|
||||
libAVFilter_AddRef(this->filter);
|
||||
|
||||
return 1;
|
||||
}
|
||||
static int ff_dshow_enumpins_Cleanup(DShowEnumPins *this)
|
||||
static int
|
||||
libAVEnumPins_Cleanup(libAVEnumPins *this)
|
||||
{
|
||||
ff_dshow_filter_Release(this->filter);
|
||||
libAVFilter_Release(this->filter);
|
||||
return 1;
|
||||
}
|
||||
DECLARE_CREATE(enumpins, DShowEnumPins, ff_dshow_enumpins_Setup(this, pin, filter),
|
||||
DShowPin *pin, DShowFilter *filter)
|
||||
DECLARE_DESTROY(enumpins, DShowEnumPins, ff_dshow_enumpins_Cleanup)
|
||||
DECLARE_CREATE(libAVEnumPins, libAVEnumPins_Setup(this, pin, filter),
|
||||
libAVPin *pin, libAVFilter *filter)
|
||||
DECLARE_DESTROY(libAVEnumPins, libAVEnumPins_Cleanup)
|
||||
|
117
externals/ffmpeg/ffmpeg/libavdevice/dshow_filter.c
vendored
@@ -21,47 +21,53 @@
|
||||
|
||||
#include "dshow_capture.h"
|
||||
|
||||
DECLARE_QUERYINTERFACE(filter, DShowFilter,
|
||||
DECLARE_QUERYINTERFACE(libAVFilter,
|
||||
{ {&IID_IUnknown,0}, {&IID_IBaseFilter,0} })
|
||||
DECLARE_ADDREF(filter, DShowFilter)
|
||||
DECLARE_RELEASE(filter, DShowFilter)
|
||||
DECLARE_ADDREF(libAVFilter)
|
||||
DECLARE_RELEASE(libAVFilter)
|
||||
|
||||
long ff_dshow_filter_GetClassID(DShowFilter *this, CLSID *id)
|
||||
long WINAPI
|
||||
libAVFilter_GetClassID(libAVFilter *this, CLSID *id)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_GetClassID(%p)\n", this);
|
||||
dshowdebug("libAVFilter_GetClassID(%p)\n", this);
|
||||
/* I'm not creating a ClassID just for this. */
|
||||
return E_FAIL;
|
||||
}
|
||||
long ff_dshow_filter_Stop(DShowFilter *this)
|
||||
long WINAPI
|
||||
libAVFilter_Stop(libAVFilter *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_Stop(%p)\n", this);
|
||||
dshowdebug("libAVFilter_Stop(%p)\n", this);
|
||||
this->state = State_Stopped;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_Pause(DShowFilter *this)
|
||||
long WINAPI
|
||||
libAVFilter_Pause(libAVFilter *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_Pause(%p)\n", this);
|
||||
dshowdebug("libAVFilter_Pause(%p)\n", this);
|
||||
this->state = State_Paused;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_Run(DShowFilter *this, REFERENCE_TIME start)
|
||||
long WINAPI
|
||||
libAVFilter_Run(libAVFilter *this, REFERENCE_TIME start)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_Run(%p) %"PRId64"\n", this, start);
|
||||
dshowdebug("libAVFilter_Run(%p) %"PRId64"\n", this, start);
|
||||
this->state = State_Running;
|
||||
this->start_time = start;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_GetState(DShowFilter *this, DWORD ms, FILTER_STATE *state)
|
||||
long WINAPI
|
||||
libAVFilter_GetState(libAVFilter *this, DWORD ms, FILTER_STATE *state)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_GetState(%p)\n", this);
|
||||
dshowdebug("libAVFilter_GetState(%p)\n", this);
|
||||
if (!state)
|
||||
return E_POINTER;
|
||||
*state = this->state;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_SetSyncSource(DShowFilter *this, IReferenceClock *clock)
|
||||
long WINAPI
|
||||
libAVFilter_SetSyncSource(libAVFilter *this, IReferenceClock *clock)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_SetSyncSource(%p)\n", this);
|
||||
dshowdebug("libAVFilter_SetSyncSource(%p)\n", this);
|
||||
|
||||
if (this->clock != clock) {
|
||||
if (this->clock)
|
||||
@@ -73,9 +79,10 @@ long ff_dshow_filter_SetSyncSource(DShowFilter *this, IReferenceClock *clock)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_GetSyncSource(DShowFilter *this, IReferenceClock **clock)
|
||||
long WINAPI
|
||||
libAVFilter_GetSyncSource(libAVFilter *this, IReferenceClock **clock)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_GetSyncSource(%p)\n", this);
|
||||
dshowdebug("libAVFilter_GetSyncSource(%p)\n", this);
|
||||
|
||||
if (!clock)
|
||||
return E_POINTER;
|
||||
@@ -85,30 +92,32 @@ long ff_dshow_filter_GetSyncSource(DShowFilter *this, IReferenceClock **clock)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_EnumPins(DShowFilter *this, IEnumPins **enumpin)
|
||||
long WINAPI
|
||||
libAVFilter_EnumPins(libAVFilter *this, IEnumPins **enumpin)
|
||||
{
|
||||
DShowEnumPins *new;
|
||||
dshowdebug("ff_dshow_filter_EnumPins(%p)\n", this);
|
||||
libAVEnumPins *new;
|
||||
dshowdebug("libAVFilter_EnumPins(%p)\n", this);
|
||||
|
||||
if (!enumpin)
|
||||
return E_POINTER;
|
||||
new = ff_dshow_enumpins_Create(this->pin, this);
|
||||
new = libAVEnumPins_Create(this->pin, this);
|
||||
if (!new)
|
||||
return E_OUTOFMEMORY;
|
||||
|
||||
*enumpin = (IEnumPins *) new;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_FindPin(DShowFilter *this, const wchar_t *id, IPin **pin)
|
||||
long WINAPI
|
||||
libAVFilter_FindPin(libAVFilter *this, const wchar_t *id, IPin **pin)
|
||||
{
|
||||
DShowPin *found = NULL;
|
||||
dshowdebug("ff_dshow_filter_FindPin(%p)\n", this);
|
||||
libAVPin *found = NULL;
|
||||
dshowdebug("libAVFilter_FindPin(%p)\n", this);
|
||||
|
||||
if (!id || !pin)
|
||||
return E_POINTER;
|
||||
if (!wcscmp(id, L"In")) {
|
||||
found = this->pin;
|
||||
ff_dshow_pin_AddRef(found);
|
||||
libAVPin_AddRef(found);
|
||||
}
|
||||
*pin = (IPin *) found;
|
||||
if (!found)
|
||||
@@ -116,9 +125,10 @@ long ff_dshow_filter_FindPin(DShowFilter *this, const wchar_t *id, IPin **pin)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_QueryFilterInfo(DShowFilter *this, FILTER_INFO *info)
|
||||
long WINAPI
|
||||
libAVFilter_QueryFilterInfo(libAVFilter *this, FILTER_INFO *info)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_QueryFilterInfo(%p)\n", this);
|
||||
dshowdebug("libAVFilter_QueryFilterInfo(%p)\n", this);
|
||||
|
||||
if (!info)
|
||||
return E_POINTER;
|
||||
@@ -128,10 +138,11 @@ long ff_dshow_filter_QueryFilterInfo(DShowFilter *this, FILTER_INFO *info)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_JoinFilterGraph(DShowFilter *this, IFilterGraph *graph,
|
||||
long WINAPI
|
||||
libAVFilter_JoinFilterGraph(libAVFilter *this, IFilterGraph *graph,
|
||||
const wchar_t *name)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_JoinFilterGraph(%p)\n", this);
|
||||
dshowdebug("libAVFilter_JoinFilterGraph(%p)\n", this);
|
||||
|
||||
this->info.pGraph = graph;
|
||||
if (name)
|
||||
@@ -139,9 +150,10 @@ long ff_dshow_filter_JoinFilterGraph(DShowFilter *this, IFilterGraph *graph,
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_filter_QueryVendorInfo(DShowFilter *this, wchar_t **info)
|
||||
long WINAPI
|
||||
libAVFilter_QueryVendorInfo(libAVFilter *this, wchar_t **info)
|
||||
{
|
||||
dshowdebug("ff_dshow_filter_QueryVendorInfo(%p)\n", this);
|
||||
dshowdebug("libAVFilter_QueryVendorInfo(%p)\n", this);
|
||||
|
||||
if (!info)
|
||||
return E_POINTER;
|
||||
@@ -149,27 +161,27 @@ long ff_dshow_filter_QueryVendorInfo(DShowFilter *this, wchar_t **info)
|
||||
}
|
||||
|
||||
static int
|
||||
ff_dshow_filter_Setup(DShowFilter *this, void *priv_data, void *callback,
|
||||
libAVFilter_Setup(libAVFilter *this, void *priv_data, void *callback,
|
||||
enum dshowDeviceType type)
|
||||
{
|
||||
IBaseFilterVtbl *vtbl = this->vtbl;
|
||||
SETVTBL(vtbl, filter, QueryInterface);
|
||||
SETVTBL(vtbl, filter, AddRef);
|
||||
SETVTBL(vtbl, filter, Release);
|
||||
SETVTBL(vtbl, filter, GetClassID);
|
||||
SETVTBL(vtbl, filter, Stop);
|
||||
SETVTBL(vtbl, filter, Pause);
|
||||
SETVTBL(vtbl, filter, Run);
|
||||
SETVTBL(vtbl, filter, GetState);
|
||||
SETVTBL(vtbl, filter, SetSyncSource);
|
||||
SETVTBL(vtbl, filter, GetSyncSource);
|
||||
SETVTBL(vtbl, filter, EnumPins);
|
||||
SETVTBL(vtbl, filter, FindPin);
|
||||
SETVTBL(vtbl, filter, QueryFilterInfo);
|
||||
SETVTBL(vtbl, filter, JoinFilterGraph);
|
||||
SETVTBL(vtbl, filter, QueryVendorInfo);
|
||||
SETVTBL(vtbl, libAVFilter, QueryInterface);
|
||||
SETVTBL(vtbl, libAVFilter, AddRef);
|
||||
SETVTBL(vtbl, libAVFilter, Release);
|
||||
SETVTBL(vtbl, libAVFilter, GetClassID);
|
||||
SETVTBL(vtbl, libAVFilter, Stop);
|
||||
SETVTBL(vtbl, libAVFilter, Pause);
|
||||
SETVTBL(vtbl, libAVFilter, Run);
|
||||
SETVTBL(vtbl, libAVFilter, GetState);
|
||||
SETVTBL(vtbl, libAVFilter, SetSyncSource);
|
||||
SETVTBL(vtbl, libAVFilter, GetSyncSource);
|
||||
SETVTBL(vtbl, libAVFilter, EnumPins);
|
||||
SETVTBL(vtbl, libAVFilter, FindPin);
|
||||
SETVTBL(vtbl, libAVFilter, QueryFilterInfo);
|
||||
SETVTBL(vtbl, libAVFilter, JoinFilterGraph);
|
||||
SETVTBL(vtbl, libAVFilter, QueryVendorInfo);
|
||||
|
||||
this->pin = ff_dshow_pin_Create(this);
|
||||
this->pin = libAVPin_Create(this);
|
||||
|
||||
this->priv_data = priv_data;
|
||||
this->callback = callback;
|
||||
@@ -177,11 +189,12 @@ ff_dshow_filter_Setup(DShowFilter *this, void *priv_data, void *callback,
|
||||
|
||||
return 1;
|
||||
}
|
||||
static int ff_dshow_filter_Cleanup(DShowFilter *this)
|
||||
static int
|
||||
libAVFilter_Cleanup(libAVFilter *this)
|
||||
{
|
||||
ff_dshow_pin_Release(this->pin);
|
||||
libAVPin_Release(this->pin);
|
||||
return 1;
|
||||
}
|
||||
DECLARE_CREATE(filter, DShowFilter, ff_dshow_filter_Setup(this, priv_data, callback, type),
|
||||
DECLARE_CREATE(libAVFilter, libAVFilter_Setup(this, priv_data, callback, type),
|
||||
void *priv_data, void *callback, enum dshowDeviceType type)
|
||||
DECLARE_DESTROY(filter, DShowFilter, ff_dshow_filter_Cleanup)
|
||||
DECLARE_DESTROY(libAVFilter, libAVFilter_Cleanup)
|
||||
|
225
externals/ffmpeg/ffmpeg/libavdevice/dshow_pin.c
vendored
@@ -22,24 +22,26 @@
|
||||
#include "dshow_capture.h"
|
||||
|
||||
#include <stddef.h>
|
||||
#define imemoffset offsetof(DShowPin, imemvtbl)
|
||||
#define imemoffset offsetof(libAVPin, imemvtbl)
|
||||
|
||||
DECLARE_QUERYINTERFACE(pin, DShowPin,
|
||||
DECLARE_QUERYINTERFACE(libAVPin,
|
||||
{ {&IID_IUnknown,0}, {&IID_IPin,0}, {&IID_IMemInputPin,imemoffset} })
|
||||
DECLARE_ADDREF(pin, DShowPin)
|
||||
DECLARE_RELEASE(pin, DShowPin)
|
||||
DECLARE_ADDREF(libAVPin)
|
||||
DECLARE_RELEASE(libAVPin)
|
||||
|
||||
long ff_dshow_pin_Connect(DShowPin *this, IPin *pin, const AM_MEDIA_TYPE *type)
|
||||
long WINAPI
|
||||
libAVPin_Connect(libAVPin *this, IPin *pin, const AM_MEDIA_TYPE *type)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_Connect(%p, %p, %p)\n", this, pin, type);
|
||||
dshowdebug("libAVPin_Connect(%p, %p, %p)\n", this, pin, type);
|
||||
/* Input pins receive connections. */
|
||||
return S_FALSE;
|
||||
}
|
||||
long ff_dshow_pin_ReceiveConnection(DShowPin *this, IPin *pin,
|
||||
long WINAPI
|
||||
libAVPin_ReceiveConnection(libAVPin *this, IPin *pin,
|
||||
const AM_MEDIA_TYPE *type)
|
||||
{
|
||||
enum dshowDeviceType devtype = this->filter->type;
|
||||
dshowdebug("ff_dshow_pin_ReceiveConnection(%p)\n", this);
|
||||
dshowdebug("libAVPin_ReceiveConnection(%p)\n", this);
|
||||
|
||||
if (!pin)
|
||||
return E_POINTER;
|
||||
@@ -62,9 +64,10 @@ long ff_dshow_pin_ReceiveConnection(DShowPin *this, IPin *pin,
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_Disconnect(DShowPin *this)
|
||||
long WINAPI
|
||||
libAVPin_Disconnect(libAVPin *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_Disconnect(%p)\n", this);
|
||||
dshowdebug("libAVPin_Disconnect(%p)\n", this);
|
||||
|
||||
if (this->filter->state != State_Stopped)
|
||||
return VFW_E_NOT_STOPPED;
|
||||
@@ -75,9 +78,10 @@ long ff_dshow_pin_Disconnect(DShowPin *this)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_ConnectedTo(DShowPin *this, IPin **pin)
|
||||
long WINAPI
|
||||
libAVPin_ConnectedTo(libAVPin *this, IPin **pin)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_ConnectedTo(%p)\n", this);
|
||||
dshowdebug("libAVPin_ConnectedTo(%p)\n", this);
|
||||
|
||||
if (!pin)
|
||||
return E_POINTER;
|
||||
@@ -88,9 +92,10 @@ long ff_dshow_pin_ConnectedTo(DShowPin *this, IPin **pin)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_ConnectionMediaType(DShowPin *this, AM_MEDIA_TYPE *type)
|
||||
long WINAPI
|
||||
libAVPin_ConnectionMediaType(libAVPin *this, AM_MEDIA_TYPE *type)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_ConnectionMediaType(%p)\n", this);
|
||||
dshowdebug("libAVPin_ConnectionMediaType(%p)\n", this);
|
||||
|
||||
if (!type)
|
||||
return E_POINTER;
|
||||
@@ -99,15 +104,16 @@ long ff_dshow_pin_ConnectionMediaType(DShowPin *this, AM_MEDIA_TYPE *type)
|
||||
|
||||
return ff_copy_dshow_media_type(type, &this->type);
|
||||
}
|
||||
long ff_dshow_pin_QueryPinInfo(DShowPin *this, PIN_INFO *info)
|
||||
long WINAPI
|
||||
libAVPin_QueryPinInfo(libAVPin *this, PIN_INFO *info)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_QueryPinInfo(%p)\n", this);
|
||||
dshowdebug("libAVPin_QueryPinInfo(%p)\n", this);
|
||||
|
||||
if (!info)
|
||||
return E_POINTER;
|
||||
|
||||
if (this->filter)
|
||||
ff_dshow_filter_AddRef(this->filter);
|
||||
libAVFilter_AddRef(this->filter);
|
||||
|
||||
info->pFilter = (IBaseFilter *) this->filter;
|
||||
info->dir = PINDIR_INPUT;
|
||||
@@ -115,17 +121,19 @@ long ff_dshow_pin_QueryPinInfo(DShowPin *this, PIN_INFO *info)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_QueryDirection(DShowPin *this, PIN_DIRECTION *dir)
|
||||
long WINAPI
|
||||
libAVPin_QueryDirection(libAVPin *this, PIN_DIRECTION *dir)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_QueryDirection(%p)\n", this);
|
||||
dshowdebug("libAVPin_QueryDirection(%p)\n", this);
|
||||
if (!dir)
|
||||
return E_POINTER;
|
||||
*dir = PINDIR_INPUT;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_QueryId(DShowPin *this, wchar_t **id)
|
||||
long WINAPI
|
||||
libAVPin_QueryId(libAVPin *this, wchar_t **id)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_QueryId(%p)\n", this);
|
||||
dshowdebug("libAVPin_QueryId(%p)\n", this);
|
||||
|
||||
if (!id)
|
||||
return E_POINTER;
|
||||
@@ -134,59 +142,67 @@ long ff_dshow_pin_QueryId(DShowPin *this, wchar_t **id)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_QueryAccept(DShowPin *this, const AM_MEDIA_TYPE *type)
|
||||
long WINAPI
|
||||
libAVPin_QueryAccept(libAVPin *this, const AM_MEDIA_TYPE *type)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_QueryAccept(%p)\n", this);
|
||||
dshowdebug("libAVPin_QueryAccept(%p)\n", this);
|
||||
return S_FALSE;
|
||||
}
|
||||
long ff_dshow_pin_EnumMediaTypes(DShowPin *this, IEnumMediaTypes **enumtypes)
|
||||
long WINAPI
|
||||
libAVPin_EnumMediaTypes(libAVPin *this, IEnumMediaTypes **enumtypes)
|
||||
{
|
||||
const AM_MEDIA_TYPE *type = NULL;
|
||||
DShowEnumMediaTypes *new;
|
||||
dshowdebug("ff_dshow_pin_EnumMediaTypes(%p)\n", this);
|
||||
libAVEnumMediaTypes *new;
|
||||
dshowdebug("libAVPin_EnumMediaTypes(%p)\n", this);
|
||||
|
||||
if (!enumtypes)
|
||||
return E_POINTER;
|
||||
new = ff_dshow_enummediatypes_Create(type);
|
||||
new = libAVEnumMediaTypes_Create(type);
|
||||
if (!new)
|
||||
return E_OUTOFMEMORY;
|
||||
|
||||
*enumtypes = (IEnumMediaTypes *) new;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_QueryInternalConnections(DShowPin *this, IPin **pin,
|
||||
long WINAPI
|
||||
libAVPin_QueryInternalConnections(libAVPin *this, IPin **pin,
|
||||
unsigned long *npin)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_QueryInternalConnections(%p)\n", this);
|
||||
dshowdebug("libAVPin_QueryInternalConnections(%p)\n", this);
|
||||
return E_NOTIMPL;
|
||||
}
|
||||
long ff_dshow_pin_EndOfStream(DShowPin *this)
|
||||
long WINAPI
|
||||
libAVPin_EndOfStream(libAVPin *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_EndOfStream(%p)\n", this);
|
||||
dshowdebug("libAVPin_EndOfStream(%p)\n", this);
|
||||
/* I don't care. */
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_BeginFlush(DShowPin *this)
|
||||
long WINAPI
|
||||
libAVPin_BeginFlush(libAVPin *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_BeginFlush(%p)\n", this);
|
||||
dshowdebug("libAVPin_BeginFlush(%p)\n", this);
|
||||
/* I don't care. */
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_EndFlush(DShowPin *this)
|
||||
long WINAPI
|
||||
libAVPin_EndFlush(libAVPin *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_EndFlush(%p)\n", this);
|
||||
dshowdebug("libAVPin_EndFlush(%p)\n", this);
|
||||
/* I don't care. */
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_pin_NewSegment(DShowPin *this, REFERENCE_TIME start, REFERENCE_TIME stop,
|
||||
long WINAPI
|
||||
libAVPin_NewSegment(libAVPin *this, REFERENCE_TIME start, REFERENCE_TIME stop,
|
||||
double rate)
|
||||
{
|
||||
dshowdebug("ff_dshow_pin_NewSegment(%p)\n", this);
|
||||
dshowdebug("libAVPin_NewSegment(%p)\n", this);
|
||||
/* I don't care. */
|
||||
return S_OK;
|
||||
}
|
||||
|
||||
static int ff_dshow_pin_Setup(DShowPin *this, DShowFilter *filter)
|
||||
static int
|
||||
libAVPin_Setup(libAVPin *this, libAVFilter *filter)
|
||||
{
|
||||
IPinVtbl *vtbl = this->vtbl;
|
||||
IMemInputPinVtbl *imemvtbl;
|
||||
@@ -198,43 +214,44 @@ static int ff_dshow_pin_Setup(DShowPin *this, DShowFilter *filter)
|
||||
if (!imemvtbl)
|
||||
return 0;
|
||||
|
||||
SETVTBL(imemvtbl, meminputpin, QueryInterface);
|
||||
SETVTBL(imemvtbl, meminputpin, AddRef);
|
||||
SETVTBL(imemvtbl, meminputpin, Release);
|
||||
SETVTBL(imemvtbl, meminputpin, GetAllocator);
|
||||
SETVTBL(imemvtbl, meminputpin, NotifyAllocator);
|
||||
SETVTBL(imemvtbl, meminputpin, GetAllocatorRequirements);
|
||||
SETVTBL(imemvtbl, meminputpin, Receive);
|
||||
SETVTBL(imemvtbl, meminputpin, ReceiveMultiple);
|
||||
SETVTBL(imemvtbl, meminputpin, ReceiveCanBlock);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, QueryInterface);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, AddRef);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, Release);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, GetAllocator);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, NotifyAllocator);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, GetAllocatorRequirements);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, Receive);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, ReceiveMultiple);
|
||||
SETVTBL(imemvtbl, libAVMemInputPin, ReceiveCanBlock);
|
||||
|
||||
this->imemvtbl = imemvtbl;
|
||||
|
||||
SETVTBL(vtbl, pin, QueryInterface);
|
||||
SETVTBL(vtbl, pin, AddRef);
|
||||
SETVTBL(vtbl, pin, Release);
|
||||
SETVTBL(vtbl, pin, Connect);
|
||||
SETVTBL(vtbl, pin, ReceiveConnection);
|
||||
SETVTBL(vtbl, pin, Disconnect);
|
||||
SETVTBL(vtbl, pin, ConnectedTo);
|
||||
SETVTBL(vtbl, pin, ConnectionMediaType);
|
||||
SETVTBL(vtbl, pin, QueryPinInfo);
|
||||
SETVTBL(vtbl, pin, QueryDirection);
|
||||
SETVTBL(vtbl, pin, QueryId);
|
||||
SETVTBL(vtbl, pin, QueryAccept);
|
||||
SETVTBL(vtbl, pin, EnumMediaTypes);
|
||||
SETVTBL(vtbl, pin, QueryInternalConnections);
|
||||
SETVTBL(vtbl, pin, EndOfStream);
|
||||
SETVTBL(vtbl, pin, BeginFlush);
|
||||
SETVTBL(vtbl, pin, EndFlush);
|
||||
SETVTBL(vtbl, pin, NewSegment);
|
||||
SETVTBL(vtbl, libAVPin, QueryInterface);
|
||||
SETVTBL(vtbl, libAVPin, AddRef);
|
||||
SETVTBL(vtbl, libAVPin, Release);
|
||||
SETVTBL(vtbl, libAVPin, Connect);
|
||||
SETVTBL(vtbl, libAVPin, ReceiveConnection);
|
||||
SETVTBL(vtbl, libAVPin, Disconnect);
|
||||
SETVTBL(vtbl, libAVPin, ConnectedTo);
|
||||
SETVTBL(vtbl, libAVPin, ConnectionMediaType);
|
||||
SETVTBL(vtbl, libAVPin, QueryPinInfo);
|
||||
SETVTBL(vtbl, libAVPin, QueryDirection);
|
||||
SETVTBL(vtbl, libAVPin, QueryId);
|
||||
SETVTBL(vtbl, libAVPin, QueryAccept);
|
||||
SETVTBL(vtbl, libAVPin, EnumMediaTypes);
|
||||
SETVTBL(vtbl, libAVPin, QueryInternalConnections);
|
||||
SETVTBL(vtbl, libAVPin, EndOfStream);
|
||||
SETVTBL(vtbl, libAVPin, BeginFlush);
|
||||
SETVTBL(vtbl, libAVPin, EndFlush);
|
||||
SETVTBL(vtbl, libAVPin, NewSegment);
|
||||
|
||||
this->filter = filter;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
static void ff_dshow_pin_Free(DShowPin *this)
|
||||
static void
|
||||
libAVPin_Free(libAVPin *this)
|
||||
{
|
||||
if (!this)
|
||||
return;
|
||||
@@ -244,51 +261,58 @@ static void ff_dshow_pin_Free(DShowPin *this)
|
||||
this->type.pbFormat = NULL;
|
||||
}
|
||||
}
|
||||
DECLARE_CREATE(pin, DShowPin, ff_dshow_pin_Setup(this, filter), DShowFilter *filter)
|
||||
DECLARE_DESTROY(pin, DShowPin, ff_dshow_pin_Free)
|
||||
DECLARE_CREATE(libAVPin, libAVPin_Setup(this, filter), libAVFilter *filter)
|
||||
DECLARE_DESTROY(libAVPin, libAVPin_Free)
|
||||
|
||||
/*****************************************************************************
|
||||
* DShowMemInputPin
|
||||
* libAVMemInputPin
|
||||
****************************************************************************/
|
||||
long ff_dshow_meminputpin_QueryInterface(DShowMemInputPin *this, const GUID *riid,
|
||||
long WINAPI
|
||||
libAVMemInputPin_QueryInterface(libAVMemInputPin *this, const GUID *riid,
|
||||
void **ppvObject)
|
||||
{
|
||||
DShowPin *pin = (DShowPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("ff_dshow_meminputpin_QueryInterface(%p)\n", this);
|
||||
return ff_dshow_pin_QueryInterface(pin, riid, ppvObject);
|
||||
libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("libAVMemInputPin_QueryInterface(%p)\n", this);
|
||||
return libAVPin_QueryInterface(pin, riid, ppvObject);
|
||||
}
|
||||
unsigned long ff_dshow_meminputpin_AddRef(DShowMemInputPin *this)
|
||||
unsigned long WINAPI
|
||||
libAVMemInputPin_AddRef(libAVMemInputPin *this)
|
||||
{
|
||||
DShowPin *pin = (DShowPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("ff_dshow_meminputpin_AddRef(%p)\n", this);
|
||||
return ff_dshow_pin_AddRef(pin);
|
||||
libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("libAVMemInputPin_AddRef(%p)\n", this);
|
||||
return libAVPin_AddRef(pin);
|
||||
}
|
||||
unsigned long ff_dshow_meminputpin_Release(DShowMemInputPin *this)
|
||||
unsigned long WINAPI
|
||||
libAVMemInputPin_Release(libAVMemInputPin *this)
|
||||
{
|
||||
DShowPin *pin = (DShowPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("ff_dshow_meminputpin_Release(%p)\n", this);
|
||||
return ff_dshow_pin_Release(pin);
|
||||
libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("libAVMemInputPin_Release(%p)\n", this);
|
||||
return libAVPin_Release(pin);
|
||||
}
|
||||
long ff_dshow_meminputpin_GetAllocator(DShowMemInputPin *this, IMemAllocator **alloc)
|
||||
long WINAPI
|
||||
libAVMemInputPin_GetAllocator(libAVMemInputPin *this, IMemAllocator **alloc)
|
||||
{
|
||||
dshowdebug("ff_dshow_meminputpin_GetAllocator(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_GetAllocator(%p)\n", this);
|
||||
return VFW_E_NO_ALLOCATOR;
|
||||
}
|
||||
long ff_dshow_meminputpin_NotifyAllocator(DShowMemInputPin *this, IMemAllocator *alloc,
|
||||
long WINAPI
|
||||
libAVMemInputPin_NotifyAllocator(libAVMemInputPin *this, IMemAllocator *alloc,
|
||||
BOOL rdwr)
|
||||
{
|
||||
dshowdebug("ff_dshow_meminputpin_NotifyAllocator(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_NotifyAllocator(%p)\n", this);
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_meminputpin_GetAllocatorRequirements(DShowMemInputPin *this,
|
||||
long WINAPI
|
||||
libAVMemInputPin_GetAllocatorRequirements(libAVMemInputPin *this,
|
||||
ALLOCATOR_PROPERTIES *props)
|
||||
{
|
||||
dshowdebug("ff_dshow_meminputpin_GetAllocatorRequirements(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_GetAllocatorRequirements(%p)\n", this);
|
||||
return E_NOTIMPL;
|
||||
}
|
||||
long ff_dshow_meminputpin_Receive(DShowMemInputPin *this, IMediaSample *sample)
|
||||
long WINAPI
|
||||
libAVMemInputPin_Receive(libAVMemInputPin *this, IMediaSample *sample)
|
||||
{
|
||||
DShowPin *pin = (DShowPin *) ((uint8_t *) this - imemoffset);
|
||||
libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset);
|
||||
enum dshowDeviceType devtype = pin->filter->type;
|
||||
void *priv_data;
|
||||
AVFormatContext *s;
|
||||
@@ -304,7 +328,7 @@ long ff_dshow_meminputpin_Receive(DShowMemInputPin *this, IMediaSample *sample)
|
||||
struct dshow_ctx *ctx;
|
||||
|
||||
|
||||
dshowdebug("ff_dshow_meminputpin_Receive(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_Receive(%p)\n", this);
|
||||
|
||||
if (!sample)
|
||||
return E_POINTER;
|
||||
@@ -342,28 +366,31 @@ long ff_dshow_meminputpin_Receive(DShowMemInputPin *this, IMediaSample *sample)
|
||||
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_meminputpin_ReceiveMultiple(DShowMemInputPin *this,
|
||||
long WINAPI
|
||||
libAVMemInputPin_ReceiveMultiple(libAVMemInputPin *this,
|
||||
IMediaSample **samples, long n, long *nproc)
|
||||
{
|
||||
int i;
|
||||
dshowdebug("ff_dshow_meminputpin_ReceiveMultiple(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_ReceiveMultiple(%p)\n", this);
|
||||
|
||||
for (i = 0; i < n; i++)
|
||||
ff_dshow_meminputpin_Receive(this, samples[i]);
|
||||
libAVMemInputPin_Receive(this, samples[i]);
|
||||
|
||||
*nproc = n;
|
||||
return S_OK;
|
||||
}
|
||||
long ff_dshow_meminputpin_ReceiveCanBlock(DShowMemInputPin *this)
|
||||
long WINAPI
|
||||
libAVMemInputPin_ReceiveCanBlock(libAVMemInputPin *this)
|
||||
{
|
||||
dshowdebug("ff_dshow_meminputpin_ReceiveCanBlock(%p)\n", this);
|
||||
dshowdebug("libAVMemInputPin_ReceiveCanBlock(%p)\n", this);
|
||||
/* I swear I will not block. */
|
||||
return S_FALSE;
|
||||
}
|
||||
|
||||
void ff_dshow_meminputpin_Destroy(DShowMemInputPin *this)
|
||||
void
|
||||
libAVMemInputPin_Destroy(libAVMemInputPin *this)
|
||||
{
|
||||
DShowPin *pin = (DShowPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("ff_dshow_meminputpin_Destroy(%p)\n", this);
|
||||
ff_dshow_pin_Destroy(pin);
|
||||
libAVPin *pin = (libAVPin *) ((uint8_t *) this - imemoffset);
|
||||
dshowdebug("libAVMemInputPin_Destroy(%p)\n", this);
|
||||
libAVPin_Destroy(pin);
|
||||
}
|
||||
|
@@ -157,11 +157,11 @@ static int fbdev_read_packet(AVFormatContext *avctx, AVPacket *pkt)
uint8_t *pin, *pout;

if (fbdev->time_frame == AV_NOPTS_VALUE)
fbdev->time_frame = av_gettime_relative();
fbdev->time_frame = av_gettime();

/* wait based on the frame rate */
while (1) {
curtime = av_gettime_relative();
curtime = av_gettime();
delay = fbdev->time_frame - curtime;
av_log(avctx, AV_LOG_TRACE,
"time_frame:%"PRId64" curtime:%"PRId64" delay:%"PRId64"\n",
@@ -186,7 +186,7 @@ static int fbdev_read_packet(AVFormatContext *avctx, AVPacket *pkt)
"Error refreshing variable info: %s\n", av_err2str(AVERROR(errno)));
}

pkt->pts = av_gettime();
pkt->pts = curtime;

/* compute visible data offset */
pin = fbdev->data + fbdev->bytes_per_pixel * fbdev->varinfo.xoffset +

@@ -394,7 +394,7 @@ gdigrab_read_header(AVFormatContext *s1)
|
||||
gdigrab->header_size = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) +
|
||||
(bpp <= 8 ? (1 << bpp) : 0) * sizeof(RGBQUAD) /* palette size */;
|
||||
gdigrab->time_base = av_inv_q(gdigrab->framerate);
|
||||
gdigrab->time_frame = av_gettime_relative() / av_q2d(gdigrab->time_base);
|
||||
gdigrab->time_frame = av_gettime() / av_q2d(gdigrab->time_base);
|
||||
|
||||
gdigrab->hwnd = hwnd;
|
||||
gdigrab->source_hdc = source_hdc;
|
||||
@@ -551,7 +551,7 @@ static int gdigrab_read_packet(AVFormatContext *s1, AVPacket *pkt)
|
||||
|
||||
/* wait based on the frame rate */
|
||||
for (;;) {
|
||||
curtime = av_gettime_relative();
|
||||
curtime = av_gettime();
|
||||
delay = time_frame * av_q2d(time_base) - curtime;
|
||||
if (delay <= 0) {
|
||||
if (delay < INT64_C(-1000000) * av_q2d(time_base)) {
|
||||
@@ -568,7 +568,7 @@ static int gdigrab_read_packet(AVFormatContext *s1, AVPacket *pkt)
|
||||
|
||||
if (av_new_packet(pkt, file_size) < 0)
|
||||
return AVERROR(ENOMEM);
|
||||
pkt->pts = av_gettime();
|
||||
pkt->pts = curtime;
|
||||
|
||||
/* Blit screen grab */
|
||||
if (!BitBlt(dest_hdc, 0, 0,
|
||||
|
438
externals/ffmpeg/ffmpeg/libavdevice/kmsgrab.c
vendored
@@ -27,11 +27,6 @@
|
||||
#include <xf86drm.h>
|
||||
#include <xf86drmMode.h>
|
||||
|
||||
// Required for compatibility when building against libdrm < 2.4.83.
|
||||
#ifndef DRM_FORMAT_MOD_INVALID
|
||||
#define DRM_FORMAT_MOD_INVALID ((1ULL << 56) - 1)
|
||||
#endif
|
||||
|
||||
#include "libavutil/hwcontext.h"
|
||||
#include "libavutil/hwcontext_drm.h"
|
||||
#include "libavutil/internal.h"
|
||||
@@ -50,7 +45,6 @@ typedef struct KMSGrabContext {
|
||||
AVBufferRef *device_ref;
|
||||
AVHWDeviceContext *device;
|
||||
AVDRMDeviceContext *hwctx;
|
||||
int fb2_available;
|
||||
|
||||
AVBufferRef *frames_ref;
|
||||
AVHWFramesContext *frames;
|
||||
@@ -74,10 +68,8 @@ typedef struct KMSGrabContext {
|
||||
static void kmsgrab_free_desc(void *opaque, uint8_t *data)
|
||||
{
|
||||
AVDRMFrameDescriptor *desc = (AVDRMFrameDescriptor*)data;
|
||||
int i;
|
||||
|
||||
for (i = 0; i < desc->nb_objects; i++)
|
||||
close(desc->objects[i].fd);
|
||||
close(desc->objects[0].fd);
|
||||
|
||||
av_free(desc);
|
||||
}
|
||||
@@ -89,44 +81,70 @@ static void kmsgrab_free_frame(void *opaque, uint8_t *data)
|
||||
av_frame_free(&frame);
|
||||
}
|
||||
|
||||
static int kmsgrab_get_fb(AVFormatContext *avctx,
|
||||
drmModePlane *plane,
|
||||
AVDRMFrameDescriptor *desc)
|
||||
static int kmsgrab_read_packet(AVFormatContext *avctx, AVPacket *pkt)
|
||||
{
|
||||
KMSGrabContext *ctx = avctx->priv_data;
|
||||
drmModeFB *fb = NULL;
|
||||
drmModePlane *plane;
|
||||
drmModeFB *fb;
|
||||
AVDRMFrameDescriptor *desc;
|
||||
AVFrame *frame;
|
||||
int64_t now;
|
||||
int err, fd;
|
||||
|
||||
now = av_gettime();
|
||||
if (ctx->frame_last) {
|
||||
int64_t delay;
|
||||
while (1) {
|
||||
delay = ctx->frame_last + ctx->frame_delay - now;
|
||||
if (delay <= 0)
|
||||
break;
|
||||
av_usleep(delay);
|
||||
now = av_gettime();
|
||||
}
|
||||
}
|
||||
ctx->frame_last = now;
|
||||
|
||||
plane = drmModeGetPlane(ctx->hwctx->fd, ctx->plane_id);
|
||||
if (!plane) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get plane "
|
||||
"%"PRIu32".\n", ctx->plane_id);
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
if (!plane->fb_id) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Plane %"PRIu32" no longer has "
|
||||
"an associated framebuffer.\n", ctx->plane_id);
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
|
||||
fb = drmModeGetFB(ctx->hwctx->fd, plane->fb_id);
|
||||
if (!fb) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get framebuffer "
|
||||
"%"PRIu32": %s.\n", plane->fb_id, strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
"%"PRIu32".\n", plane->fb_id);
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
if (fb->width != ctx->width || fb->height != ctx->height) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Plane %"PRIu32" framebuffer "
|
||||
"dimensions changed: now %"PRIu32"x%"PRIu32".\n",
|
||||
ctx->plane_id, fb->width, fb->height);
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
if (!fb->handle) {
|
||||
av_log(avctx, AV_LOG_ERROR, "No handle set on framebuffer.\n");
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
|
||||
err = drmPrimeHandleToFD(ctx->hwctx->fd, fb->handle, O_RDONLY, &fd);
|
||||
if (err < 0) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get PRIME fd from "
|
||||
"framebuffer handle: %s.\n", strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
"framebuffer handle: %s.\n", strerror(errno));
|
||||
return AVERROR(err);
|
||||
}
|
||||
|
||||
desc = av_mallocz(sizeof(*desc));
|
||||
if (!desc)
|
||||
return AVERROR(ENOMEM);
|
||||
|
||||
*desc = (AVDRMFrameDescriptor) {
|
||||
.nb_objects = 1,
|
||||
.objects[0] = {
|
||||
@@ -146,206 +164,31 @@ static int kmsgrab_get_fb(AVFormatContext *avctx,
|
||||
},
|
||||
};
|
||||
|
||||
err = 0;
|
||||
fail:
|
||||
drmModeFreeFB(fb);
|
||||
return err;
|
||||
}
|
||||
|
||||
#if HAVE_LIBDRM_GETFB2
|
||||
static int kmsgrab_get_fb2(AVFormatContext *avctx,
|
||||
drmModePlane *plane,
|
||||
AVDRMFrameDescriptor *desc)
|
||||
{
|
||||
KMSGrabContext *ctx = avctx->priv_data;
|
||||
drmModeFB2 *fb;
|
||||
int err, i, nb_objects;
|
||||
uint64_t modifier = ctx->drm_format_modifier;
|
||||
|
||||
fb = drmModeGetFB2(ctx->hwctx->fd, plane->fb_id);
|
||||
if (!fb) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get framebuffer "
|
||||
"%"PRIu32": %s.\n", plane->fb_id, strerror(err));
|
||||
return AVERROR(err);
|
||||
}
|
||||
if (fb->pixel_format != ctx->drm_format) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Plane %"PRIu32" framebuffer "
|
||||
"format changed: now %"PRIx32".\n",
|
||||
ctx->plane_id, fb->pixel_format);
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
}
|
||||
if (fb->width != ctx->width || fb->height != ctx->height) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Plane %"PRIu32" framebuffer "
|
||||
"dimensions changed: now %"PRIu32"x%"PRIu32".\n",
|
||||
ctx->plane_id, fb->width, fb->height);
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
}
|
||||
if (!fb->handles[0]) {
|
||||
av_log(avctx, AV_LOG_ERROR, "No handle set on framebuffer.\n");
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if (fb->flags & DRM_MODE_FB_MODIFIERS)
|
||||
modifier = fb->modifier;
|
||||
|
||||
*desc = (AVDRMFrameDescriptor) {
|
||||
.nb_layers = 1,
|
||||
.layers[0] = {
|
||||
.format = ctx->drm_format,
|
||||
},
|
||||
};
|
||||
|
||||
nb_objects = 0;
|
||||
for (i = 0; i < 4 && fb->handles[i]; i++) {
|
||||
size_t size;
|
||||
int dup = 0, j, obj;
|
||||
|
||||
size = fb->offsets[i] + fb->height * fb->pitches[i];
|
||||
|
||||
for (j = 0; j < i; j++) {
|
||||
if (fb->handles[i] == fb->handles[j]) {
|
||||
dup = 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (dup) {
|
||||
obj = desc->layers[0].planes[j].object_index;
|
||||
|
||||
if (desc->objects[j].size < size)
|
||||
desc->objects[j].size = size;
|
||||
|
||||
desc->layers[0].planes[i] = (AVDRMPlaneDescriptor) {
|
||||
.object_index = obj,
|
||||
.offset = fb->offsets[i],
|
||||
.pitch = fb->pitches[i],
|
||||
};
|
||||
|
||||
} else {
|
||||
int fd;
|
||||
err = drmPrimeHandleToFD(ctx->hwctx->fd, fb->handles[i],
|
||||
O_RDONLY, &fd);
|
||||
if (err < 0) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get PRIME fd from "
|
||||
"framebuffer handle: %s.\n", strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
obj = nb_objects++;
|
||||
desc->objects[obj] = (AVDRMObjectDescriptor) {
|
||||
.fd = fd,
|
||||
.size = size,
|
||||
.format_modifier = modifier,
|
||||
};
|
||||
desc->layers[0].planes[i] = (AVDRMPlaneDescriptor) {
|
||||
.object_index = obj,
|
||||
.offset = fb->offsets[i],
|
||||
.pitch = fb->pitches[i],
|
||||
};
|
||||
}
|
||||
}
|
||||
desc->nb_objects = nb_objects;
|
||||
desc->layers[0].nb_planes = i;
|
||||
|
||||
err = 0;
|
||||
fail:
|
||||
drmModeFreeFB2(fb);
|
||||
return err;
|
||||
}
|
||||
#endif
|
||||
|
||||
static int kmsgrab_read_packet(AVFormatContext *avctx, AVPacket *pkt)
|
||||
{
|
||||
KMSGrabContext *ctx = avctx->priv_data;
|
||||
drmModePlane *plane = NULL;
|
||||
AVDRMFrameDescriptor *desc = NULL;
|
||||
AVFrame *frame = NULL;
|
||||
int64_t now;
|
||||
int err;
|
||||
|
||||
now = av_gettime_relative();
|
||||
if (ctx->frame_last) {
|
||||
int64_t delay;
|
||||
while (1) {
|
||||
delay = ctx->frame_last + ctx->frame_delay - now;
|
||||
if (delay <= 0)
|
||||
break;
|
||||
av_usleep(delay);
|
||||
now = av_gettime_relative();
|
||||
}
|
||||
}
|
||||
ctx->frame_last = now;
|
||||
now = av_gettime();
|
||||
|
||||
plane = drmModeGetPlane(ctx->hwctx->fd, ctx->plane_id);
|
||||
if (!plane) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get plane "
|
||||
"%"PRIu32": %s.\n", ctx->plane_id, strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
}
|
||||
if (!plane->fb_id) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Plane %"PRIu32" no longer has "
|
||||
"an associated framebuffer.\n", ctx->plane_id);
|
||||
err = AVERROR(EIO);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
desc = av_mallocz(sizeof(*desc));
|
||||
if (!desc) {
|
||||
err = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
#if HAVE_LIBDRM_GETFB2
|
||||
if (ctx->fb2_available)
|
||||
err = kmsgrab_get_fb2(avctx, plane, desc);
|
||||
else
|
||||
#endif
|
||||
err = kmsgrab_get_fb(avctx, plane, desc);
|
||||
if (err < 0)
|
||||
goto fail;
|
||||
|
||||
frame = av_frame_alloc();
|
||||
if (!frame) {
|
||||
err = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
}
|
||||
if (!frame)
|
||||
return AVERROR(ENOMEM);
|
||||
|
||||
frame->hw_frames_ctx = av_buffer_ref(ctx->frames_ref);
|
||||
if (!frame->hw_frames_ctx) {
|
||||
err = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
}
|
||||
if (!frame->hw_frames_ctx)
|
||||
return AVERROR(ENOMEM);
|
||||
|
||||
frame->buf[0] = av_buffer_create((uint8_t*)desc, sizeof(*desc),
|
||||
&kmsgrab_free_desc, avctx, 0);
|
||||
if (!frame->buf[0]) {
|
||||
err = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
}
|
||||
if (!frame->buf[0])
|
||||
return AVERROR(ENOMEM);
|
||||
|
||||
frame->data[0] = (uint8_t*)desc;
|
||||
frame->format = AV_PIX_FMT_DRM_PRIME;
|
||||
frame->width = ctx->width;
|
||||
frame->height = ctx->height;
|
||||
frame->width = fb->width;
|
||||
frame->height = fb->height;
|
||||
|
||||
drmModeFreeFB(fb);
|
||||
drmModeFreePlane(plane);
|
||||
plane = NULL;
|
||||
desc = NULL;
|
||||
|
||||
pkt->buf = av_buffer_create((uint8_t*)frame, sizeof(*frame),
|
||||
&kmsgrab_free_frame, avctx, 0);
|
||||
if (!pkt->buf) {
|
||||
err = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
}
|
||||
if (!pkt->buf)
|
||||
return AVERROR(ENOMEM);
|
||||
|
||||
pkt->data = (uint8_t*)frame;
|
||||
pkt->size = sizeof(*frame);
|
||||
@@ -353,19 +196,12 @@ static int kmsgrab_read_packet(AVFormatContext *avctx, AVPacket *pkt)
|
||||
pkt->flags |= AV_PKT_FLAG_TRUSTED;
|
||||
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
drmModeFreePlane(plane);
|
||||
av_freep(&desc);
|
||||
av_frame_free(&frame);
|
||||
return err;
|
||||
}
|
||||
|
||||
static const struct {
|
||||
enum AVPixelFormat pixfmt;
|
||||
uint32_t drm_format;
|
||||
} kmsgrab_formats[] = {
|
||||
// Monochrome.
|
||||
#ifdef DRM_FORMAT_R8
|
||||
{ AV_PIX_FMT_GRAY8, DRM_FORMAT_R8 },
|
||||
#endif
|
||||
@@ -373,7 +209,6 @@ static const struct {
|
||||
{ AV_PIX_FMT_GRAY16LE, DRM_FORMAT_R16 },
|
||||
{ AV_PIX_FMT_GRAY16BE, DRM_FORMAT_R16 | DRM_FORMAT_BIG_ENDIAN },
|
||||
#endif
|
||||
// <8-bit RGB.
|
||||
{ AV_PIX_FMT_BGR8, DRM_FORMAT_BGR233 },
|
||||
{ AV_PIX_FMT_RGB555LE, DRM_FORMAT_XRGB1555 },
|
||||
{ AV_PIX_FMT_RGB555BE, DRM_FORMAT_XRGB1555 | DRM_FORMAT_BIG_ENDIAN },
|
||||
@@ -383,7 +218,6 @@ static const struct {
|
||||
{ AV_PIX_FMT_RGB565BE, DRM_FORMAT_RGB565 | DRM_FORMAT_BIG_ENDIAN },
|
||||
{ AV_PIX_FMT_BGR565LE, DRM_FORMAT_BGR565 },
|
||||
{ AV_PIX_FMT_BGR565BE, DRM_FORMAT_BGR565 | DRM_FORMAT_BIG_ENDIAN },
|
||||
// 8-bit RGB.
|
||||
{ AV_PIX_FMT_RGB24, DRM_FORMAT_RGB888 },
|
||||
{ AV_PIX_FMT_BGR24, DRM_FORMAT_BGR888 },
|
||||
{ AV_PIX_FMT_0RGB, DRM_FORMAT_BGRX8888 },
|
||||
@@ -394,12 +228,6 @@ static const struct {
|
||||
{ AV_PIX_FMT_ABGR, DRM_FORMAT_RGBA8888 },
|
||||
{ AV_PIX_FMT_RGBA, DRM_FORMAT_ABGR8888 },
|
||||
{ AV_PIX_FMT_BGRA, DRM_FORMAT_ARGB8888 },
|
||||
// 10-bit RGB.
|
||||
{ AV_PIX_FMT_X2RGB10LE, DRM_FORMAT_XRGB2101010 },
|
||||
{ AV_PIX_FMT_X2RGB10BE, DRM_FORMAT_XRGB2101010 | DRM_FORMAT_BIG_ENDIAN },
|
||||
// 8-bit YUV 4:2:0.
|
||||
{ AV_PIX_FMT_NV12, DRM_FORMAT_NV12 },
|
||||
// 8-bit YUV 4:2:2.
|
||||
{ AV_PIX_FMT_YUYV422, DRM_FORMAT_YUYV },
|
||||
{ AV_PIX_FMT_YVYU422, DRM_FORMAT_YVYU },
|
||||
{ AV_PIX_FMT_UYVY422, DRM_FORMAT_UYVY },
|
||||
@@ -411,12 +239,21 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
drmModePlaneRes *plane_res = NULL;
|
||||
drmModePlane *plane = NULL;
|
||||
drmModeFB *fb = NULL;
|
||||
#if HAVE_LIBDRM_GETFB2
|
||||
drmModeFB2 *fb2 = NULL;
|
||||
#endif
|
||||
AVStream *stream;
|
||||
int err, i;
|
||||
|
||||
for (i = 0; i < FF_ARRAY_ELEMS(kmsgrab_formats); i++) {
|
||||
if (kmsgrab_formats[i].pixfmt == ctx->format) {
|
||||
ctx->drm_format = kmsgrab_formats[i].drm_format;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i >= FF_ARRAY_ELEMS(kmsgrab_formats)) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Unsupported format %s.\n",
|
||||
av_get_pix_fmt_name(ctx->format));
|
||||
return AVERROR(EINVAL);
|
||||
}
|
||||
|
||||
err = av_hwdevice_ctx_create(&ctx->device_ref, AV_HWDEVICE_TYPE_DRM,
|
||||
ctx->device_path, NULL, 0);
|
||||
if (err < 0) {
|
||||
@@ -452,10 +289,9 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
} else {
|
||||
plane_res = drmModeGetPlaneResources(ctx->hwctx->fd);
|
||||
if (!plane_res) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get plane "
|
||||
"resources: %s.\n", strerror(err));
|
||||
err = AVERROR(err);
|
||||
"resources: %s.\n", strerror(errno));
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
@@ -504,119 +340,28 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
|
||||
ctx->plane_id = plane->plane_id;
|
||||
|
||||
#if HAVE_LIBDRM_GETFB2
|
||||
fb2 = drmModeGetFB2(ctx->hwctx->fd, plane->fb_id);
|
||||
if (!fb2 && errno == ENOSYS) {
|
||||
av_log(avctx, AV_LOG_INFO, "GETFB2 not supported, "
|
||||
"will try to use GETFB instead.\n");
|
||||
} else if (!fb2) {
|
||||
fb = drmModeGetFB(ctx->hwctx->fd, plane->fb_id);
|
||||
if (!fb) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get "
|
||||
"framebuffer %"PRIu32": %s.\n",
|
||||
plane->fb_id, strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
} else {
|
||||
av_log(avctx, AV_LOG_INFO, "Template framebuffer is "
|
||||
"%"PRIu32": %"PRIu32"x%"PRIu32" "
|
||||
"format %"PRIx32" modifier %"PRIx64" flags %"PRIx32".\n",
|
||||
fb2->fb_id, fb2->width, fb2->height,
|
||||
fb2->pixel_format, fb2->modifier, fb2->flags);
|
||||
|
||||
ctx->width = fb2->width;
|
||||
ctx->height = fb2->height;
|
||||
|
||||
if (!fb2->handles[0]) {
|
||||
av_log(avctx, AV_LOG_ERROR, "No handle set on framebuffer: "
|
||||
"maybe you need some additional capabilities?\n");
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
for (i = 0; i < FF_ARRAY_ELEMS(kmsgrab_formats); i++) {
|
||||
if (kmsgrab_formats[i].drm_format == fb2->pixel_format) {
|
||||
if (ctx->format != AV_PIX_FMT_NONE &&
|
||||
ctx->format != kmsgrab_formats[i].pixfmt) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Framebuffer pixel format "
|
||||
"%"PRIx32" does not match expected format.\n",
|
||||
fb2->pixel_format);
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
ctx->drm_format = fb2->pixel_format;
|
||||
ctx->format = kmsgrab_formats[i].pixfmt;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i == FF_ARRAY_ELEMS(kmsgrab_formats)) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Framebuffer pixel format "
|
||||
"%"PRIx32" is not a known supported format.\n",
|
||||
fb2->pixel_format);
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if (fb2->flags & DRM_MODE_FB_MODIFIERS) {
|
||||
if (ctx->drm_format_modifier != DRM_FORMAT_MOD_INVALID &&
|
||||
ctx->drm_format_modifier != fb2->modifier) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Framebuffer format modifier "
|
||||
"%"PRIx64" does not match expected modifier.\n",
|
||||
fb2->modifier);
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
} else {
|
||||
ctx->drm_format_modifier = fb2->modifier;
|
||||
}
|
||||
}
|
||||
av_log(avctx, AV_LOG_VERBOSE, "Format is %s, from "
|
||||
"DRM format %"PRIx32" modifier %"PRIx64".\n",
|
||||
av_get_pix_fmt_name(ctx->format),
|
||||
ctx->drm_format, ctx->drm_format_modifier);
|
||||
|
||||
ctx->fb2_available = 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!ctx->fb2_available) {
|
||||
if (ctx->format == AV_PIX_FMT_NONE) {
|
||||
// Backward compatibility: assume BGR0 if no format supplied.
|
||||
ctx->format = AV_PIX_FMT_BGR0;
|
||||
}
|
||||
for (i = 0; i < FF_ARRAY_ELEMS(kmsgrab_formats); i++) {
|
||||
if (kmsgrab_formats[i].pixfmt == ctx->format) {
|
||||
ctx->drm_format = kmsgrab_formats[i].drm_format;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i >= FF_ARRAY_ELEMS(kmsgrab_formats)) {
|
||||
av_log(avctx, AV_LOG_ERROR, "Unsupported format %s.\n",
|
||||
av_get_pix_fmt_name(ctx->format));
|
||||
return AVERROR(EINVAL);
|
||||
}
|
||||
av_log(avctx, AV_LOG_INFO, "Template framebuffer is %"PRIu32": "
|
||||
"%"PRIu32"x%"PRIu32" %"PRIu32"bpp %"PRIu32"b depth.\n",
|
||||
fb->fb_id, fb->width, fb->height, fb->bpp, fb->depth);
|
||||
|
||||
fb = drmModeGetFB(ctx->hwctx->fd, plane->fb_id);
|
||||
if (!fb) {
|
||||
err = errno;
|
||||
av_log(avctx, AV_LOG_ERROR, "Failed to get "
|
||||
"framebuffer %"PRIu32": %s.\n",
|
||||
plane->fb_id, strerror(err));
|
||||
err = AVERROR(err);
|
||||
goto fail;
|
||||
}
|
||||
ctx->width = fb->width;
|
||||
ctx->height = fb->height;
|
||||
|
||||
av_log(avctx, AV_LOG_INFO, "Template framebuffer is %"PRIu32": "
|
||||
"%"PRIu32"x%"PRIu32" %"PRIu32"bpp %"PRIu32"b depth.\n",
|
||||
fb->fb_id, fb->width, fb->height, fb->bpp, fb->depth);
|
||||
|
||||
ctx->width = fb->width;
|
||||
ctx->height = fb->height;
|
||||
|
||||
if (!fb->handle) {
|
||||
av_log(avctx, AV_LOG_ERROR, "No handle set on framebuffer: "
|
||||
"maybe you need some additional capabilities?\n");
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
if (!fb->handle) {
|
||||
av_log(avctx, AV_LOG_ERROR, "No handle set on framebuffer: "
|
||||
"maybe you need some additional capabilities?\n");
|
||||
err = AVERROR(EINVAL);
|
||||
goto fail;
|
||||
}
|
||||
|
||||
stream = avformat_new_stream(avctx, NULL);
|
||||
@@ -627,8 +372,8 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
|
||||
stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
|
||||
stream->codecpar->codec_id = AV_CODEC_ID_WRAPPED_AVFRAME;
|
||||
stream->codecpar->width = ctx->width;
|
||||
stream->codecpar->height = ctx->height;
|
||||
stream->codecpar->width = fb->width;
|
||||
stream->codecpar->height = fb->height;
|
||||
stream->codecpar->format = AV_PIX_FMT_DRM_PRIME;
|
||||
|
||||
avpriv_set_pts_info(stream, 64, 1, 1000000);
|
||||
@@ -642,8 +387,8 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
|
||||
ctx->frames->format = AV_PIX_FMT_DRM_PRIME;
|
||||
ctx->frames->sw_format = ctx->format,
|
||||
ctx->frames->width = ctx->width;
|
||||
ctx->frames->height = ctx->height;
|
||||
ctx->frames->width = fb->width;
|
||||
ctx->frames->height = fb->height;
|
||||
|
||||
err = av_hwframe_ctx_init(ctx->frames_ref);
|
||||
if (err < 0) {
|
||||
@@ -657,12 +402,13 @@ static av_cold int kmsgrab_read_header(AVFormatContext *avctx)
|
||||
|
||||
err = 0;
|
||||
fail:
|
||||
drmModeFreePlaneResources(plane_res);
|
||||
drmModeFreePlane(plane);
|
||||
drmModeFreeFB(fb);
|
||||
#if HAVE_LIBDRM_GETFB2
|
||||
drmModeFreeFB2(fb2);
|
||||
#endif
|
||||
if (plane_res)
|
||||
drmModeFreePlaneResources(plane_res);
|
||||
if (plane)
|
||||
drmModeFreePlane(plane);
|
||||
if (fb)
|
||||
drmModeFreeFB(fb);
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
@@ -684,10 +430,10 @@ static const AVOption options[] = {
|
||||
{ .str = "/dev/dri/card0" }, 0, 0, FLAGS },
|
||||
{ "format", "Pixel format for framebuffer",
|
||||
OFFSET(format), AV_OPT_TYPE_PIXEL_FMT,
|
||||
{ .i64 = AV_PIX_FMT_NONE }, -1, INT32_MAX, FLAGS },
|
||||
{ .i64 = AV_PIX_FMT_BGR0 }, 0, UINT32_MAX, FLAGS },
|
||||
{ "format_modifier", "DRM format modifier for framebuffer",
|
||||
OFFSET(drm_format_modifier), AV_OPT_TYPE_INT64,
|
||||
{ .i64 = DRM_FORMAT_MOD_INVALID }, 0, INT64_MAX, FLAGS },
|
||||
{ .i64 = DRM_FORMAT_MOD_NONE }, 0, INT64_MAX, FLAGS },
|
||||
{ "crtc_id", "CRTC ID to define capture source",
|
||||
OFFSET(source_crtc), AV_OPT_TYPE_INT64,
|
||||
{ .i64 = 0 }, 0, UINT32_MAX, FLAGS },
|
||||
|
59
externals/ffmpeg/ffmpeg/libavdevice/lavfi.c
vendored
@@ -69,7 +69,7 @@ static int *create_all_formats(int n)
|
||||
count++;
|
||||
}
|
||||
|
||||
if (!(fmts = av_malloc_array(count + 1, sizeof(*fmts))))
|
||||
if (!(fmts = av_malloc((count+1) * sizeof(int))))
|
||||
return NULL;
|
||||
for (j = 0, i = 0; i < n; i++) {
|
||||
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(i);
|
||||
@@ -362,12 +362,16 @@ static int create_subcc_packet(AVFormatContext *avctx, AVFrame *frame,
|
||||
{
|
||||
LavfiContext *lavfi = avctx->priv_data;
|
||||
AVFrameSideData *sd;
|
||||
int stream_idx, ret;
|
||||
int stream_idx, i, ret;
|
||||
|
||||
if ((stream_idx = lavfi->sink_stream_subcc_map[sink_idx]) < 0)
|
||||
return 0;
|
||||
if (!(sd = av_frame_get_side_data(frame, AV_FRAME_DATA_A53_CC)))
|
||||
for (i = 0; i < frame->nb_side_data; i++)
|
||||
if (frame->side_data[i]->type == AV_FRAME_DATA_A53_CC)
|
||||
break;
|
||||
if (i >= frame->nb_side_data)
|
||||
return 0;
|
||||
sd = frame->side_data[i];
|
||||
if ((ret = av_new_packet(&lavfi->subcc_packet, sd->size)) < 0)
|
||||
return ret;
|
||||
memcpy(lavfi->subcc_packet.data, sd->data, sd->size);
|
||||
@@ -386,10 +390,12 @@ static int lavfi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
|
||||
AVDictionary *frame_metadata;
|
||||
int ret, i;
|
||||
int size = 0;
|
||||
AVStream *st;
|
||||
|
||||
if (lavfi->subcc_packet.size) {
|
||||
av_packet_move_ref(pkt, &lavfi->subcc_packet);
|
||||
*pkt = lavfi->subcc_packet;
|
||||
av_init_packet(&lavfi->subcc_packet);
|
||||
lavfi->subcc_packet.size = 0;
|
||||
lavfi->subcc_packet.data = NULL;
|
||||
return pkt->size;
|
||||
}
|
||||
|
||||
@@ -427,52 +433,57 @@ static int lavfi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
|
||||
|
||||
av_buffersink_get_frame_flags(lavfi->sinks[min_pts_sink_idx], frame, 0);
|
||||
stream_idx = lavfi->sink_stream_map[min_pts_sink_idx];
|
||||
st = avctx->streams[stream_idx];
|
||||
|
||||
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
|
||||
if (frame->width /* FIXME best way of testing a video */) {
|
||||
size = av_image_get_buffer_size(frame->format, frame->width, frame->height, 1);
|
||||
if ((ret = av_new_packet(pkt, size)) < 0)
|
||||
goto fail;
|
||||
return ret;
|
||||
|
||||
av_image_copy_to_buffer(pkt->data, size, (const uint8_t **)frame->data, frame->linesize,
|
||||
frame->format, frame->width, frame->height, 1);
|
||||
} else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
|
||||
} else if (frame->channels /* FIXME test audio */) {
|
||||
size = frame->nb_samples * av_get_bytes_per_sample(frame->format) *
|
||||
frame->channels;
|
||||
if ((ret = av_new_packet(pkt, size)) < 0)
|
||||
goto fail;
|
||||
return ret;
|
||||
memcpy(pkt->data, frame->data[0], size);
|
||||
}
|
||||
|
||||
frame_metadata = frame->metadata;
|
||||
if (frame_metadata) {
|
||||
buffer_size_t size;
|
||||
uint8_t *metadata = av_packet_pack_dictionary(frame_metadata, &size);
|
||||
uint8_t *metadata;
|
||||
AVDictionaryEntry *e = NULL;
|
||||
AVBPrint meta_buf;
|
||||
|
||||
if (!metadata) {
|
||||
ret = AVERROR(ENOMEM);
|
||||
goto fail;
|
||||
av_bprint_init(&meta_buf, 0, AV_BPRINT_SIZE_UNLIMITED);
|
||||
while ((e = av_dict_get(frame_metadata, "", e, AV_DICT_IGNORE_SUFFIX))) {
|
||||
av_bprintf(&meta_buf, "%s", e->key);
|
||||
av_bprint_chars(&meta_buf, '\0', 1);
|
||||
av_bprintf(&meta_buf, "%s", e->value);
|
||||
av_bprint_chars(&meta_buf, '\0', 1);
|
||||
}
|
||||
if ((ret = av_packet_add_side_data(pkt, AV_PKT_DATA_STRINGS_METADATA,
|
||||
metadata, size)) < 0) {
|
||||
av_freep(&metadata);
|
||||
goto fail;
|
||||
if (!av_bprint_is_complete(&meta_buf) ||
|
||||
!(metadata = av_packet_new_side_data(pkt, AV_PKT_DATA_STRINGS_METADATA,
|
||||
meta_buf.len))) {
|
||||
av_bprint_finalize(&meta_buf, NULL);
|
||||
return AVERROR(ENOMEM);
|
||||
}
|
||||
memcpy(metadata, meta_buf.str, meta_buf.len);
|
||||
av_bprint_finalize(&meta_buf, NULL);
|
||||
}
|
||||
|
||||
if ((ret = create_subcc_packet(avctx, frame, min_pts_sink_idx)) < 0) {
|
||||
goto fail;
|
||||
av_frame_unref(frame);
|
||||
av_packet_unref(pkt);
|
||||
return ret;
|
||||
}
|
||||
|
||||
pkt->stream_index = stream_idx;
|
||||
pkt->pts = frame->pts;
|
||||
pkt->pos = frame->pkt_pos;
|
||||
pkt->size = size;
|
||||
av_frame_unref(frame);
|
||||
return size;
|
||||
fail:
|
||||
av_frame_unref(frame);
|
||||
return ret;
|
||||
|
||||
}
|
||||
|
||||
#define OFFSET(x) offsetof(LavfiContext, x)
|
||||
|
@@ -48,7 +48,6 @@ typedef struct PulseData {
|
||||
pa_threaded_mainloop *mainloop;
|
||||
pa_context *context;
|
||||
pa_stream *stream;
|
||||
size_t pa_frame_size;
|
||||
|
||||
TimeFilter *timefilter;
|
||||
int last_period;
|
||||
@@ -150,7 +149,6 @@ static av_cold int pulse_read_header(AVFormatContext *s)
|
||||
|
||||
pa_buffer_attr attr = { -1 };
|
||||
pa_channel_map cmap;
|
||||
const pa_buffer_attr *queried_attr;
|
||||
|
||||
pa_channel_map_init_extend(&cmap, pd->channels, PA_CHANNEL_MAP_WAVEEX);
|
||||
|
||||
@@ -219,7 +217,7 @@ static av_cold int pulse_read_header(AVFormatContext *s)
|
||||
|
||||
ret = pa_stream_connect_record(pd->stream, device, &attr,
|
||||
PA_STREAM_INTERPOLATE_TIMING
|
||||
| (pd->fragment_size == -1 ? PA_STREAM_ADJUST_LATENCY : 0)
|
||||
|PA_STREAM_ADJUST_LATENCY
|
||||
|PA_STREAM_AUTO_TIMING_UPDATE);
|
||||
|
||||
if (ret < 0) {
|
||||
@@ -244,15 +242,6 @@ static av_cold int pulse_read_header(AVFormatContext *s)
|
||||
pa_threaded_mainloop_wait(pd->mainloop);
|
||||
}
|
||||
|
||||
/* Query actual fragment size */
|
||||
queried_attr = pa_stream_get_buffer_attr(pd->stream);
|
||||
if (!queried_attr || queried_attr->fragsize > INT_MAX/100) {
|
||||
ret = AVERROR_EXTERNAL;
|
||||
goto unlock_and_fail;
|
||||
}
|
||||
pd->fragment_size = queried_attr->fragsize;
|
||||
pd->pa_frame_size = pa_frame_size(&ss);
|
||||
|
||||
pa_threaded_mainloop_unlock(pd->mainloop);
|
||||
|
||||
/* take real parameters */
|
||||
@@ -263,7 +252,7 @@ static av_cold int pulse_read_header(AVFormatContext *s)
|
||||
avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
|
||||
|
||||
pd->timefilter = ff_timefilter_new(1000000.0 / pd->sample_rate,
|
||||
pd->fragment_size / pd->pa_frame_size, 1.5E-6);
|
||||
1000, 1.5E-6);
|
||||
|
||||
if (!pd->timefilter) {
|
||||
pulse_close(s);
|
||||
@@ -288,13 +277,12 @@ static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt)
|
||||
int64_t dts;
|
||||
pa_usec_t latency;
|
||||
int negative;
|
||||
ptrdiff_t pos = 0;
|
||||
|
||||
pa_threaded_mainloop_lock(pd->mainloop);
|
||||
|
||||
CHECK_DEAD_GOTO(pd, ret, unlock_and_fail);
|
||||
|
||||
while (pos < pd->fragment_size) {
|
||||
while (!read_data) {
|
||||
int r;
|
||||
|
||||
r = pa_stream_peek(pd->stream, &read_data, &read_length);
|
||||
@@ -308,51 +296,43 @@ static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt)
|
||||
* silence, but that wouldn't work for compressed streams. */
|
||||
r = pa_stream_drop(pd->stream);
|
||||
CHECK_SUCCESS_GOTO(ret, r == 0, unlock_and_fail);
|
||||
} else {
|
||||
if (!pos) {
|
||||
if (av_new_packet(pkt, pd->fragment_size) < 0) {
|
||||
ret = AVERROR(ENOMEM);
|
||||
goto unlock_and_fail;
|
||||
}
|
||||
|
||||
dts = av_gettime();
|
||||
pa_operation_unref(pa_stream_update_timing_info(pd->stream, NULL, NULL));
|
||||
|
||||
if (pa_stream_get_latency(pd->stream, &latency, &negative) >= 0) {
|
||||
if (negative) {
|
||||
dts += latency;
|
||||
} else
|
||||
dts -= latency;
|
||||
} else {
|
||||
av_log(s, AV_LOG_WARNING, "pa_stream_get_latency() failed\n");
|
||||
}
|
||||
}
|
||||
if (pkt->size - pos < read_length) {
|
||||
if (pos)
|
||||
break;
|
||||
pa_stream_drop(pd->stream);
|
||||
/* Oversized fragment??? */
|
||||
ret = AVERROR_EXTERNAL;
|
||||
goto unlock_and_fail;
|
||||
}
|
||||
memcpy(pkt->data + pos, read_data, read_length);
|
||||
pos += read_length;
|
||||
pa_stream_drop(pd->stream);
|
||||
}
|
||||
}
|
||||
|
||||
if (av_new_packet(pkt, read_length) < 0) {
|
||||
ret = AVERROR(ENOMEM);
|
||||
goto unlock_and_fail;
|
||||
}
|
||||
|
||||
dts = av_gettime();
|
||||
pa_operation_unref(pa_stream_update_timing_info(pd->stream, NULL, NULL));
|
||||
|
||||
if (pa_stream_get_latency(pd->stream, &latency, &negative) >= 0) {
|
||||
enum AVCodecID codec_id =
|
||||
s->audio_codec_id == AV_CODEC_ID_NONE ? DEFAULT_CODEC_ID : s->audio_codec_id;
|
||||
int frame_size = ((av_get_bits_per_sample(codec_id) >> 3) * pd->channels);
|
||||
int frame_duration = read_length / frame_size;
|
||||
|
||||
|
||||
if (negative) {
|
||||
dts += latency;
|
||||
} else
|
||||
dts -= latency;
|
||||
if (pd->wallclock)
|
||||
pkt->pts = ff_timefilter_update(pd->timefilter, dts, pd->last_period);
|
||||
|
||||
pd->last_period = frame_duration;
|
||||
} else {
|
||||
av_log(s, AV_LOG_WARNING, "pa_stream_get_latency() failed\n");
|
||||
}
|
||||
|
||||
memcpy(pkt->data, read_data, read_length);
|
||||
pa_stream_drop(pd->stream);
|
||||
|
||||
pa_threaded_mainloop_unlock(pd->mainloop);
|
||||
|
||||
av_shrink_packet(pkt, pos);
|
||||
|
||||
if (pd->wallclock)
|
||||
pkt->pts = ff_timefilter_update(pd->timefilter, dts, pd->last_period);
|
||||
pd->last_period = pkt->size / pd->pa_frame_size;
|
||||
|
||||
return 0;
|
||||
|
||||
unlock_and_fail:
|
||||
av_packet_unref(pkt);
|
||||
pa_threaded_mainloop_unlock(pd->mainloop);
|
||||
return ret;
|
||||
}
|
||||
|
15
externals/ffmpeg/ffmpeg/libavdevice/timefilter.h
vendored
@@ -39,6 +39,16 @@ typedef struct TimeFilter TimeFilter;
|
||||
/**
|
||||
* Create a new Delay Locked Loop time filter
|
||||
*
|
||||
* feedback2_factor and feedback3_factor are the factors used for the
|
||||
* multiplications that are respectively performed in the second and third
|
||||
* feedback paths of the loop.
|
||||
*
|
||||
* Unless you know what you are doing, you should set these as follow:
|
||||
*
|
||||
* o = 2 * M_PI * bandwidth * period_in_seconds
|
||||
* feedback2_factor = sqrt(2) * o
|
||||
* feedback3_factor = o * o
|
||||
*
|
||||
* Where bandwidth is up to you to choose. Smaller values will filter out more
|
||||
* of the jitter, but also take a longer time for the loop to settle. A good
|
||||
* starting point is something between 0.3 and 3 Hz.
|
||||
@@ -49,8 +59,11 @@ typedef struct TimeFilter TimeFilter;
|
||||
* @param brandwidth filtering bandwidth, in Hz
|
||||
*
|
||||
* @return a pointer to a TimeFilter struct, or NULL on error
|
||||
*
|
||||
* For more details about these parameters and background concepts please see:
|
||||
* http://www.kokkinizita.net/papers/usingdll.pdf
|
||||
*/
|
||||
TimeFilter * ff_timefilter_new(double time_base, double period, double bandwidth);
|
||||
TimeFilter * ff_timefilter_new(double clock_period, double feedback2_factor, double feedback3_factor);
|
||||
|
||||
/**
|
||||
* Update the filter
|
||||
|
@@ -28,7 +28,7 @@
|
||||
#include "libavutil/version.h"
|
||||
|
||||
#define LIBAVDEVICE_VERSION_MAJOR 58
|
||||
#define LIBAVDEVICE_VERSION_MINOR 13
|
||||
#define LIBAVDEVICE_VERSION_MINOR 10
|
||||
#define LIBAVDEVICE_VERSION_MICRO 100
|
||||
|
||||
#define LIBAVDEVICE_VERSION_INT AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, \
|
||||
@@ -46,8 +46,5 @@
|
||||
* dropped at a future version bump. The defines themselves are not part of
|
||||
* the public API and may change, break or disappear at any time.
|
||||
*/
|
||||
#ifndef FF_API_DEVICE_CAPABILITIES
|
||||
#define FF_API_DEVICE_CAPABILITIES (LIBAVDEVICE_VERSION_MAJOR < 60)
|
||||
#endif
|
||||
|
||||
#endif /* AVDEVICE_VERSION_H */
|
||||
|
13
externals/ffmpeg/ffmpeg/libavdevice/vfwcap.c
vendored
@@ -24,7 +24,6 @@
|
||||
#include "libavutil/opt.h"
|
||||
#include "libavutil/parseutils.h"
|
||||
|
||||
#include "libavcodec/packet_internal.h"
|
||||
#include "libavformat/internal.h"
|
||||
|
||||
// windows.h must no be included before winsock2.h, and libavformat internal
|
||||
@@ -45,7 +44,7 @@ struct vfw_ctx {
|
||||
HWND hwnd;
|
||||
HANDLE mutex;
|
||||
HANDLE event;
|
||||
PacketList *pktl;
|
||||
AVPacketList *pktl;
|
||||
unsigned int curbufsize;
|
||||
unsigned int frame_num;
|
||||
char *video_size; /**< A string describing video size, set by a private option. */
|
||||
@@ -179,7 +178,7 @@ static LRESULT CALLBACK videostream_cb(HWND hwnd, LPVIDEOHDR vdhdr)
|
||||
{
|
||||
AVFormatContext *s;
|
||||
struct vfw_ctx *ctx;
|
||||
PacketList **ppktl, *pktl_next;
|
||||
AVPacketList **ppktl, *pktl_next;
|
||||
|
||||
s = (AVFormatContext *) GetWindowLongPtr(hwnd, GWLP_USERDATA);
|
||||
ctx = s->priv_data;
|
||||
@@ -191,7 +190,7 @@ static LRESULT CALLBACK videostream_cb(HWND hwnd, LPVIDEOHDR vdhdr)
|
||||
|
||||
WaitForSingleObject(ctx->mutex, INFINITE);
|
||||
|
||||
pktl_next = av_mallocz(sizeof(PacketList));
|
||||
pktl_next = av_mallocz(sizeof(AVPacketList));
|
||||
if(!pktl_next)
|
||||
goto fail;
|
||||
|
||||
@@ -220,7 +219,7 @@ fail:
|
||||
static int vfw_read_close(AVFormatContext *s)
|
||||
{
|
||||
struct vfw_ctx *ctx = s->priv_data;
|
||||
PacketList *pktl;
|
||||
AVPacketList *pktl;
|
||||
|
||||
if(ctx->hwnd) {
|
||||
SendMessage(ctx->hwnd, WM_CAP_SET_CALLBACK_VIDEOSTREAM, 0, 0);
|
||||
@@ -234,7 +233,7 @@ static int vfw_read_close(AVFormatContext *s)
|
||||
|
||||
pktl = ctx->pktl;
|
||||
while (pktl) {
|
||||
PacketList *next = pktl->next;
|
||||
AVPacketList *next = pktl->next;
|
||||
av_packet_unref(&pktl->pkt);
|
||||
av_free(pktl);
|
||||
pktl = next;
|
||||
@@ -440,7 +439,7 @@ fail:
|
||||
static int vfw_read_packet(AVFormatContext *s, AVPacket *pkt)
|
||||
{
|
||||
struct vfw_ctx *ctx = s->priv_data;
|
||||
PacketList *pktl = NULL;
|
||||
AVPacketList *pktl = NULL;
|
||||
|
||||
while(!pktl) {
|
||||
WaitForSingleObject(ctx->mutex, INFINITE);
|
||||
|
229
externals/ffmpeg/ffmpeg/libavdevice/xcbgrab.c
vendored
@@ -22,7 +22,6 @@
|
||||
#include "config.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <xcb/xcb.h>
|
||||
|
||||
#if CONFIG_LIBXCB_XFIXES
|
||||
@@ -60,7 +59,6 @@ typedef struct XCBGrabContext {
|
||||
AVRational time_base;
|
||||
int64_t frame_duration;
|
||||
|
||||
xcb_window_t window_id;
|
||||
int x, y;
|
||||
int width, height;
|
||||
int frame_size;
|
||||
@@ -71,7 +69,6 @@ typedef struct XCBGrabContext {
|
||||
int show_region;
|
||||
int region_border;
|
||||
int centered;
|
||||
int select_region;
|
||||
|
||||
const char *framerate;
|
||||
|
||||
@@ -83,7 +80,6 @@ typedef struct XCBGrabContext {
|
||||
#define OFFSET(x) offsetof(XCBGrabContext, x)
|
||||
#define D AV_OPT_FLAG_DECODING_PARAM
|
||||
static const AVOption options[] = {
|
||||
{ "window_id", "Window to capture.", OFFSET(window_id), AV_OPT_TYPE_INT, { .i64 = XCB_NONE }, 0, UINT32_MAX, D },
|
||||
{ "x", "Initial x coordinate.", OFFSET(x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, D },
|
||||
{ "y", "Initial y coordinate.", OFFSET(y), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, D },
|
||||
{ "grab_x", "Initial x coordinate.", OFFSET(x), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, D },
|
||||
@@ -96,7 +92,6 @@ static const AVOption options[] = {
|
||||
{ "centered", "Keep the mouse pointer at the center of grabbing region when following.", 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, INT_MIN, INT_MAX, D, "follow_mouse" },
|
||||
{ "show_region", "Show the grabbing region.", OFFSET(show_region), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, D },
|
||||
{ "region_border", "Set the region border thickness.", OFFSET(region_border), AV_OPT_TYPE_INT, { .i64 = 3 }, 1, 128, D },
|
||||
{ "select_region", "Select the grabbing region graphically using the pointer.", OFFSET(select_region), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, D },
|
||||
{ NULL },
|
||||
};
|
||||
|
||||
@@ -159,7 +154,7 @@ static int xcbgrab_frame(AVFormatContext *s, AVPacket *pkt)
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
xcb_get_image_cookie_t iq;
|
||||
xcb_get_image_reply_t *img;
|
||||
xcb_drawable_t drawable = c->window_id;
|
||||
xcb_drawable_t drawable = c->screen->root;
|
||||
xcb_generic_error_t *e = NULL;
|
||||
uint8_t *data;
|
||||
int length;
|
||||
@@ -186,6 +181,8 @@ static int xcbgrab_frame(AVFormatContext *s, AVPacket *pkt)
|
||||
data = xcb_get_image_data(img);
|
||||
length = xcb_get_image_data_length(img);
|
||||
|
||||
av_init_packet(pkt);
|
||||
|
||||
pkt->buf = av_buffer_create(data, length, xcbgrab_image_reply_free, img, 0);
|
||||
if (!pkt->buf) {
|
||||
free(img);
|
||||
@@ -206,7 +203,7 @@ static int64_t wait_frame(AVFormatContext *s, AVPacket *pkt)
|
||||
c->time_frame += c->frame_duration;
|
||||
|
||||
for (;;) {
|
||||
curtime = av_gettime_relative();
|
||||
curtime = av_gettime();
|
||||
delay = c->time_frame - curtime;
|
||||
if (delay <= 0)
|
||||
break;
|
||||
@@ -236,7 +233,7 @@ static void free_shm_buffer(void *opaque, uint8_t *data)
|
||||
shmdt(data);
|
||||
}
|
||||
|
||||
static AVBufferRef *allocate_shm_buffer(void *opaque, buffer_size_t size)
|
||||
static AVBufferRef *allocate_shm_buffer(void *opaque, int size)
|
||||
{
|
||||
xcb_connection_t *conn = opaque;
|
||||
xcb_shm_seg_t segment;
|
||||
@@ -267,7 +264,7 @@ static int xcbgrab_frame_shm(AVFormatContext *s, AVPacket *pkt)
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
xcb_shm_get_image_cookie_t iq;
|
||||
xcb_shm_get_image_reply_t *img;
|
||||
xcb_drawable_t drawable = c->window_id;
|
||||
xcb_drawable_t drawable = c->screen->root;
|
||||
xcb_generic_error_t *e = NULL;
|
||||
AVBufferRef *buf;
|
||||
xcb_shm_seg_t segment;
|
||||
@@ -301,6 +298,8 @@ static int xcbgrab_frame_shm(AVFormatContext *s, AVPacket *pkt)
|
||||
|
||||
free(img);
|
||||
|
||||
av_init_packet(pkt);
|
||||
|
||||
pkt->buf = buf;
|
||||
pkt->data = buf->data;
|
||||
pkt->size = c->frame_size;
|
||||
@@ -331,8 +330,7 @@ static int check_xfixes(xcb_connection_t *conn)
|
||||
|
||||
static void xcbgrab_draw_mouse(AVFormatContext *s, AVPacket *pkt,
|
||||
xcb_query_pointer_reply_t *p,
|
||||
xcb_get_geometry_reply_t *geo,
|
||||
int win_x, int win_y)
|
||||
xcb_get_geometry_reply_t *geo)
|
||||
{
|
||||
XCBGrabContext *gr = s->priv_data;
|
||||
uint32_t *cursor;
|
||||
@@ -354,17 +352,17 @@ static void xcbgrab_draw_mouse(AVFormatContext *s, AVPacket *pkt,
|
||||
cx = ci->x - ci->xhot;
|
||||
cy = ci->y - ci->yhot;
|
||||
|
||||
x = FFMAX(cx, win_x + gr->x);
|
||||
y = FFMAX(cy, win_y + gr->y);
|
||||
x = FFMAX(cx, gr->x);
|
||||
y = FFMAX(cy, gr->y);
|
||||
|
||||
w = FFMIN(cx + ci->width, win_x + gr->x + gr->width) - x;
|
||||
h = FFMIN(cy + ci->height, win_y + gr->y + gr->height) - y;
|
||||
w = FFMIN(cx + ci->width, gr->x + gr->width) - x;
|
||||
h = FFMIN(cy + ci->height, gr->y + gr->height) - y;
|
||||
|
||||
c_off = x - cx;
|
||||
i_off = x - gr->x - win_x;
|
||||
i_off = x - gr->x;
|
||||
|
||||
cursor += (y - cy) * ci->width;
|
||||
image += (y - gr->y - win_y) * gr->width * stride;
|
||||
image += (y - gr->y) * gr->width * stride;
|
||||
|
||||
for (y = 0; y < h; y++) {
|
||||
cursor += c_off;
|
||||
@@ -399,11 +397,11 @@ static void xcbgrab_draw_mouse(AVFormatContext *s, AVPacket *pkt,
|
||||
}
|
||||
#endif /* CONFIG_LIBXCB_XFIXES */
|
||||
|
||||
static void xcbgrab_update_region(AVFormatContext *s, int win_x, int win_y)
|
||||
static void xcbgrab_update_region(AVFormatContext *s)
|
||||
{
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
const uint32_t args[] = { win_x + c->x - c->region_border,
|
||||
win_y + c->y - c->region_border };
|
||||
const uint32_t args[] = { c->x - c->region_border,
|
||||
c->y - c->region_border };
|
||||
|
||||
xcb_configure_window(c->conn,
|
||||
c->window,
|
||||
@@ -416,51 +414,25 @@ static int xcbgrab_read_packet(AVFormatContext *s, AVPacket *pkt)
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
xcb_query_pointer_cookie_t pc;
|
||||
xcb_get_geometry_cookie_t gc;
|
||||
xcb_translate_coordinates_cookie_t tc;
|
||||
xcb_query_pointer_reply_t *p = NULL;
|
||||
xcb_get_geometry_reply_t *geo = NULL;
|
||||
xcb_translate_coordinates_reply_t *translate = NULL;
|
||||
int ret = 0;
|
||||
int64_t pts;
|
||||
int win_x = 0, win_y = 0;
|
||||
|
||||
wait_frame(s, pkt);
|
||||
pts = av_gettime();
|
||||
pts = wait_frame(s, pkt);
|
||||
|
||||
if (c->follow_mouse || c->draw_mouse) {
|
||||
pc = xcb_query_pointer(c->conn, c->window_id);
|
||||
gc = xcb_get_geometry(c->conn, c->window_id);
|
||||
pc = xcb_query_pointer(c->conn, c->screen->root);
|
||||
gc = xcb_get_geometry(c->conn, c->screen->root);
|
||||
p = xcb_query_pointer_reply(c->conn, pc, NULL);
|
||||
if (!p) {
|
||||
av_log(s, AV_LOG_ERROR, "Failed to query xcb pointer\n");
|
||||
return AVERROR_EXTERNAL;
|
||||
}
|
||||
geo = xcb_get_geometry_reply(c->conn, gc, NULL);
|
||||
if (!geo) {
|
||||
av_log(s, AV_LOG_ERROR, "Failed to get xcb geometry\n");
|
||||
free(p);
|
||||
return AVERROR_EXTERNAL;
|
||||
}
|
||||
}
|
||||
if (c->window_id != c->screen->root) {
|
||||
tc = xcb_translate_coordinates(c->conn, c->window_id, c->screen->root, 0, 0);
|
||||
translate = xcb_translate_coordinates_reply(c->conn, tc, NULL);
|
||||
if (!translate) {
|
||||
free(p);
|
||||
free(geo);
|
||||
av_log(s, AV_LOG_ERROR, "Failed to translate xcb geometry\n");
|
||||
return AVERROR_EXTERNAL;
|
||||
}
|
||||
win_x = translate->dst_x;
|
||||
win_y = translate->dst_y;
|
||||
free(translate);
|
||||
}
|
||||
|
||||
if (c->follow_mouse && p->same_screen)
|
||||
xcbgrab_reposition(s, p, geo);
|
||||
|
||||
if (c->show_region)
|
||||
xcbgrab_update_region(s, win_x, win_y);
|
||||
xcbgrab_update_region(s);
|
||||
|
||||
#if CONFIG_LIBXCB_SHM
|
||||
if (c->has_shm && xcbgrab_frame_shm(s, pkt) < 0) {
|
||||
@@ -475,7 +447,7 @@ static int xcbgrab_read_packet(AVFormatContext *s, AVPacket *pkt)
|
||||
|
||||
#if CONFIG_LIBXCB_XFIXES
|
||||
if (ret >= 0 && c->draw_mouse && p->same_screen)
|
||||
xcbgrab_draw_mouse(s, pkt, p, geo, win_x, win_y);
|
||||
xcbgrab_draw_mouse(s, pkt, p, geo);
|
||||
#endif
|
||||
|
||||
free(p);
|
||||
@@ -529,26 +501,21 @@ static int pixfmt_from_pixmap_format(AVFormatContext *s, int depth,
|
||||
switch (depth) {
|
||||
case 32:
|
||||
if (fmt->bits_per_pixel == 32)
|
||||
*pix_fmt = setup->image_byte_order == XCB_IMAGE_ORDER_LSB_FIRST ?
|
||||
AV_PIX_FMT_BGR0 : AV_PIX_FMT_0RGB;
|
||||
*pix_fmt = AV_PIX_FMT_0RGB;
|
||||
break;
|
||||
case 24:
|
||||
if (fmt->bits_per_pixel == 32)
|
||||
*pix_fmt = setup->image_byte_order == XCB_IMAGE_ORDER_LSB_FIRST ?
|
||||
AV_PIX_FMT_BGR0 : AV_PIX_FMT_0RGB;
|
||||
*pix_fmt = AV_PIX_FMT_0RGB32;
|
||||
else if (fmt->bits_per_pixel == 24)
|
||||
*pix_fmt = setup->image_byte_order == XCB_IMAGE_ORDER_LSB_FIRST ?
|
||||
AV_PIX_FMT_BGR24 : AV_PIX_FMT_RGB24;
|
||||
*pix_fmt = AV_PIX_FMT_RGB24;
|
||||
break;
|
||||
case 16:
|
||||
if (fmt->bits_per_pixel == 16)
|
||||
*pix_fmt = setup->image_byte_order == XCB_IMAGE_ORDER_LSB_FIRST ?
|
||||
AV_PIX_FMT_RGB565LE : AV_PIX_FMT_RGB565BE;
|
||||
*pix_fmt = AV_PIX_FMT_RGB565;
|
||||
break;
|
||||
case 15:
|
||||
if (fmt->bits_per_pixel == 16)
|
||||
*pix_fmt = setup->image_byte_order == XCB_IMAGE_ORDER_LSB_FIRST ?
|
||||
AV_PIX_FMT_RGB555LE : AV_PIX_FMT_RGB555BE;
|
||||
*pix_fmt = AV_PIX_FMT_RGB555;
|
||||
break;
|
||||
case 8:
|
||||
if (fmt->bits_per_pixel == 8)
|
||||
@@ -587,12 +554,10 @@ static int create_stream(AVFormatContext *s)
|
||||
|
||||
avpriv_set_pts_info(st, 64, 1, 1000000);
|
||||
|
||||
gc = xcb_get_geometry(c->conn, c->window_id);
|
||||
gc = xcb_get_geometry(c->conn, c->screen->root);
|
||||
geo = xcb_get_geometry_reply(c->conn, gc, NULL);
|
||||
if (!geo) {
|
||||
av_log(s, AV_LOG_ERROR, "Can't find window '0x%x', aborting.\n", c->window_id);
|
||||
if (!geo)
|
||||
return AVERROR_EXTERNAL;
|
||||
}
|
||||
|
||||
if (!c->width || !c->height) {
|
||||
c->width = geo->width;
|
||||
@@ -614,7 +579,7 @@ static int create_stream(AVFormatContext *s)
|
||||
c->time_base = (AVRational){ st->avg_frame_rate.den,
|
||||
st->avg_frame_rate.num };
|
||||
c->frame_duration = av_rescale_q(1, c->time_base, AV_TIME_BASE_Q);
|
||||
c->time_frame = av_gettime_relative();
|
||||
c->time_frame = av_gettime();
|
||||
|
||||
ret = pixfmt_from_pixmap_format(s, geo->depth, &st->codecpar->format, &c->bpp);
|
||||
free(geo);
|
||||
@@ -703,117 +668,6 @@ static void setup_window(AVFormatContext *s)
|
||||
draw_rectangle(s);
|
||||
}
|
||||
|
||||
#define CROSSHAIR_CURSOR 34
|
||||
|
||||
static xcb_rectangle_t rectangle_from_corners(xcb_point_t *corner_a,
|
||||
xcb_point_t *corner_b)
|
||||
{
|
||||
xcb_rectangle_t rectangle;
|
||||
rectangle.x = FFMIN(corner_a->x, corner_b->x);
|
||||
rectangle.y = FFMIN(corner_a->y, corner_b->y);
|
||||
rectangle.width = FFABS(corner_a->x - corner_b->x);
|
||||
rectangle.height = FFABS(corner_a->y - corner_b->y);
|
||||
return rectangle;
|
||||
}
|
||||
|
||||
static int select_region(AVFormatContext *s)
|
||||
{
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
xcb_connection_t *conn = c->conn;
|
||||
xcb_screen_t *screen = c->screen;
|
||||
|
||||
int ret = 0, done = 0, was_pressed = 0;
|
||||
xcb_cursor_t cursor;
|
||||
xcb_font_t cursor_font;
|
||||
xcb_point_t press_position;
|
||||
xcb_generic_event_t *event;
|
||||
xcb_rectangle_t rectangle = { 0 };
|
||||
xcb_grab_pointer_reply_t *reply;
|
||||
xcb_grab_pointer_cookie_t cookie;
|
||||
|
||||
xcb_window_t root_window = screen->root;
|
||||
xcb_gcontext_t gc = xcb_generate_id(conn);
|
||||
uint32_t mask = XCB_GC_FUNCTION | XCB_GC_SUBWINDOW_MODE;
|
||||
uint32_t values[] = { XCB_GX_INVERT, XCB_SUBWINDOW_MODE_INCLUDE_INFERIORS };
|
||||
xcb_create_gc(conn, gc, root_window, mask, values);
|
||||
|
||||
cursor_font = xcb_generate_id(conn);
|
||||
xcb_open_font(conn, cursor_font, strlen("cursor"), "cursor");
|
||||
cursor = xcb_generate_id(conn);
|
||||
xcb_create_glyph_cursor(conn, cursor, cursor_font, cursor_font,
|
||||
CROSSHAIR_CURSOR, CROSSHAIR_CURSOR + 1, 0, 0, 0,
|
||||
0xFFFF, 0xFFFF, 0xFFFF);
|
||||
cookie = xcb_grab_pointer(conn, 0, root_window,
|
||||
XCB_EVENT_MASK_BUTTON_PRESS |
|
||||
XCB_EVENT_MASK_BUTTON_RELEASE |
|
||||
XCB_EVENT_MASK_BUTTON_MOTION,
|
||||
XCB_GRAB_MODE_ASYNC, XCB_GRAB_MODE_ASYNC,
|
||||
root_window, cursor, XCB_CURRENT_TIME);
|
||||
reply = xcb_grab_pointer_reply(conn, cookie, NULL);
|
||||
if (!reply || reply->status != XCB_GRAB_STATUS_SUCCESS) {
|
||||
av_log(s, AV_LOG_ERROR,
|
||||
"Failed to select region. Could not grab pointer.\n");
|
||||
ret = AVERROR(EIO);
|
||||
free(reply);
|
||||
goto fail;
|
||||
}
|
||||
free(reply);
|
||||
|
||||
xcb_grab_server(conn);
|
||||
|
||||
while (!done && (event = xcb_wait_for_event(conn))) {
|
||||
switch (event->response_type & ~0x80) {
|
||||
case XCB_BUTTON_PRESS: {
|
||||
xcb_button_press_event_t *press = (xcb_button_press_event_t *)event;
|
||||
press_position = (xcb_point_t){ press->event_x, press->event_y };
|
||||
rectangle.x = press_position.x;
|
||||
rectangle.y = press_position.y;
|
||||
xcb_poly_rectangle(conn, root_window, gc, 1, &rectangle);
|
||||
was_pressed = 1;
|
||||
break;
|
||||
}
|
||||
case XCB_MOTION_NOTIFY: {
|
||||
if (was_pressed) {
|
||||
xcb_motion_notify_event_t *motion =
|
||||
(xcb_motion_notify_event_t *)event;
|
||||
xcb_point_t cursor_position = { motion->event_x, motion->event_y };
|
||||
xcb_poly_rectangle(conn, root_window, gc, 1, &rectangle);
|
||||
rectangle = rectangle_from_corners(&press_position, &cursor_position);
|
||||
xcb_poly_rectangle(conn, root_window, gc, 1, &rectangle);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case XCB_BUTTON_RELEASE: {
|
||||
xcb_poly_rectangle(conn, root_window, gc, 1, &rectangle);
|
||||
done = 1;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
xcb_flush(conn);
|
||||
free(event);
|
||||
}
|
||||
c->width = rectangle.width;
|
||||
c->height = rectangle.height;
|
||||
if (c->width && c->height) {
|
||||
c->x = rectangle.x;
|
||||
c->y = rectangle.y;
|
||||
} else {
|
||||
c->x = 0;
|
||||
c->y = 0;
|
||||
}
|
||||
xcb_ungrab_server(conn);
|
||||
xcb_ungrab_pointer(conn, XCB_CURRENT_TIME);
|
||||
xcb_flush(conn);
|
||||
|
||||
fail:
|
||||
xcb_free_cursor(conn, cursor);
|
||||
xcb_close_font(conn, cursor_font);
|
||||
xcb_free_gc(conn, gc);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static av_cold int xcbgrab_read_header(AVFormatContext *s)
|
||||
{
|
||||
XCBGrabContext *c = s->priv_data;
|
||||
@@ -848,27 +702,6 @@ static av_cold int xcbgrab_read_header(AVFormatContext *s)
|
||||
return AVERROR(EIO);
|
||||
}
|
||||
|
||||
if (c->window_id == XCB_NONE)
|
||||
c->window_id = c->screen->root;
|
||||
else {
|
||||
if (c->select_region) {
|
||||
av_log(s, AV_LOG_WARNING, "select_region ignored with window_id.\n");
|
||||
c->select_region = 0;
|
||||
}
|
||||
if (c->follow_mouse) {
|
||||
av_log(s, AV_LOG_WARNING, "follow_mouse ignored with window_id.\n");
|
||||
c->follow_mouse = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (c->select_region) {
|
||||
ret = select_region(s);
|
||||
if (ret < 0) {
|
||||
xcbgrab_read_close(s);
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
ret = create_stream(s);
|
||||
|
||||
if (ret < 0) {
|
||||
|
Reference in New Issue
Block a user