upgrade ffmpeg to v6.0

pull/799/head
Sue N. Cooper 2 years ago
parent be28fa1010
commit 50b13ca2e1
  1. android/README.md (2)
  2. android/ffmpeg-kit-android-lib/Doxyfile (2)
  3. android/ffmpeg-kit-android-lib/build.gradle (8)
  4. android/ffmpeg-kit-android-lib/consumer-rules.pro (2)
  5. android/ffmpeg-kit-android-lib/src/main/cpp/ffmpegkit.c (8)
  6. android/ffmpeg-kit-android-lib/src/main/cpp/ffmpegkit.h (2)
  7. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_cmdutils.c (46)
  8. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_cmdutils.h (25)
  9. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg.c (2592)
  10. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg.h (370)
  11. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_demux.c (1148)
  12. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_filter.c (287)
  13. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_hw.c (27)
  14. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_mux.c (680)
  15. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_mux.h (165)
  16. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_mux_init.c (2414)
  17. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffmpeg_opt.c (2688)
  18. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_ffprobe.c (115)
  19. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_objpool.c (145)
  20. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_objpool.h (50)
  21. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_opt_common.c (9)
  22. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_sync_queue.c (462)
  23. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_sync_queue.h (122)
  24. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_thread_queue.c (259)
  25. android/ffmpeg-kit-android-lib/src/main/cpp/fftools_thread_queue.h (94)
  26. android/ffmpeg-kit-android-lib/src/main/java/com/arthenica/ffmpegkit/FFmpegKitConfig.java (2)
  27. android/ffmpeg-kit-android-lib/src/main/java/com/arthenica/ffmpegkit/NativeLoader.java (2)
  28. android/ffmpeg-kit-android-lib/src/main/java/com/arthenica/ffmpegkit/Statistics.java (8)
  29. android/jni/Android.mk (2)
  30. apple/Doxyfile (2)
  31. apple/README.md (6)
  32. apple/configure.ac (4)
  33. apple/src/FFmpegKitConfig.m (14)
  34. apple/src/Makefile.am (13)
  35. apple/src/Makefile.in (77)
  36. apple/src/Statistics.h (4)
  37. apple/src/Statistics.m (6)
  38. apple/src/fftools_cmdutils.c (44)
  39. apple/src/fftools_cmdutils.h (23)
  40. apple/src/fftools_ffmpeg.c (2590)
  41. apple/src/fftools_ffmpeg.h (368)
  42. apple/src/fftools_ffmpeg_demux.c (1148)
  43. apple/src/fftools_ffmpeg_filter.c (287)
  44. apple/src/fftools_ffmpeg_hw.c (25)
  45. apple/src/fftools_ffmpeg_mux.c (677)
  46. apple/src/fftools_ffmpeg_mux.h (165)
  47. apple/src/fftools_ffmpeg_mux_init.c (2414)
  48. apple/src/fftools_ffmpeg_opt.c (2686)
  49. apple/src/fftools_ffprobe.c (113)
  50. apple/src/fftools_objpool.c (145)
  51. apple/src/fftools_objpool.h (50)
  52. apple/src/fftools_opt_common.c (9)
  53. apple/src/fftools_sync_queue.c (462)
  54. apple/src/fftools_sync_queue.h (122)
  55. apple/src/fftools_thread_queue.c (259)
  56. apple/src/fftools_thread_queue.h (94)
  57. linux/Doxyfile (2)
  58. linux/configure.ac (7)
  59. linux/src/FFmpegKit.cpp (5)
  60. linux/src/FFmpegKitConfig.cpp (16)
  61. linux/src/FFmpegKitConfig.h (2)
  62. linux/src/FFprobeKit.cpp (1)
  63. linux/src/Makefile.am (13)
  64. linux/src/Makefile.in (77)
  65. linux/src/Statistics.cpp (4)
  66. linux/src/Statistics.h (6)
  67. linux/src/fftools_cmdutils.c (46)
  68. linux/src/fftools_cmdutils.h (25)
  69. linux/src/fftools_ffmpeg.c (2590)
  70. linux/src/fftools_ffmpeg.h (370)
  71. linux/src/fftools_ffmpeg_demux.c (1148)
  72. linux/src/fftools_ffmpeg_filter.c (287)
  73. linux/src/fftools_ffmpeg_hw.c (27)
  74. linux/src/fftools_ffmpeg_mux.c (680)
  75. linux/src/fftools_ffmpeg_mux.h (165)
  76. linux/src/fftools_ffmpeg_mux_init.c (2414)
  77. linux/src/fftools_ffmpeg_opt.c (2686)
  78. linux/src/fftools_ffprobe.c (115)
  79. linux/src/fftools_objpool.c (145)
  80. linux/src/fftools_objpool.h (50)
  81. linux/src/fftools_opt_common.c (9)
  82. linux/src/fftools_sync_queue.c (462)
  83. linux/src/fftools_sync_queue.h (122)
  84. linux/src/fftools_thread_queue.c (259)
  85. linux/src/fftools_thread_queue.h (94)
  86. scripts/android/libiconv.sh (2)
  87. scripts/source.sh (4)
  88. tools/android/build.gradle (8)
  89. tools/android/build.lts.gradle (8)
  90. tools/protocols/libavformat_file.c (24)

@ -77,7 +77,7 @@ All libraries created by `android.sh` can be found under the `prebuilt` director
} }
dependencies { dependencies {
implementation 'com.arthenica:ffmpeg-kit-full:5.1' implementation 'com.arthenica:ffmpeg-kit-full:6.0'
} }
``` ```

@ -38,7 +38,7 @@ PROJECT_NAME = "FFmpegKit Android API"
# could be handy for archiving the generated documentation or if some version # could be handy for archiving the generated documentation or if some version
# control system is used. # control system is used.
PROJECT_NUMBER = 5.1 PROJECT_NUMBER = 6.0
# Using the PROJECT_BRIEF tag one can provide an optional one line description # Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a # for a project that appears at the top of each page and should give viewer a

@ -2,14 +2,14 @@ apply plugin: 'com.android.library'
android { android {
namespace 'com.arthenica.ffmpegkit' namespace 'com.arthenica.ffmpegkit'
compileSdk 31 compileSdk 33
ndkVersion "22.1.7171670" ndkVersion "22.1.7171670"
defaultConfig { defaultConfig {
minSdk 24 minSdk 24
targetSdk 31 targetSdk 33
versionCode 240510 versionCode 240600
versionName "5.1" versionName "6.0"
project.archivesBaseName = "ffmpeg-kit" project.archivesBaseName = "ffmpeg-kit"
consumerProguardFiles "consumer-rules.pro" consumerProguardFiles "consumer-rules.pro"
} }

@ -1,7 +1,7 @@
-keep class com.arthenica.ffmpegkit.FFmpegKitConfig { -keep class com.arthenica.ffmpegkit.FFmpegKitConfig {
native <methods>; native <methods>;
void log(long, int, byte[]); void log(long, int, byte[]);
void statistics(long, int, float, float, long , int, double, double); void statistics(long, int, float, float, long , double, double, double);
int safOpen(int); int safOpen(int);
int safClose(int); int safClose(int);
} }

@ -45,7 +45,7 @@ struct CallbackData {
float statisticsFps; // statistics fps float statisticsFps; // statistics fps
float statisticsQuality; // statistics quality float statisticsQuality; // statistics quality
int64_t statisticsSize; // statistics size int64_t statisticsSize; // statistics size
int statisticsTime; // statistics time double statisticsTime; // statistics time
double statisticsBitrate; // statistics bitrate double statisticsBitrate; // statistics bitrate
double statisticsSpeed; // statistics speed double statisticsSpeed; // statistics speed
@ -312,7 +312,7 @@ void logCallbackDataAdd(int level, AVBPrint *data) {
/** /**
* Adds statistics data to the end of callback data list. * Adds statistics data to the end of callback data list.
*/ */
void statisticsCallbackDataAdd(int frameNumber, float fps, float quality, int64_t size, int time, double bitrate, double speed) { void statisticsCallbackDataAdd(int frameNumber, float fps, float quality, int64_t size, double time, double bitrate, double speed) {
// CREATE DATA STRUCT FIRST // CREATE DATA STRUCT FIRST
struct CallbackData *newData = (struct CallbackData*)av_malloc(sizeof(struct CallbackData)); struct CallbackData *newData = (struct CallbackData*)av_malloc(sizeof(struct CallbackData));
@ -491,7 +491,7 @@ void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, v
* @param bitrate output bit rate in kbits/s * @param bitrate output bit rate in kbits/s
* @param speed processing speed = processed duration / operation duration * @param speed processing speed = processed duration / operation duration
*/ */
void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, int time, double bitrate, double speed) { void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, double time, double bitrate, double speed) {
statisticsCallbackDataAdd(frameNumber, fps, quality, size, time, bitrate, speed); statisticsCallbackDataAdd(frameNumber, fps, quality, size, time, bitrate, speed);
} }
@ -644,7 +644,7 @@ jint JNI_OnLoad(JavaVM *vm, void *reserved) {
return JNI_FALSE; return JNI_FALSE;
} }
statisticsMethod = (*env)->GetStaticMethodID(env, localConfigClass, "statistics", "(JIFFJIDD)V"); statisticsMethod = (*env)->GetStaticMethodID(env, localConfigClass, "statistics", "(JIFFJDDD)V");
if (statisticsMethod == NULL) { if (statisticsMethod == NULL) {
LOGE("OnLoad thread failed to GetStaticMethodID for %s.\n", "statistics"); LOGE("OnLoad thread failed to GetStaticMethodID for %s.\n", "statistics");
return JNI_FALSE; return JNI_FALSE;

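Note: the statistics path now carries the time value as a `double` end to end, which is why the ProGuard rule, the `CallbackData` struct, the native callback, and the `GetStaticMethodID` signature all change from an `int` time (`(JIFFJIDD)V`) to a `double` time (`(JIFFJDDD)V`). A minimal illustrative sketch of how the native side might forward one statistics update to Java with the new types; the helper name and parameter list are hypothetical, not the literal `ffmpegkit.c` code:

```c
#include <jni.h>
#include <stdint.h>

/* Hypothetical sketch: forward a statistics update to FFmpegKitConfig.statistics()
 * after the time field became a double. The argument order matches the
 * (JIFFJDDD)V signature looked up above: long sessionId, int frame, float fps,
 * float quality, long size, double time, double bitrate, double speed. */
static void forward_statistics(JNIEnv *env, jclass configClass, jmethodID statisticsMethod,
                               long sessionId, int frameNumber, float fps, float quality,
                               int64_t size, double time, double bitrate, double speed) {
    (*env)->CallStaticVoidMethod(env, configClass, statisticsMethod,
                                 (jlong)sessionId, (jint)frameNumber, (jfloat)fps,
                                 (jfloat)quality, (jlong)size, (jdouble)time,
                                 (jdouble)bitrate, (jdouble)speed);
}
```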
@ -27,7 +27,7 @@
#include "libavutil/ffversion.h" #include "libavutil/ffversion.h"
/** Library version string */ /** Library version string */
#define FFMPEG_KIT_VERSION "5.1" #define FFMPEG_KIT_VERSION "6.0"
/** Defines tag used for Android logging. */ /** Defines tag used for Android logging. */
#define LIB_NAME "ffmpeg-kit" #define LIB_NAME "ffmpeg-kit"

@ -1,7 +1,8 @@
/* /*
* Various utilities for command line tools * Various utilities for command line tools
* Copyright (c) 2000-2003 Fabrice Bellard * Copyright (c) 2000-2003 Fabrice Bellard
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -129,11 +136,18 @@ void register_exit(void (*cb)(int ret))
program_exit = cb; program_exit = cb;
} }
void report_and_exit(int ret)
{
av_log(NULL, AV_LOG_FATAL, "%s\n", av_err2str(ret));
exit_program(AVUNERROR(ret));
}
void exit_program(int ret) void exit_program(int ret)
{ {
if (program_exit) if (program_exit)
program_exit(ret); program_exit(ret);
// FFmpegKit
// exit disabled and replaced with longjmp, exit value stored in longjmp_value // exit disabled and replaced with longjmp, exit value stored in longjmp_value
// exit(ret); // exit(ret);
longjmp_value = ret; longjmp_value = ret;
@ -696,7 +710,7 @@ static void init_parse_context(OptionParseContext *octx,
octx->nb_groups = nb_groups; octx->nb_groups = nb_groups;
octx->groups = av_calloc(octx->nb_groups, sizeof(*octx->groups)); octx->groups = av_calloc(octx->nb_groups, sizeof(*octx->groups));
if (!octx->groups) if (!octx->groups)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
for (i = 0; i < octx->nb_groups; i++) for (i = 0; i < octx->nb_groups; i++)
octx->groups[i].group_def = &groups[i]; octx->groups[i].group_def = &groups[i];
@ -843,12 +857,7 @@ do { \
void print_error(const char *filename, int err) void print_error(const char *filename, int err)
{ {
char errbuf[128]; av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, av_err2str(err));
const char *errbuf_ptr = errbuf;
if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
errbuf_ptr = strerror(AVUNERROR(err));
av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
} }
int read_yesno(void) int read_yesno(void)
@ -971,7 +980,7 @@ AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
break; break;
} }
while ((t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX))) { while ((t = av_dict_iterate(opts, t))) {
const AVClass *priv_class; const AVClass *priv_class;
char *p = strchr(t->key, ':'); char *p = strchr(t->key, ':');
@ -1009,11 +1018,8 @@ AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
if (!s->nb_streams) if (!s->nb_streams)
return NULL; return NULL;
opts = av_calloc(s->nb_streams, sizeof(*opts)); opts = av_calloc(s->nb_streams, sizeof(*opts));
if (!opts) { if (!opts)
av_log(NULL, AV_LOG_ERROR, report_and_exit(AVERROR(ENOMEM));
"Could not alloc memory for stream options.\n");
exit_program(1);
}
for (i = 0; i < s->nb_streams; i++) for (i = 0; i < s->nb_streams; i++)
opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id, opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
s, s->streams[i], NULL); s, s->streams[i], NULL);
@ -1028,10 +1034,8 @@ void *grow_array(void *array, int elem_size, int *size, int new_size)
} }
if (*size < new_size) { if (*size < new_size) {
uint8_t *tmp = av_realloc_array(array, new_size, elem_size); uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
if (!tmp) { if (!tmp)
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n"); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size); memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
*size = new_size; *size = new_size;
return tmp; return tmp;
@ -1044,10 +1048,8 @@ void *allocate_array_elem(void *ptr, size_t elem_size, int *nb_elems)
void *new_elem; void *new_elem;
if (!(new_elem = av_mallocz(elem_size)) || if (!(new_elem = av_mallocz(elem_size)) ||
av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0) { av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0)
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n"); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
return new_elem; return new_elem;
} }

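Note: several hunks above swap the pre-6.0 dictionary iteration idiom `av_dict_get(dict, "", prev, AV_DICT_IGNORE_SUFFIX)` for `av_dict_iterate()`, and the same substitution recurs throughout the rest of this commit. A minimal sketch of the new idiom; `dump_dict` is a hypothetical helper:

```c
#include "libavutil/dict.h"
#include "libavutil/log.h"

/* Iterate all entries of a dictionary with the FFmpeg 6.0 helper
 * av_dict_iterate(), which replaces the old av_dict_get(..., "", prev,
 * AV_DICT_IGNORE_SUFFIX) pattern. */
static void dump_dict(const AVDictionary *dict) {
    const AVDictionaryEntry *e = NULL;
    while ((e = av_dict_iterate(dict, e)))
        av_log(NULL, AV_LOG_INFO, "%s=%s\n", e->key, e->value);
}
```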
@ -1,7 +1,8 @@
/* /*
* Various utilities for command line tools * Various utilities for command line tools
* copyright (c) 2003 Fabrice Bellard * copyright (c) 2003 Fabrice Bellard
* copyright (c) 2018 Taner Sener * copyright (c) 2018-2022 Taner Sener
* copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -95,6 +102,17 @@ extern __thread int find_stream_info;
*/ */
void register_exit(void (*cb)(int ret)); void register_exit(void (*cb)(int ret));
/**
* Reports an error corresponding to the provided
* AVERROR code and calls exit_program() with the
* corresponding POSIX error code.
* @note ret must be an AVERROR-value of a POSIX error code
* (i.e. AVERROR(EFOO) and not AVERROR_FOO).
* library functions can return both, so call this only
* with AVERROR(EFOO) of your own.
*/
void report_and_exit(int ret) av_noreturn;
/** /**
* Wraps exit with a program-specific cleanup routine. * Wraps exit with a program-specific cleanup routine.
*/ */
@ -232,11 +250,6 @@ void show_help_children(const AVClass *clazz, int flags);
void show_help_default_ffmpeg(const char *opt, const char *arg); void show_help_default_ffmpeg(const char *opt, const char *arg);
void show_help_default_ffprobe(const char *opt, const char *arg); void show_help_default_ffprobe(const char *opt, const char *arg);
/**
* Generic -h handler common to all fftools.
*/
int show_help(void *optctx, const char *opt, const char *arg);
/** /**
* Parse the command line arguments. * Parse the command line arguments.
* *

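Note: `report_and_exit()` collapses the repeated "log an allocation error, then call `exit_program(1)`" pattern into a single call. A minimal sketch of the new idiom under the constraint documented above (pass only `AVERROR(EFOO)`-style POSIX codes); `xalloc` is a hypothetical helper and `fftools_cmdutils.h` is assumed to be included:

```c
#include <errno.h>
#include "libavutil/error.h"
#include "libavutil/mem.h"
#include "fftools_cmdutils.h"   /* declares report_and_exit() */

/* Hypothetical helper: allocate or abort with the standard ENOMEM report.
 * Note: AVERROR(ENOMEM) is a POSIX-style code, as report_and_exit() requires;
 * AVERROR_FOO values must not be passed here. */
static void *xalloc(size_t nmemb, size_t size) {
    void *p = av_calloc(nmemb, size);
    if (!p)
        report_and_exit(AVERROR(ENOMEM));
    return p;
}
```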
File diff suppressed because it is too large.

@ -1,6 +1,7 @@
/* /*
* This file is part of FFmpeg. * This file is part of FFmpeg.
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* FFmpeg is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
@ -22,6 +23,16 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated to
* ffmpeg_mux.h
* - "class" member field renamed as clazz
* - time field in set_report_callback updated as double
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -59,11 +70,13 @@
#include "config.h" #include "config.h"
#include <stdatomic.h>
#include <stdint.h> #include <stdint.h>
#include <stdio.h> #include <stdio.h>
#include <signal.h> #include <signal.h>
#include "fftools_cmdutils.h" #include "fftools_cmdutils.h"
#include "fftools_sync_queue.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
@ -85,6 +98,12 @@
#include "libswresample/swresample.h" #include "libswresample/swresample.h"
// deprecated features
#define FFMPEG_OPT_PSNR 1
#define FFMPEG_OPT_MAP_CHANNEL 1
#define FFMPEG_OPT_MAP_SYNC 1
#define FFMPEG_ROTATION_METADATA 1
enum VideoSyncMethod { enum VideoSyncMethod {
VSYNC_AUTO = -1, VSYNC_AUTO = -1,
VSYNC_PASSTHROUGH, VSYNC_PASSTHROUGH,
@ -113,15 +132,15 @@ typedef struct StreamMap {
int disabled; /* 1 is this mapping is disabled by a negative map */ int disabled; /* 1 is this mapping is disabled by a negative map */
int file_index; int file_index;
int stream_index; int stream_index;
int sync_file_index;
int sync_stream_index;
char *linklabel; /* name of an output link, for mapping lavfi outputs */ char *linklabel; /* name of an output link, for mapping lavfi outputs */
} StreamMap; } StreamMap;
#if FFMPEG_OPT_MAP_CHANNEL
typedef struct { typedef struct {
int file_idx, stream_idx, channel_idx; // input int file_idx, stream_idx, channel_idx; // input
int ofile_idx, ostream_idx; // output int ofile_idx, ostream_idx; // output
} AudioChannelMap; } AudioChannelMap;
#endif
typedef struct OptionsContext { typedef struct OptionsContext {
OptionGroup *g; OptionGroup *g;
@ -157,6 +176,7 @@ typedef struct OptionsContext {
int accurate_seek; int accurate_seek;
int thread_queue_size; int thread_queue_size;
int input_sync_ref; int input_sync_ref;
int find_stream_info;
SpecifierOpt *ts_scale; SpecifierOpt *ts_scale;
int nb_ts_scale; int nb_ts_scale;
@ -174,11 +194,10 @@ typedef struct OptionsContext {
/* output options */ /* output options */
StreamMap *stream_maps; StreamMap *stream_maps;
int nb_stream_maps; int nb_stream_maps;
#if FFMPEG_OPT_MAP_CHANNEL
AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */ AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */
int nb_audio_channel_maps; /* number of (valid) -map_channel settings */ int nb_audio_channel_maps; /* number of (valid) -map_channel settings */
int metadata_global_manual; #endif
int metadata_streams_manual;
int metadata_chapters_manual;
const char **attachments; const char **attachments;
int nb_attachments; int nb_attachments;
@ -186,9 +205,10 @@ typedef struct OptionsContext {
int64_t recording_time; int64_t recording_time;
int64_t stop_time; int64_t stop_time;
uint64_t limit_filesize; int64_t limit_filesize;
float mux_preload; float mux_preload;
float mux_max_delay; float mux_max_delay;
float shortest_buf_duration;
int shortest; int shortest;
int bitexact; int bitexact;
@ -221,6 +241,12 @@ typedef struct OptionsContext {
int nb_force_fps; int nb_force_fps;
SpecifierOpt *frame_aspect_ratios; SpecifierOpt *frame_aspect_ratios;
int nb_frame_aspect_ratios; int nb_frame_aspect_ratios;
SpecifierOpt *display_rotations;
int nb_display_rotations;
SpecifierOpt *display_hflips;
int nb_display_hflips;
SpecifierOpt *display_vflips;
int nb_display_vflips;
SpecifierOpt *rc_overrides; SpecifierOpt *rc_overrides;
int nb_rc_overrides; int nb_rc_overrides;
SpecifierOpt *intra_matrices; SpecifierOpt *intra_matrices;
@ -247,6 +273,8 @@ typedef struct OptionsContext {
int nb_reinit_filters; int nb_reinit_filters;
SpecifierOpt *fix_sub_duration; SpecifierOpt *fix_sub_duration;
int nb_fix_sub_duration; int nb_fix_sub_duration;
SpecifierOpt *fix_sub_duration_heartbeat;
int nb_fix_sub_duration_heartbeat;
SpecifierOpt *canvas_sizes; SpecifierOpt *canvas_sizes;
int nb_canvas_sizes; int nb_canvas_sizes;
SpecifierOpt *pass; SpecifierOpt *pass;
@ -275,6 +303,18 @@ typedef struct OptionsContext {
int nb_autoscale; int nb_autoscale;
SpecifierOpt *bits_per_raw_sample; SpecifierOpt *bits_per_raw_sample;
int nb_bits_per_raw_sample; int nb_bits_per_raw_sample;
SpecifierOpt *enc_stats_pre;
int nb_enc_stats_pre;
SpecifierOpt *enc_stats_post;
int nb_enc_stats_post;
SpecifierOpt *mux_stats;
int nb_mux_stats;
SpecifierOpt *enc_stats_pre_fmt;
int nb_enc_stats_pre_fmt;
SpecifierOpt *enc_stats_post_fmt;
int nb_enc_stats_post_fmt;
SpecifierOpt *mux_stats_fmt;
int nb_mux_stats_fmt;
} OptionsContext; } OptionsContext;
typedef struct InputFilter { typedef struct InputFilter {
@ -350,12 +390,22 @@ typedef struct InputStream {
#define DECODING_FOR_OST 1 #define DECODING_FOR_OST 1
#define DECODING_FOR_FILTER 2 #define DECODING_FOR_FILTER 2
int processing_needed; /* non zero if the packets must be processed */ int processing_needed; /* non zero if the packets must be processed */
// should attach FrameData as opaque_ref after decoding
int want_frame_data;
/**
* Codec parameters - to be used by the decoding/streamcopy code.
* st->codecpar should not be accessed, because it may be modified
* concurrently by the demuxing thread.
*/
AVCodecParameters *par;
AVCodecContext *dec_ctx; AVCodecContext *dec_ctx;
const AVCodec *dec; const AVCodec *dec;
AVFrame *decoded_frame; AVFrame *decoded_frame;
AVPacket *pkt; AVPacket *pkt;
AVRational framerate_guessed;
int64_t prev_pkt_pts; int64_t prev_pkt_pts;
int64_t start; /* time when read started */ int64_t start; /* time when read started */
/* predicted dts of the next packet read for this stream or (when there are /* predicted dts of the next packet read for this stream or (when there are
@ -368,6 +418,12 @@ typedef struct InputStream {
int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units) int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units)
int wrap_correction_done; int wrap_correction_done;
// the value of AVCodecParserContext.repeat_pict from the AVStream parser
// for the last packet returned from ifile_get_packet()
// -1 if unknown
// FIXME: this is a hack, the avstream parser should not be used
int last_pkt_repeat_pict;
int64_t filter_in_rescale_delta_last; int64_t filter_in_rescale_delta_last;
int64_t min_pts; /* pts with the smallest value in a current stream */ int64_t min_pts; /* pts with the smallest value in a current stream */
@ -417,12 +473,8 @@ typedef struct InputStream {
char *hwaccel_device; char *hwaccel_device;
enum AVPixelFormat hwaccel_output_format; enum AVPixelFormat hwaccel_output_format;
/* hwaccel context */
void *hwaccel_ctx;
void (*hwaccel_uninit)(AVCodecContext *s);
int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame); int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
enum AVPixelFormat hwaccel_pix_fmt; enum AVPixelFormat hwaccel_pix_fmt;
enum AVPixelFormat hwaccel_retrieved_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets read // combined size of all the packets read
@ -439,38 +491,46 @@ typedef struct InputStream {
int got_output; int got_output;
} InputStream; } InputStream;
typedef struct LastFrameDuration {
int stream_idx;
int64_t duration;
} LastFrameDuration;
typedef struct InputFile { typedef struct InputFile {
int index;
AVFormatContext *ctx; AVFormatContext *ctx;
int eof_reached; /* true if eof reached */ int eof_reached; /* true if eof reached */
int eagain; /* true if last read attempt returned EAGAIN */ int eagain; /* true if last read attempt returned EAGAIN */
int ist_index; /* index of first stream in input_streams */
int loop; /* set number of times input stream should be looped */
int64_t duration; /* actual duration of the longest stream in a file
at the moment when looping happens */
AVRational time_base; /* time base of the duration */
int64_t input_ts_offset; int64_t input_ts_offset;
int input_sync_ref; int input_sync_ref;
/**
* Effective format start time based on enabled streams.
*/
int64_t start_time_effective;
int64_t ts_offset; int64_t ts_offset;
/**
* Extra timestamp offset added by discontinuity handling.
*/
int64_t ts_offset_discont;
int64_t last_ts; int64_t last_ts;
int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */ int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */
int64_t recording_time; int64_t recording_time;
int nb_streams; /* number of stream that ffmpeg is aware of; may be different
from ctx.nb_streams if new streams appear during av_read_frame() */ /* streams that ffmpeg is aware of;
int nb_streams_warn; /* number of streams that the user was warned of */ * there may be extra streams in ctx that are not mapped to an InputStream
* if new streams appear dynamically during demuxing */
InputStream **streams;
int nb_streams;
int rate_emu; int rate_emu;
float readrate; float readrate;
int accurate_seek; int accurate_seek;
AVPacket *pkt; /* when looping the input file, this queue is used by decoders to report
* the last frame duration back to the demuxer thread */
#if HAVE_THREADS AVThreadMessageQueue *audio_duration_queue;
AVThreadMessageQueue *in_thread_queue; int audio_duration_queue_size;
pthread_t thread; /* thread reading from this file */
int non_blocking; /* reading packets from the thread should not block */
int joined; /* the thread has been joined */
int thread_queue_size; /* maximum number of queued packets */
#endif
} InputFile; } InputFile;
enum forced_keyframes_const { enum forced_keyframes_const {
@ -485,6 +545,41 @@ enum forced_keyframes_const {
#define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0) #define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0)
#define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1) #define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1)
enum EncStatsType {
ENC_STATS_LITERAL = 0,
ENC_STATS_FILE_IDX,
ENC_STATS_STREAM_IDX,
ENC_STATS_FRAME_NUM,
ENC_STATS_FRAME_NUM_IN,
ENC_STATS_TIMEBASE,
ENC_STATS_TIMEBASE_IN,
ENC_STATS_PTS,
ENC_STATS_PTS_TIME,
ENC_STATS_PTS_IN,
ENC_STATS_PTS_TIME_IN,
ENC_STATS_DTS,
ENC_STATS_DTS_TIME,
ENC_STATS_SAMPLE_NUM,
ENC_STATS_NB_SAMPLES,
ENC_STATS_PKT_SIZE,
ENC_STATS_BITRATE,
ENC_STATS_AVG_BITRATE,
};
typedef struct EncStatsComponent {
enum EncStatsType type;
uint8_t *str;
size_t str_len;
} EncStatsComponent;
typedef struct EncStats {
EncStatsComponent *components;
int nb_components;
AVIOContext *io;
} EncStats;
extern const char *const forced_keyframes_const_names[]; extern const char *const forced_keyframes_const_names[];
typedef enum { typedef enum {
@ -492,68 +587,92 @@ typedef enum {
MUXER_FINISHED = 2, MUXER_FINISHED = 2,
} OSTFinished ; } OSTFinished ;
enum {
KF_FORCE_SOURCE = 1,
KF_FORCE_SOURCE_NO_DROP = 2,
};
typedef struct KeyframeForceCtx {
int type;
int64_t ref_pts;
// timestamps of the forced keyframes, in AV_TIME_BASE_Q
int64_t *pts;
int nb_pts;
int index;
AVExpr *pexpr;
double expr_const_values[FKF_NB];
int dropped_keyframe;
} KeyframeForceCtx;
typedef struct OutputStream { typedef struct OutputStream {
const AVClass *clazz;
int file_index; /* file index */ int file_index; /* file index */
int index; /* stream index in the output file */ int index; /* stream index in the output file */
int source_index; /* InputStream index */
/* input stream that is the source for this output stream;
* may be NULL for streams with no well-defined source, e.g.
* attachments or outputs from complex filtergraphs */
InputStream *ist;
AVStream *st; /* stream in the output file */ AVStream *st; /* stream in the output file */
int encoding_needed; /* true if encoding needed for this stream */ /* number of frames emitted by the video-encoding sync code */
int64_t frame_number; int64_t vsync_frame_number;
/* input pts and corresponding output pts /* predicted pts of the next frame to be encoded
for A/V sync */ * audio/video encoding only */
struct InputStream *sync_ist; /* input stream to sync against */ int64_t next_pts;
int64_t sync_opts; /* output frame counter, could be changed to some true timestamp */ // FIXME look at frame_number /* dts of the last packet sent to the muxing queue, in AV_TIME_BASE_Q */
/* pts of the first frame encoded for this stream, used for limiting
* recording time */
int64_t first_pts;
/* dts of the last packet sent to the muxer */
int64_t last_mux_dts; int64_t last_mux_dts;
/* pts of the last frame received from the filters, in AV_TIME_BASE_Q */
int64_t last_filter_pts;
// timestamp from which the streamcopied streams should start,
// in AV_TIME_BASE_Q;
// everything before it should be discarded
int64_t ts_copy_start;
// the timebase of the packets sent to the muxer // the timebase of the packets sent to the muxer
AVRational mux_timebase; AVRational mux_timebase;
AVRational enc_timebase; AVRational enc_timebase;
AVBSFContext *bsf_ctx;
AVCodecContext *enc_ctx; AVCodecContext *enc_ctx;
AVCodecParameters *ref_par; /* associated input codec parameters with encoders options applied */
const AVCodec *enc;
int64_t max_frames;
AVFrame *filtered_frame; AVFrame *filtered_frame;
AVFrame *last_frame; AVFrame *last_frame;
AVFrame *sq_frame;
AVPacket *pkt; AVPacket *pkt;
int64_t last_dropped; int64_t last_dropped;
int64_t last_nb0_frames[3]; int64_t last_nb0_frames[3];
void *hwaccel_ctx;
/* video only */ /* video only */
AVRational frame_rate; AVRational frame_rate;
AVRational max_frame_rate; AVRational max_frame_rate;
enum VideoSyncMethod vsync_method; enum VideoSyncMethod vsync_method;
int is_cfr; int is_cfr;
const char *fps_mode;
int force_fps; int force_fps;
int top_field_first; int top_field_first;
#if FFMPEG_ROTATION_METADATA
int rotate_overridden; int rotate_overridden;
#endif
int autoscale; int autoscale;
int bitexact;
int bits_per_raw_sample; int bits_per_raw_sample;
#if FFMPEG_ROTATION_METADATA
double rotate_override_value; double rotate_override_value;
#endif
AVRational frame_aspect_ratio; AVRational frame_aspect_ratio;
/* forced key frames */ KeyframeForceCtx kf;
int64_t forced_kf_ref_pts;
int64_t *forced_kf_pts;
int forced_kf_count;
int forced_kf_index;
char *forced_keyframes;
AVExpr *forced_keyframes_pexpr;
double forced_keyframes_expr_const_values[FKF_NB];
int dropped_keyframe;
/* audio only */ /* audio only */
#if FFMPEG_OPT_MAP_CHANNEL
int *audio_channels_map; /* list of the channels id to pick from the source stream */ int *audio_channels_map; /* list of the channels id to pick from the source stream */
int audio_channels_mapped; /* number of channels in audio_channels_map */ int audio_channels_mapped; /* number of channels in audio_channels_map */
#endif
char *logfile_prefix; char *logfile_prefix;
FILE *logfile; FILE *logfile;
@ -569,7 +688,6 @@ typedef struct OutputStream {
char *apad; char *apad;
OSTFinished finished; /* no more packets should be written for this stream */ OSTFinished finished; /* no more packets should be written for this stream */
int unavailable; /* true if the steram is unavailable (possibly temporarily) */ int unavailable; /* true if the steram is unavailable (possibly temporarily) */
int stream_copy;
// init_output_stream() has been called for this stream // init_output_stream() has been called for this stream
// The encoder and the bitstream filters have been initialized and the stream // The encoder and the bitstream filters have been initialized and the stream
@ -582,15 +700,16 @@ typedef struct OutputStream {
int streamcopy_started; int streamcopy_started;
int copy_initial_nonkeyframes; int copy_initial_nonkeyframes;
int copy_prior_start; int copy_prior_start;
char *disposition;
int keep_pix_fmt; int keep_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets written // combined size of all the packets sent to the muxer
uint64_t data_size; uint64_t data_size_mux;
// combined size of all the packets received from the encoder
uint64_t data_size_enc;
// number of packets send to the muxer // number of packets send to the muxer
uint64_t packets_written; atomic_uint_least64_t packets_written;
// number of frames/samples sent to the encoder // number of frames/samples sent to the encoder
uint64_t frames_encoded; uint64_t frames_encoded;
uint64_t samples_encoded; uint64_t samples_encoded;
@ -600,51 +719,48 @@ typedef struct OutputStream {
/* packet quality factor */ /* packet quality factor */
int quality; int quality;
int max_muxing_queue_size;
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* packet picture type */ /* packet picture type */
int pict_type; int pict_type;
/* frame encode sum of squared error values */ /* frame encode sum of squared error values */
int64_t error[4]; int64_t error[4];
int sq_idx_encode;
int sq_idx_mux;
EncStats enc_stats_pre;
EncStats enc_stats_post;
/*
* bool on whether this stream should be utilized for splitting
* subtitles utilizing fix_sub_duration at random access points.
*/
unsigned int fix_sub_duration_heartbeat;
} OutputStream; } OutputStream;
typedef struct OutputFile { typedef struct OutputFile {
const AVClass *clazz;
int index; int index;
const AVOutputFormat *format; const AVOutputFormat *format;
const char *url;
OutputStream **streams;
int nb_streams;
SyncQueue *sq_encode;
AVFormatContext *ctx;
AVDictionary *opts;
int ost_index; /* index of the first stream in output_streams */
int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units
int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units
uint64_t limit_filesize; /* filesize limit expressed in bytes */
int shortest; int shortest;
int bitexact;
int header_written;
} OutputFile; } OutputFile;
extern __thread InputStream **input_streams;
extern __thread int nb_input_streams;
extern __thread InputFile **input_files; extern __thread InputFile **input_files;
extern __thread int nb_input_files; extern __thread int nb_input_files;
extern __thread OutputStream **output_streams;
extern __thread int nb_output_streams;
extern __thread OutputFile **output_files; extern __thread OutputFile **output_files;
extern __thread int nb_output_files; extern __thread int nb_output_files;
@ -658,13 +774,10 @@ extern __thread float audio_drift_threshold;
extern __thread float dts_delta_threshold; extern __thread float dts_delta_threshold;
extern __thread float dts_error_threshold; extern __thread float dts_error_threshold;
extern __thread int audio_volume;
extern __thread int audio_sync_method;
extern __thread enum VideoSyncMethod video_sync_method; extern __thread enum VideoSyncMethod video_sync_method;
extern __thread float frame_drop_threshold; extern __thread float frame_drop_threshold;
extern __thread int do_benchmark; extern __thread int do_benchmark;
extern __thread int do_benchmark_all; extern __thread int do_benchmark_all;
extern __thread int do_deinterlace;
extern __thread int do_hex_dump; extern __thread int do_hex_dump;
extern __thread int do_pkt_dump; extern __thread int do_pkt_dump;
extern __thread int copy_ts; extern __thread int copy_ts;
@ -677,7 +790,6 @@ extern __thread int print_stats;
extern __thread int64_t stats_period; extern __thread int64_t stats_period;
extern __thread int qp_hist; extern __thread int qp_hist;
extern __thread int stdin_interaction; extern __thread int stdin_interaction;
extern __thread int frame_bits_per_raw_sample;
extern __thread AVIOContext *progress_avio; extern __thread AVIOContext *progress_avio;
extern __thread float max_error_rate; extern __thread float max_error_rate;
@ -688,15 +800,20 @@ extern __thread int auto_conversion_filters;
extern __thread const AVIOInterruptCB int_cb; extern __thread const AVIOInterruptCB int_cb;
#if CONFIG_QSV
extern __thread char *qsv_device;
#endif
extern __thread HWDevice *filter_hw_device; extern __thread HWDevice *filter_hw_device;
extern __thread int want_sdp;
extern __thread unsigned nb_output_dumped; extern __thread unsigned nb_output_dumped;
extern __thread int main_ffmpeg_return_code; extern __thread int main_ffmpeg_return_code;
extern __thread int ignore_unknown_streams;
extern __thread int copy_unknown_streams;
extern __thread int recast_media;
#if FFMPEG_OPT_PSNR
extern __thread int do_psnr;
#endif
void term_init(void); void term_init(void);
void term_exit(void); void term_exit(void);
@ -705,7 +822,12 @@ void show_usage(void);
void remove_avoptions(AVDictionary **a, AVDictionary *b); void remove_avoptions(AVDictionary **a, AVDictionary *b);
void assert_avoptions(AVDictionary *m); void assert_avoptions(AVDictionary *m);
int guess_input_channel_layout(InputStream *ist); void assert_file_overwrite(const char *filename);
char *file_read(const char *filename);
AVDictionary *strip_specifiers(const AVDictionary *dict);
const AVCodec *find_codec_or_die(void *logctx, const char *name,
enum AVMediaType type, int encoder);
int parse_and_set_vsync(const char *arg, int *vsync_var, int file_idx, int st_idx, int is_global);
int configure_filtergraph(FilterGraph *fg); int configure_filtergraph(FilterGraph *fg);
void check_filter_outputs(void); void check_filter_outputs(void);
@ -719,8 +841,9 @@ int ifilter_parameters_from_frame(InputFilter *ifilter, const AVFrame *frame);
int ffmpeg_parse_options(int argc, char **argv); int ffmpeg_parse_options(int argc, char **argv);
int videotoolbox_init(AVCodecContext *s); void enc_stats_write(OutputStream *ost, EncStats *es,
int qsv_init(AVCodecContext *s); const AVFrame *frame, const AVPacket *pkt,
uint64_t frame_num);
HWDevice *hw_device_get_by_name(const char *name); HWDevice *hw_device_get_by_name(const char *name);
int hw_device_init_from_string(const char *arg, HWDevice **dev); int hw_device_init_from_string(const char *arg, HWDevice **dev);
@ -732,15 +855,58 @@ int hw_device_setup_for_filter(FilterGraph *fg);
int hwaccel_decode_init(AVCodecContext *avctx); int hwaccel_decode_init(AVCodecContext *avctx);
/* open the muxer when all the streams are initialized */ /*
int of_check_init(OutputFile *of); * Initialize muxing state for the given stream, should be called
* after the codec/streamcopy setup has been done.
*
* Open the muxer once all the streams have been initialized.
*/
int of_stream_init(OutputFile *of, OutputStream *ost);
int of_write_trailer(OutputFile *of); int of_write_trailer(OutputFile *of);
int of_open(const OptionsContext *o, const char *filename);
void of_close(OutputFile **pof); void of_close(OutputFile **pof);
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, void of_enc_stats_close(void);
int unqueue);
/*
* Send a single packet to the output, applying any bitstream filters
* associated with the output stream. This may result in any number
* of packets actually being written, depending on what bitstream
* filters are applied. The supplied packet is consumed and will be
* blank (as if newly-allocated) when this function returns.
*
* If eof is set, instead indicate EOF to all bitstream filters and
* therefore flush any delayed packets to the output. A blank packet
* must be supplied in this case.
*/
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof);
int64_t of_filesize(OutputFile *of);
int ifile_open(const OptionsContext *o, const char *filename);
void ifile_close(InputFile **f);
/**
* Get next input packet from the demuxer.
*
* @param pkt the packet is written here when this function returns 0
* @return
* - 0 when a packet has been read successfully
* - 1 when stream end was reached, but the stream is looped;
* caller should flush decoders and read from this demuxer again
* - a negative error code on failure
*/
int ifile_get_packet(InputFile *f, AVPacket **pkt);
/* iterate over all input streams in all input files;
* pass NULL to start iteration */
InputStream *ist_iter(InputStream *prev);
extern const char * const opt_name_codec_names[];
extern const char * const opt_name_codec_tags[];
extern const char * const opt_name_frame_rates[];
extern const char * const opt_name_top_field_first[];
void set_report_callback(void (*callback)(int, float, float, int64_t, int, double, double)); void set_report_callback(void (*callback)(int, float, float, int64_t, double, double, double));
void cancel_operation(long id); void cancel_operation(long id);
#endif /* FFTOOLS_FFMPEG_H */ #endif /* FFTOOLS_FFMPEG_H */

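Note: with the `input_streams`/`nb_input_streams` globals removed and each `InputFile` now owning its `streams` array, code walks every input stream in every input file through the new `ist_iter()` iterator (the filtergraph code below does exactly this). A minimal sketch; `count_audio_input_streams` is a hypothetical helper and `fftools_ffmpeg.h` is assumed to be included:

```c
#include "fftools_ffmpeg.h"   /* declares InputStream and ist_iter() */

/* Hypothetical helper: count audio input streams across all input files by
 * iterating with ist_iter(NULL) / ist_iter(prev) instead of indexing the
 * removed input_streams[] global. */
static int count_audio_input_streams(void) {
    int n = 0;
    for (InputStream *ist = ist_iter(NULL); ist; ist = ist_iter(ist))
        if (ist->dec_ctx && ist->dec_ctx->codec_type == AVMEDIA_TYPE_AUDIO)
            n++;
    return n;
}
```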
@ -1,6 +1,7 @@
/* /*
* ffmpeg filter configuration * ffmpeg filter configuration
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -24,6 +25,12 @@
* We manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * We manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 08.2018 * 08.2018
@ -69,8 +76,9 @@ static const enum AVPixelFormat *get_compliance_normal_pix_fmts(const AVCodec *c
} }
} }
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx, enum AVPixelFormat
const AVCodec *codec, enum AVPixelFormat target) choose_pixel_fmt(const AVCodec *codec, enum AVPixelFormat target,
int strict_std_compliance)
{ {
if (codec && codec->pix_fmts) { if (codec && codec->pix_fmts) {
const enum AVPixelFormat *p = codec->pix_fmts; const enum AVPixelFormat *p = codec->pix_fmts;
@ -79,7 +87,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
int has_alpha = desc ? desc->nb_components % 2 == 0 : 0; int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
enum AVPixelFormat best= AV_PIX_FMT_NONE; enum AVPixelFormat best= AV_PIX_FMT_NONE;
if (enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(codec, p); p = get_compliance_normal_pix_fmts(codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -106,6 +114,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint) static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
{ {
OutputStream *ost = ofilter->ost; OutputStream *ost = ofilter->ost;
AVCodecContext *enc = ost->enc_ctx;
const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0); const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
if (strict_dict) if (strict_dict)
// used by choose_pixel_fmt() and below // used by choose_pixel_fmt() and below
@ -119,13 +128,14 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt); return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt);
} }
if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) { if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) {
return av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)); return av_get_pix_fmt_name(choose_pixel_fmt(enc->codec, enc->pix_fmt,
} else if (ost->enc && ost->enc->pix_fmts) { ost->enc_ctx->strict_std_compliance));
} else if (enc->codec->pix_fmts) {
const enum AVPixelFormat *p; const enum AVPixelFormat *p;
p = ost->enc->pix_fmts; p = enc->codec->pix_fmts;
if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(ost->enc, p); p = get_compliance_normal_pix_fmts(enc->codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -133,7 +143,7 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|'); av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|');
} }
if (!av_bprint_is_complete(bprint)) if (!av_bprint_is_complete(bprint))
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return bprint->str; return bprint->str;
} else } else
return NULL; return NULL;
@ -197,7 +207,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
InputFilter *ifilter; InputFilter *ifilter;
if (!fg) if (!fg)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
fg->index = nb_filtergraphs; fg->index = nb_filtergraphs;
ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs); ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs);
@ -214,7 +224,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
@ -238,7 +248,7 @@ static char *describe_filter_link(FilterGraph *fg, AVFilterInOut *inout, int in)
res = av_asprintf("%s:%s", ctx->filter->name, res = av_asprintf("%s:%s", ctx->filter->name,
avfilter_pad_get_name(pads, inout->pad_idx)); avfilter_pad_get_name(pads, inout->pad_idx));
if (!res) if (!res)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return res; return res;
} }
@ -285,7 +295,7 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
"matches no streams.\n", p, fg->graph_desc); "matches no streams.\n", p, fg->graph_desc);
exit_program(1); exit_program(1);
} }
ist = input_streams[input_files[file_idx]->ist_index + st->index]; ist = input_files[file_idx]->streams[st->index];
if (ist->user_set_discard == AVDISCARD_ALL) { if (ist->user_set_discard == AVDISCARD_ALL) {
av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s " av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s "
"matches a disabled input stream.\n", p, fg->graph_desc); "matches a disabled input stream.\n", p, fg->graph_desc);
@ -293,14 +303,13 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
} }
} else { } else {
/* find the first unused stream of corresponding type */ /* find the first unused stream of corresponding type */
for (i = 0; i < nb_input_streams; i++) { for (ist = ist_iter(NULL); ist; ist = ist_iter(ist)) {
ist = input_streams[i];
if (ist->user_set_discard == AVDISCARD_ALL) if (ist->user_set_discard == AVDISCARD_ALL)
continue; continue;
if (ist->dec_ctx->codec_type == type && ist->discard) if (ist->dec_ctx->codec_type == type && ist->discard)
break; break;
} }
if (i == nb_input_streams) { if (!ist) {
av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for " av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for "
"unlabeled input pad %d on filter %s\n", in->pad_idx, "unlabeled input pad %d on filter %s\n", in->pad_idx,
in->filter_ctx->name); in->filter_ctx->name);
@ -323,12 +332,162 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
} }
static int read_binary(const char *path, uint8_t **data, int *len)
{
AVIOContext *io = NULL;
int64_t fsize;
int ret;
*data = NULL;
*len = 0;
ret = avio_open2(&io, path, AVIO_FLAG_READ, &int_cb, NULL);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s': %s\n",
path, av_err2str(ret));
return ret;
}
fsize = avio_size(io);
if (fsize < 0 || fsize > INT_MAX) {
av_log(NULL, AV_LOG_ERROR, "Cannot obtain size of file %s\n", path);
ret = AVERROR(EIO);
goto fail;
}
*data = av_malloc(fsize);
if (!*data) {
ret = AVERROR(ENOMEM);
goto fail;
}
ret = avio_read(io, *data, fsize);
if (ret != fsize) {
av_log(NULL, AV_LOG_ERROR, "Error reading file %s\n", path);
ret = ret < 0 ? ret : AVERROR(EIO);
goto fail;
}
*len = fsize;
return 0;
fail:
avio_close(io);
av_freep(data);
*len = 0;
return ret;
}
static int filter_opt_apply(AVFilterContext *f, const char *key, const char *val)
{
const AVOption *o = NULL;
int ret;
ret = av_opt_set(f, key, val, AV_OPT_SEARCH_CHILDREN);
if (ret >= 0)
return 0;
if (ret == AVERROR_OPTION_NOT_FOUND && key[0] == '/')
o = av_opt_find(f, key + 1, NULL, 0, AV_OPT_SEARCH_CHILDREN);
if (!o)
goto err_apply;
// key is a valid option name prefixed with '/'
// interpret value as a path from which to load the actual option value
key++;
if (o->type == AV_OPT_TYPE_BINARY) {
uint8_t *data;
int len;
ret = read_binary(val, &data, &len);
if (ret < 0)
goto err_load;
ret = av_opt_set_bin(f, key, data, len, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
} else {
char *data = file_read(val);
if (!data) {
ret = AVERROR(EIO);
goto err_load;
}
ret = av_opt_set(f, key, data, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
}
if (ret < 0)
goto err_apply;
return 0;
err_apply:
av_log(NULL, AV_LOG_ERROR,
"Error applying option '%s' to filter '%s': %s\n",
key, f->filter->name, av_err2str(ret));
return ret;
err_load:
av_log(NULL, AV_LOG_ERROR,
"Error loading value for option '%s' from file '%s'\n",
key, val);
return ret;
}
static int graph_opts_apply(AVFilterGraphSegment *seg)
{
for (size_t i = 0; i < seg->nb_chains; i++) {
AVFilterChain *ch = seg->chains[i];
for (size_t j = 0; j < ch->nb_filters; j++) {
AVFilterParams *p = ch->filters[j];
const AVDictionaryEntry *e = NULL;
av_assert0(p->filter);
while ((e = av_dict_iterate(p->opts, e))) {
int ret = filter_opt_apply(p->filter, e->key, e->value);
if (ret < 0)
return ret;
}
av_dict_free(&p->opts);
}
}
return 0;
}
static int graph_parse(AVFilterGraph *graph, const char *desc,
AVFilterInOut **inputs, AVFilterInOut **outputs)
{
AVFilterGraphSegment *seg;
int ret;
ret = avfilter_graph_segment_parse(graph, desc, 0, &seg);
if (ret < 0)
return ret;
ret = avfilter_graph_segment_create_filters(seg, 0);
if (ret < 0)
goto fail;
ret = graph_opts_apply(seg);
if (ret < 0)
goto fail;
ret = avfilter_graph_segment_apply(seg, 0, inputs, outputs);
fail:
avfilter_graph_segment_free(&seg);
return ret;
}
int init_complex_filtergraph(FilterGraph *fg) int init_complex_filtergraph(FilterGraph *fg)
{ {
AVFilterInOut *inputs, *outputs, *cur; AVFilterInOut *inputs, *outputs, *cur;
@ -342,7 +501,7 @@ int init_complex_filtergraph(FilterGraph *fg)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
graph->nb_threads = 1; graph->nb_threads = 1;
ret = avfilter_graph_parse2(graph, fg->graph_desc, &inputs, &outputs); ret = graph_parse(graph, fg->graph_desc, &inputs, &outputs);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
@ -467,8 +626,7 @@ static int configure_output_video_filter(FilterGraph *fg, OutputFilter *ofilter,
snprintf(args, sizeof(args), "%d:%d", snprintf(args, sizeof(args), "%d:%d",
ofilter->width, ofilter->height); ofilter->width, ofilter->height);
while ((e = av_dict_get(ost->sws_dict, "", e, while ((e = av_dict_iterate(ost->sws_dict, e))) {
AV_DICT_IGNORE_SUFFIX))) {
av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value); av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value);
} }
@ -575,6 +733,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
pad_idx = 0; \ pad_idx = 0; \
} while (0) } while (0)
av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED); av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED);
#if FFMPEG_OPT_MAP_CHANNEL
if (ost->audio_channels_mapped) { if (ost->audio_channels_mapped) {
AVChannelLayout mapped_layout = { 0 }; AVChannelLayout mapped_layout = { 0 };
int i; int i;
@ -587,6 +746,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
AUTO_INSERT_FILTER("-map_channel", "pan", args.str); AUTO_INSERT_FILTER("-map_channel", "pan", args.str);
av_bprint_clear(&args); av_bprint_clear(&args);
} }
#endif
if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC) if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC)
av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels); av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels);
@ -620,11 +780,11 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
if (ost->apad && of->shortest) { if (ost->apad && of->shortest) {
int i; int i;
for (i=0; i<of->ctx->nb_streams; i++) for (i = 0; i < of->nb_streams; i++)
if (of->ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) if (of->streams[i]->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
break; break;
if (i<of->ctx->nb_streams) { if (i < of->nb_streams) {
AUTO_INSERT_FILTER("-apad", "apad", ost->apad); AUTO_INSERT_FILTER("-apad", "apad", ost->apad);
} }
} }
@ -751,7 +911,7 @@ static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
} }
if (!fr.num) if (!fr.num)
fr = av_guess_frame_rate(input_files[ist->file_index]->ctx, ist->st, NULL); fr = ist->framerate_guessed;
if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) { if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) {
ret = sub2video_prepare(ist, ifilter); ret = sub2video_prepare(ist, ifilter);
@ -904,40 +1064,6 @@ static int configure_input_audio_filter(FilterGraph *fg, InputFilter *ifilter,
last_filter = filt_ctx; \ last_filter = filt_ctx; \
} while (0) } while (0)
if (audio_sync_method > 0) {
char args[256] = {0};
av_strlcatf(args, sizeof(args), "async=%d", audio_sync_method);
if (audio_drift_threshold != 0.1)
av_strlcatf(args, sizeof(args), ":min_hard_comp=%f", audio_drift_threshold);
if (!fg->reconfiguration)
av_strlcatf(args, sizeof(args), ":first_pts=0");
AUTO_INSERT_FILTER_INPUT("-async", "aresample", args);
}
// if (ost->audio_channels_mapped) {
// int i;
// AVBPrint pan_buf;
// av_bprint_init(&pan_buf, 256, 8192);
// av_bprintf(&pan_buf, "0x%"PRIx64,
// av_get_default_channel_layout(ost->audio_channels_mapped));
// for (i = 0; i < ost->audio_channels_mapped; i++)
// if (ost->audio_channels_map[i] != -1)
// av_bprintf(&pan_buf, ":c%d=c%d", i, ost->audio_channels_map[i]);
// AUTO_INSERT_FILTER_INPUT("-map_channel", "pan", pan_buf.str);
// av_bprint_finalize(&pan_buf, NULL);
// }
if (audio_volume != 256) {
char args[256];
av_log(NULL, AV_LOG_WARNING, "-vol has been deprecated. Use the volume "
"audio filter instead.\n");
snprintf(args, sizeof(args), "%f", audio_volume / 256.);
AUTO_INSERT_FILTER_INPUT("-vol", "volume", args);
}
snprintf(name, sizeof(name), "trim for input stream %d:%d", snprintf(name, sizeof(name), "trim for input stream %d:%d",
ist->file_index, ist->st->index); ist->file_index, ist->st->index);
if (copy_ts) { if (copy_ts) {
@ -1020,44 +1146,39 @@ int configure_filtergraph(FilterGraph *fg)
if (simple) { if (simple) {
OutputStream *ost = fg->outputs[0]->ost; OutputStream *ost = fg->outputs[0]->ost;
char args[512];
const AVDictionaryEntry *e = NULL;
if (filter_nbthreads) { if (filter_nbthreads) {
ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0); ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
} else { } else {
const AVDictionaryEntry *e = NULL;
e = av_dict_get(ost->encoder_opts, "threads", NULL, 0); e = av_dict_get(ost->encoder_opts, "threads", NULL, 0);
if (e) if (e)
av_opt_set(fg->graph, "threads", e->value, 0); av_opt_set(fg->graph, "threads", e->value, 0);
} }
args[0] = 0; if (av_dict_count(ost->sws_dict)) {
e = NULL; ret = av_dict_get_string(ost->sws_dict,
while ((e = av_dict_get(ost->sws_dict, "", e, &fg->graph->scale_sws_opts,
AV_DICT_IGNORE_SUFFIX))) { '=', ':');
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); if (ret < 0)
} goto fail;
if (strlen(args)) {
args[strlen(args)-1] = 0;
fg->graph->scale_sws_opts = av_strdup(args);
} }
args[0] = 0; if (av_dict_count(ost->swr_opts)) {
e = NULL; char *args;
while ((e = av_dict_get(ost->swr_opts, "", e, ret = av_dict_get_string(ost->swr_opts, &args, '=', ':');
AV_DICT_IGNORE_SUFFIX))) { if (ret < 0)
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); goto fail;
}
if (strlen(args))
args[strlen(args)-1] = 0;
av_opt_set(fg->graph, "aresample_swr_opts", args, 0); av_opt_set(fg->graph, "aresample_swr_opts", args, 0);
av_free(args);
}
} else { } else {
fg->graph->nb_threads = filter_complex_nbthreads; fg->graph->nb_threads = filter_complex_nbthreads;
} }
if ((ret = avfilter_graph_parse2(fg->graph, graph_desc, &inputs, &outputs)) < 0) if ((ret = graph_parse(fg->graph, graph_desc, &inputs, &outputs)) < 0)
goto fail; goto fail;
ret = hw_device_setup_for_filter(fg); ret = hw_device_setup_for_filter(fg);
@ -1131,16 +1252,8 @@ int configure_filtergraph(FilterGraph *fg)
for (i = 0; i < fg->nb_outputs; i++) { for (i = 0; i < fg->nb_outputs; i++) {
OutputStream *ost = fg->outputs[i]->ost; OutputStream *ost = fg->outputs[i]->ost;
if (!ost->enc) { if (ost->enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO &&
/* identical to the same check in ffmpeg.c, needed because !(ost->enc_ctx->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
complex filter graphs are initialized earlier */
av_log(NULL, AV_LOG_ERROR, "Encoder (codec %s) not found for output stream #%d:%d\n",
avcodec_get_name(ost->st->codecpar->codec_id), ost->file_index, ost->index);
ret = AVERROR(EINVAL);
goto fail;
}
if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
!(ost->enc->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
av_buffersink_set_frame_size(ost->filter->filter, av_buffersink_set_frame_size(ost->filter->filter,
ost->enc_ctx->frame_size); ost->enc_ctx->frame_size);
} }
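In the configure_filtergraph() hunks above, ost->sws_dict and ost->swr_opts are now serialized with av_dict_get_string() instead of hand-building the option string with av_strlcatf(). A standalone sketch of that call (keys and values are hypothetical):

    #include <stdio.h>
    #include "libavutil/dict.h"
    #include "libavutil/mem.h"

    int main(void)
    {
        AVDictionary *sws_dict = NULL;
        char *opts = NULL;
        int ret;

        av_dict_set(&sws_dict, "flags", "lanczos", 0);
        av_dict_set(&sws_dict, "param0", "3", 0);

        /* same separators as the scale_sws_opts / aresample_swr_opts strings above */
        ret = av_dict_get_string(sws_dict, &opts, '=', ':');
        if (ret >= 0)
            printf("%s\n", opts);   /* e.g. "flags=lanczos:param0=3" */

        av_freep(&opts);
        av_dict_free(&sws_dict);
        return ret < 0;
    }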

@ -1,5 +1,6 @@
/* /*
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2019 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -23,6 +24,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 12.2019 * 12.2019
@ -357,7 +364,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (ist->hwaccel_id == HWACCEL_AUTO) { if (ist->hwaccel_id == HWACCEL_AUTO) {
ist->hwaccel_device_type = dev->type; ist->hwaccel_device_type = dev->type;
} else if (ist->hwaccel_device_type != dev->type) { } else if (ist->hwaccel_device_type != dev->type) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "Invalid hwaccel device " av_log(NULL, AV_LOG_ERROR, "Invalid hwaccel device "
"specified for decoder: device %s of type %s is not " "specified for decoder: device %s of type %s is not "
"usable with hwaccel %s.\n", dev->name, "usable with hwaccel %s.\n", dev->name,
av_hwdevice_get_type_name(dev->type), av_hwdevice_get_type_name(dev->type),
@ -408,7 +415,7 @@ int hw_device_setup_for_decode(InputStream *ist)
type = config->device_type; type = config->device_type;
dev = hw_device_get_by_type(type); dev = hw_device_get_by_type(type);
if (dev) { if (dev) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with existing device %s.\n", "hwaccel type %s with existing device %s.\n",
av_hwdevice_get_type_name(type), dev->name); av_hwdevice_get_type_name(type), dev->name);
} }
@ -426,12 +433,12 @@ int hw_device_setup_for_decode(InputStream *ist)
continue; continue;
} }
if (ist->hwaccel_device) { if (ist->hwaccel_device) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new device created " "hwaccel type %s with new device created "
"from %s.\n", av_hwdevice_get_type_name(type), "from %s.\n", av_hwdevice_get_type_name(type),
ist->hwaccel_device); ist->hwaccel_device);
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new default device.\n", "hwaccel type %s with new default device.\n",
av_hwdevice_get_type_name(type)); av_hwdevice_get_type_name(type));
} }
@ -439,7 +446,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (dev) { if (dev) {
ist->hwaccel_device_type = type; ist->hwaccel_device_type = type;
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Auto hwaccel " av_log(NULL, AV_LOG_INFO, "Auto hwaccel "
"disabled: no device found.\n"); "disabled: no device found.\n");
ist->hwaccel_id = HWACCEL_NONE; ist->hwaccel_id = HWACCEL_NONE;
return 0; return 0;
@ -447,7 +454,7 @@ int hw_device_setup_for_decode(InputStream *ist)
} }
if (!dev) { if (!dev) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "No device available " av_log(NULL, AV_LOG_ERROR, "No device available "
"for decoder: device type %s needed for codec %s.\n", "for decoder: device type %s needed for codec %s.\n",
av_hwdevice_get_type_name(type), ist->dec->name); av_hwdevice_get_type_name(type), ist->dec->name);
return err; return err;
@ -479,7 +486,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
} }
for (i = 0;; i++) { for (i = 0;; i++) {
config = avcodec_get_hw_config(ost->enc, i); config = avcodec_get_hw_config(ost->enc_ctx->codec, i);
if (!config) if (!config)
break; break;
@ -490,7 +497,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input "
"frames context (format %s) with %s encoder.\n", "frames context (format %s) with %s encoder.\n",
av_get_pix_fmt_name(ost->enc_ctx->pix_fmt), av_get_pix_fmt_name(ost->enc_ctx->pix_fmt),
ost->enc->name); ost->enc_ctx->codec->name);
ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref); ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref);
if (!ost->enc_ctx->hw_frames_ctx) if (!ost->enc_ctx->hw_frames_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
@ -505,7 +512,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
if (dev) { if (dev) {
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s "
"(type %s) with %s encoder.\n", dev->name, "(type %s) with %s encoder.\n", dev->name,
av_hwdevice_get_type_name(dev->type), ost->enc->name); av_hwdevice_get_type_name(dev->type), ost->enc_ctx->codec->name);
ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref); ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref);
if (!ost->enc_ctx->hw_device_ctx) if (!ost->enc_ctx->hw_device_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);

@ -1,6 +1,7 @@
/* /*
* This file is part of FFmpeg. * This file is part of FFmpeg.
* Copyright (c) 2022 Taner Sener * Copyright (c) 2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* FFmpeg is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
@ -22,108 +23,101 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library. * by us to develop the ffmpeg-kit library.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp marked as thread-local
 * - ms_from_ost migrated from ffmpeg_mux.h and marked as non-static
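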
*
* ffmpeg-kit changes by Taner Sener * ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
* -------------------------------------------------------- * --------------------------------------------------------
* - fftools_ prefix added to fftools headers * - fftools_ prefix added to fftools headers
* - using main_ffmpeg_return_code instead of main_return_code * - using main_ffmpeg_return_code instead of main_return_code
* - printf replaced with av_log statements
*/ */
#include <stdatomic.h>
#include <stdio.h> #include <stdio.h>
#include <string.h> #include <string.h>
#include "fftools_ffmpeg.h" #include "fftools_ffmpeg.h"
#include "fftools_ffmpeg_mux.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
#include "fftools_thread_queue.h"
#include "libavutil/fifo.h" #include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/log.h" #include "libavutil/log.h"
#include "libavutil/mem.h" #include "libavutil/mem.h"
#include "libavutil/timestamp.h" #include "libavutil/timestamp.h"
#include "libavutil/thread.h"
#include "libavcodec/packet.h" #include "libavcodec/packet.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
static void close_all_output_streams(OutputStream *ost, OSTFinished this_stream, OSTFinished others) __thread int want_sdp = 1;
MuxStream *ms_from_ost(OutputStream *ost)
{ {
int i; return (MuxStream*)ost;
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost2 = output_streams[i];
ost2->finished |= ost == ost2 ? this_stream : others;
}
} }
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, static Muxer *mux_from_of(OutputFile *of)
int unqueue)
{ {
AVFormatContext *s = of->ctx; return (Muxer*)of;
AVStream *st = ost->st; }
int ret;
/* static int64_t filesize(AVIOContext *pb)
* Audio encoders may split the packets -- #frames in != #packets out. {
* But there is no reordering, so we can limit the number of output packets int64_t ret = -1;
* by simply dropping them here.
* Counting encoded video frames needs to be done separately because of if (pb) {
* reordering, see do_video_out(). ret = avio_size(pb);
* Do not count the packet when unqueued because it has been counted when queued. if (ret <= 0) // FIXME improve avio_size() so it works with non seekable output too
*/ ret = avio_tell(pb);
if (!(st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->encoding_needed) && !unqueue) {
if (ost->frame_number >= ost->max_frames) {
av_packet_unref(pkt);
return;
}
ost->frame_number++;
} }
if (!of->header_written) { return ret;
AVPacket *tmp_pkt; }
/* the muxer is not initialized yet, buffer the packet */
if (!av_fifo_can_write(ost->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ost->muxing_queue);
unsigned int are_we_over_size =
(ost->muxing_queue_data_size + pkt->size) > ost->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ost->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) { static int write_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
av_log(NULL, AV_LOG_ERROR, {
"Too many packets buffered for output stream %d:%d.\n", MuxStream *ms = ms_from_ost(ost);
ost->file_index, ost->st->index); AVFormatContext *s = mux->fc;
exit_program(1); AVStream *st = ost->st;
} int64_t fs;
ret = av_fifo_grow2(ost->muxing_queue, new_size - cur_size); uint64_t frame_num;
if (ret < 0) int ret;
exit_program(1);
} fs = filesize(s->pb);
ret = av_packet_make_refcounted(pkt); atomic_store(&mux->last_filesize, fs);
if (ret < 0) if (fs >= mux->limit_filesize) {
exit_program(1); ret = AVERROR_EOF;
tmp_pkt = av_packet_alloc(); goto fail;
if (!tmp_pkt)
exit_program(1);
av_packet_move_ref(tmp_pkt, pkt);
ost->muxing_queue_data_size += tmp_pkt->size;
av_fifo_write(ost->muxing_queue, &tmp_pkt, 1);
return;
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP) || if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP)
(st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audio_sync_method < 0))
pkt->pts = pkt->dts = AV_NOPTS_VALUE; pkt->pts = pkt->dts = AV_NOPTS_VALUE;
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
if (ost->frame_rate.num && ost->is_cfr) { if (ost->frame_rate.num && ost->is_cfr) {
if (pkt->duration > 0) if (pkt->duration > 0)
av_log(NULL, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n"); av_log(ost, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n");
pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate), pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate),
ost->mux_timebase); pkt->time_base);
} }
} }
av_packet_rescale_ts(pkt, ost->mux_timebase, ost->st->time_base); av_packet_rescale_ts(pkt, pkt->time_base, ost->st->time_base);
pkt->time_base = ost->st->time_base;
if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) { if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) {
if (pkt->dts != AV_NOPTS_VALUE && if (pkt->dts != AV_NOPTS_VALUE &&
@ -133,25 +127,26 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
pkt->dts, pkt->pts, pkt->dts, pkt->pts,
ost->file_index, ost->st->index); ost->file_index, ost->st->index);
pkt->pts = pkt->pts =
pkt->dts = pkt->pts + pkt->dts + ost->last_mux_dts + 1 pkt->dts = pkt->pts + pkt->dts + ms->last_mux_dts + 1
- FFMIN3(pkt->pts, pkt->dts, ost->last_mux_dts + 1) - FFMIN3(pkt->pts, pkt->dts, ms->last_mux_dts + 1)
- FFMAX3(pkt->pts, pkt->dts, ost->last_mux_dts + 1); - FFMAX3(pkt->pts, pkt->dts, ms->last_mux_dts + 1);
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) && if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) &&
pkt->dts != AV_NOPTS_VALUE && pkt->dts != AV_NOPTS_VALUE &&
ost->last_mux_dts != AV_NOPTS_VALUE) { ms->last_mux_dts != AV_NOPTS_VALUE) {
int64_t max = ost->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT); int64_t max = ms->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT);
if (pkt->dts < max) { if (pkt->dts < max) {
int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG; int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG;
if (exit_on_error) if (exit_on_error)
loglevel = AV_LOG_ERROR; loglevel = AV_LOG_ERROR;
av_log(s, loglevel, "Non-monotonous DTS in output stream " av_log(s, loglevel, "Non-monotonous DTS in output stream "
"%d:%d; previous: %"PRId64", current: %"PRId64"; ", "%d:%d; previous: %"PRId64", current: %"PRId64"; ",
ost->file_index, ost->st->index, ost->last_mux_dts, pkt->dts); ost->file_index, ost->st->index, ms->last_mux_dts, pkt->dts);
if (exit_on_error) { if (exit_on_error) {
av_log(NULL, AV_LOG_FATAL, "aborting.\n"); ret = AVERROR(EINVAL);
exit_program(1); goto fail;
} }
av_log(s, loglevel, "changing to %"PRId64". This may result " av_log(s, loglevel, "changing to %"PRId64". This may result "
"in incorrect timestamps in the output file.\n", "in incorrect timestamps in the output file.\n",
max); max);
@ -161,17 +156,17 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
} }
} }
} }
ost->last_mux_dts = pkt->dts; ms->last_mux_dts = pkt->dts;
ost->data_size += pkt->size; ost->data_size_mux += pkt->size;
ost->packets_written++; frame_num = atomic_fetch_add(&ost->packets_written, 1);
pkt->stream_index = ost->index; pkt->stream_index = ost->index;
if (debug_ts) { if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "muxer <- type:%s " av_log(ost, AV_LOG_INFO, "muxer <- type:%s "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n", "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n",
av_get_media_type_string(ost->enc_ctx->codec_type), av_get_media_type_string(st->codecpar->codec_type),
av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base), av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base),
av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base), av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base),
av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base), av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base),
@ -179,12 +174,307 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
); );
} }
if (ms->stats.io)
enc_stats_write(ost, &ms->stats, NULL, pkt, frame_num);
ret = av_interleaved_write_frame(s, pkt); ret = av_interleaved_write_frame(s, pkt);
if (ret < 0) { if (ret < 0) {
print_error("av_interleaved_write_frame()", ret); print_error("av_interleaved_write_frame()", ret);
main_ffmpeg_return_code = 1; goto fail;
close_all_output_streams(ost, MUXER_FINISHED | ENCODER_FINISHED, ENCODER_FINISHED); }
return 0;
fail:
av_packet_unref(pkt);
return ret;
}
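write_packet() above reflects another FFmpeg 6.0 change: packets now carry their own time base, so timestamps are rescaled from pkt->time_base into the stream time base rather than from the old ost->mux_timebase field. A small isolated sketch of that step (rescale_for_mux is an illustrative name, not part of the tree):

    #include "libavcodec/packet.h"
    #include "libavutil/rational.h"

    /* Rescale a packet's timestamps into the muxer's stream time base and
     * keep pkt->time_base consistent, as write_packet() does above. */
    void rescale_for_mux(AVPacket *pkt, AVRational stream_tb)
    {
        av_packet_rescale_ts(pkt, pkt->time_base, stream_tb);
        pkt->time_base = stream_tb;
    }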
static int sync_queue_process(Muxer *mux, OutputStream *ost, AVPacket *pkt, int *stream_eof)
{
OutputFile *of = &mux->of;
if (ost->sq_idx_mux >= 0) {
int ret = sq_send(mux->sq_mux, ost->sq_idx_mux, SQPKT(pkt));
if (ret < 0) {
if (ret == AVERROR_EOF)
*stream_eof = 1;
return ret;
}
while (1) {
ret = sq_receive(mux->sq_mux, -1, SQPKT(mux->sq_pkt));
if (ret < 0)
return (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) ? 0 : ret;
ret = write_packet(mux, of->streams[ret],
mux->sq_pkt);
if (ret < 0)
return ret;
}
} else if (pkt)
return write_packet(mux, ost, pkt);
return 0;
}
static void thread_set_name(OutputFile *of)
{
char name[16];
snprintf(name, sizeof(name), "mux%d:%s", of->index, of->format->name);
ff_thread_setname(name);
}
static void *muxer_thread(void *arg)
{
Muxer *mux = arg;
OutputFile *of = &mux->of;
AVPacket *pkt = NULL;
int ret = 0;
pkt = av_packet_alloc();
if (!pkt) {
ret = AVERROR(ENOMEM);
goto finish;
}
thread_set_name(of);
while (1) {
OutputStream *ost;
int stream_idx, stream_eof = 0;
ret = tq_receive(mux->tq, &stream_idx, pkt);
if (stream_idx < 0) {
av_log(mux, AV_LOG_VERBOSE, "All streams finished\n");
ret = 0;
break;
}
ost = of->streams[stream_idx];
ret = sync_queue_process(mux, ost, ret < 0 ? NULL : pkt, &stream_eof);
av_packet_unref(pkt);
if (ret == AVERROR_EOF && stream_eof)
tq_receive_finish(mux->tq, stream_idx);
else if (ret < 0) {
av_log(mux, AV_LOG_ERROR, "Error muxing a packet\n");
break;
}
}
finish:
av_packet_free(&pkt);
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_receive_finish(mux->tq, i);
av_log(mux, AV_LOG_VERBOSE, "Terminating muxer thread\n");
return (void*)(intptr_t)ret;
}
static int thread_submit_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
int ret = 0;
if (!pkt || ost->finished & MUXER_FINISHED)
goto finish;
ret = tq_send(mux->tq, ost->index, pkt);
if (ret < 0)
goto finish;
return 0;
finish:
if (pkt)
av_packet_unref(pkt);
ost->finished |= MUXER_FINISHED;
tq_send_finish(mux->tq, ost->index);
return ret == AVERROR_EOF ? 0 : ret;
}
static int queue_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
MuxStream *ms = ms_from_ost(ost);
AVPacket *tmp_pkt = NULL;
int ret;
if (!av_fifo_can_write(ms->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ms->muxing_queue);
size_t pkt_size = pkt ? pkt->size : 0;
unsigned int are_we_over_size =
(ms->muxing_queue_data_size + pkt_size) > ms->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ms->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) {
av_log(ost, AV_LOG_ERROR,
"Too many packets buffered for output stream %d:%d.\n",
ost->file_index, ost->st->index);
return AVERROR(ENOSPC);
}
ret = av_fifo_grow2(ms->muxing_queue, new_size - cur_size);
if (ret < 0)
return ret;
}
if (pkt) {
ret = av_packet_make_refcounted(pkt);
if (ret < 0)
return ret;
tmp_pkt = av_packet_alloc();
if (!tmp_pkt)
return AVERROR(ENOMEM);
av_packet_move_ref(tmp_pkt, pkt);
ms->muxing_queue_data_size += tmp_pkt->size;
}
av_fifo_write(ms->muxing_queue, &tmp_pkt, 1);
return 0;
}
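queue_packet() above buffers AVPacket pointers in an AVFifo that doubles its capacity while the queued data stays below the configured threshold. A standalone sketch of that grow-then-write pattern with the public AVFifo API (fifo_push_pkt is an illustrative helper; the fifo is assumed to have been created with a non-zero initial size):

    #include "libavcodec/packet.h"
    #include "libavutil/fifo.h"

    /* Push one AVPacket* into a fifo of packet pointers, doubling the
     * capacity when it is full: the same shape as queue_packet() above. */
    int fifo_push_pkt(AVFifo *q, AVPacket **pkt)
    {
        if (!av_fifo_can_write(q)) {
            int ret = av_fifo_grow2(q, av_fifo_can_read(q));
            if (ret < 0)
                return ret;
        }
        return av_fifo_write(q, pkt, 1);
    }

    /* A matching fifo would be created with
     *   AVFifo *q = av_fifo_alloc2(8, sizeof(AVPacket*), 0);
     * and drained with av_fifo_read(q, &pkt, 1). */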
static int submit_packet(Muxer *mux, AVPacket *pkt, OutputStream *ost)
{
int ret;
if (mux->tq) {
return thread_submit_packet(mux, ost, pkt);
} else {
/* the muxer is not initialized yet, buffer the packet */
ret = queue_packet(mux, ost, pkt);
if (ret < 0) {
if (pkt)
av_packet_unref(pkt);
return ret;
}
} }
return 0;
}
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
const char *err_msg;
int ret = 0;
if (!eof && pkt->dts != AV_NOPTS_VALUE)
ost->last_mux_dts = av_rescale_q(pkt->dts, pkt->time_base, AV_TIME_BASE_Q);
/* apply the output bitstream filters */
if (ms->bsf_ctx) {
int bsf_eof = 0;
ret = av_bsf_send_packet(ms->bsf_ctx, eof ? NULL : pkt);
if (ret < 0) {
err_msg = "submitting a packet for bitstream filtering";
goto fail;
}
while (!bsf_eof) {
ret = av_bsf_receive_packet(ms->bsf_ctx, pkt);
if (ret == AVERROR(EAGAIN))
return;
else if (ret == AVERROR_EOF)
bsf_eof = 1;
else if (ret < 0) {
err_msg = "applying bitstream filters to a packet";
goto fail;
}
ret = submit_packet(mux, bsf_eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
} else {
ret = submit_packet(mux, eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
return;
mux_fail:
err_msg = "submitting a packet to the muxer";
fail:
av_log(ost, AV_LOG_ERROR, "Error %s\n", err_msg);
if (exit_on_error)
exit_program(1);
}
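of_output_packet() above drives the output bitstream filter with the standard send/receive loop of the public BSF API. A compressed sketch of the same pattern against a generic AVBSFContext (bsf_roundtrip is an illustrative name, not part of the tree):

    #include "libavcodec/bsf.h"
    #include "libavcodec/packet.h"
    #include "libavutil/error.h"

    /* Send one packet (NULL on EOF) and drain everything the filter
     * produces, the same shape as the loop in of_output_packet() above. */
    int bsf_roundtrip(AVBSFContext *bsf, AVPacket *pkt, int eof)
    {
        /* pkt is still needed below as the receive buffer, so only the
         * argument to av_bsf_send_packet() becomes NULL on EOF */
        int ret = av_bsf_send_packet(bsf, eof ? NULL : pkt);
        if (ret < 0)
            return ret;

        while ((ret = av_bsf_receive_packet(bsf, pkt)) >= 0) {
            /* ... forward pkt to the muxer, as submit_packet() does above ... */
            av_packet_unref(pkt);
        }
        return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
    }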
static int thread_stop(Muxer *mux)
{
void *ret;
if (!mux || !mux->tq)
return 0;
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_send_finish(mux->tq, i);
pthread_join(mux->thread, &ret);
tq_free(&mux->tq);
return (int)(intptr_t)ret;
}
static void pkt_move(void *dst, void *src)
{
av_packet_move_ref(dst, src);
}
static int thread_start(Muxer *mux)
{
AVFormatContext *fc = mux->fc;
ObjPool *op;
int ret;
op = objpool_alloc_packets();
if (!op)
return AVERROR(ENOMEM);
mux->tq = tq_alloc(fc->nb_streams, mux->thread_queue_size, op, pkt_move);
if (!mux->tq) {
objpool_free(&op);
return AVERROR(ENOMEM);
}
ret = pthread_create(&mux->thread, NULL, muxer_thread, (void*)mux);
if (ret) {
tq_free(&mux->tq);
return AVERROR(ret);
}
/* flush the muxing queues */
for (int i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = mux->of.streams[i];
MuxStream *ms = ms_from_ost(ost);
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */
if (!av_fifo_can_read(ms->muxing_queue))
ost->mux_timebase = ost->st->time_base;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0) {
ret = thread_submit_packet(mux, ost, pkt);
if (pkt) {
ms->muxing_queue_data_size -= pkt->size;
av_packet_free(&pkt);
}
if (ret < 0)
return ret;
}
}
return 0;
} }
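Taken together, thread_start(), thread_submit_packet() and muxer_thread() above implement a per-output-file producer/consumer handoff over a ThreadQueue. The following is only a compressed sketch of that lifecycle; it assumes the fftools_thread_queue.h and fftools_objpool.h added in this commit are on the include path, and consumer(), move_pkt() and run_one_stream() are illustrative names, not part of the tree:

    #include <pthread.h>
    #include "fftools_objpool.h"
    #include "fftools_thread_queue.h"
    #include "libavcodec/packet.h"
    #include "libavutil/error.h"

    static void move_pkt(void *dst, void *src) { av_packet_move_ref(dst, src); }

    static void *consumer(void *arg)
    {
        ThreadQueue *tq  = arg;
        AVPacket    *pkt = av_packet_alloc();
        int          idx;

        while (pkt) {
            int ret = tq_receive(tq, &idx, pkt);
            if (idx < 0)          /* every sender called tq_send_finish() */
                break;
            if (ret >= 0)         /* a packet arrived for stream idx */
                av_packet_unref(pkt);
        }
        av_packet_free(&pkt);
        return NULL;
    }

    static int run_one_stream(AVPacket *src)
    {
        ObjPool     *op = objpool_alloc_packets();
        ThreadQueue *tq = op ? tq_alloc(1, 8, op, move_pkt) : NULL;
        pthread_t    th;
        int          err;

        if (!tq) {
            objpool_free(&op);    /* mirrors the error handling in thread_start() */
            return AVERROR(ENOMEM);
        }

        err = pthread_create(&th, NULL, consumer, tq);
        if (err) {
            tq_free(&tq);
            return AVERROR(err);
        }

        tq_send(tq, 0, src);      /* producer side, as in thread_submit_packet() */
        tq_send_finish(tq, 0);    /* no more packets for stream 0 */

        pthread_join(th, NULL);
        tq_free(&tq);
        return 0;
    }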
static int print_sdp(void) static int print_sdp(void)
@ -196,16 +486,16 @@ static int print_sdp(void)
AVFormatContext **avc; AVFormatContext **avc;
for (i = 0; i < nb_output_files; i++) { for (i = 0; i < nb_output_files; i++) {
if (!output_files[i]->header_written) if (!mux_from_of(output_files[i])->header_written)
return 0; return 0;
} }
avc = av_malloc_array(nb_output_files, sizeof(*avc)); avc = av_malloc_array(nb_output_files, sizeof(*avc));
if (!avc) if (!avc)
exit_program(1); return AVERROR(ENOMEM);
for (i = 0, j = 0; i < nb_output_files; i++) { for (i = 0, j = 0; i < nb_output_files; i++) {
if (!strcmp(output_files[i]->ctx->oformat->name, "rtp")) { if (!strcmp(output_files[i]->format->name, "rtp")) {
avc[j] = output_files[i]->ctx; avc[j] = mux_from_of(output_files[i])->fc;
j++; j++;
} }
} }
@ -221,7 +511,7 @@ static int print_sdp(void)
goto fail; goto fail;
if (!sdp_filename) { if (!sdp_filename) {
printf("SDP:\n%s\n", sdp); av_log(NULL, AV_LOG_ERROR, "SDP:\n%s\n", sdp);
fflush(stdout); fflush(stdout);
} else { } else {
ret = avio_open2(&sdp_pb, sdp_filename, AVIO_FLAG_WRITE, &int_cb, NULL); ret = avio_open2(&sdp_pb, sdp_filename, AVIO_FLAG_WRITE, &int_cb, NULL);
@ -235,34 +525,36 @@ static int print_sdp(void)
av_freep(&sdp_filename); av_freep(&sdp_filename);
} }
// SDP successfully written, allow muxer threads to start
ret = 1;
fail: fail:
av_freep(&avc); av_freep(&avc);
return ret; return ret;
} }
/* open the muxer when all the streams are initialized */ int mux_check_init(Muxer *mux)
int of_check_init(OutputFile *of)
{ {
OutputFile *of = &mux->of;
AVFormatContext *fc = mux->fc;
int ret, i; int ret, i;
for (i = 0; i < of->ctx->nb_streams; i++) { for (i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = output_streams[of->ost_index + i]; OutputStream *ost = of->streams[i];
if (!ost->initialized) if (!ost->initialized)
return 0; return 0;
} }
ret = avformat_write_header(of->ctx, &of->opts); ret = avformat_write_header(fc, &mux->opts);
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, av_log(mux, AV_LOG_ERROR, "Could not write header (incorrect codec "
"Could not write header for output file #%d " "parameters ?): %s\n", av_err2str(ret));
"(incorrect codec parameters ?): %s\n",
of->index, av_err2str(ret));
return ret; return ret;
} }
//assert_avoptions(of->opts); //assert_avoptions(of->opts);
of->header_written = 1; mux->header_written = 1;
av_dump_format(of->ctx, of->index, of->ctx->url, 1); av_dump_format(fc, of->index, fc->url, 1);
nb_output_dumped++; nb_output_dumped++;
if (sdp_filename || want_sdp) { if (sdp_filename || want_sdp) {
@ -270,62 +562,220 @@ int of_check_init(OutputFile *of)
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n"); av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n");
return ret; return ret;
} else if (ret == 1) {
/* SDP is written only after all the muxers are ready, so now we
* start ALL the threads */
for (i = 0; i < nb_output_files; i++) {
ret = thread_start(mux_from_of(output_files[i]));
if (ret < 0)
return ret;
}
} }
} else {
ret = thread_start(mux_from_of(of));
if (ret < 0)
return ret;
} }
/* flush the muxing queues */ return 0;
for (i = 0; i < of->ctx->nb_streams; i++) { }
OutputStream *ost = output_streams[of->ost_index + i];
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */ static int bsf_init(MuxStream *ms)
if (!av_fifo_can_read(ost->muxing_queue)) {
ost->mux_timebase = ost->st->time_base; OutputStream *ost = &ms->ost;
AVBSFContext *ctx = ms->bsf_ctx;
int ret;
while (av_fifo_read(ost->muxing_queue, &pkt, 1) >= 0) { if (!ctx)
ost->muxing_queue_data_size -= pkt->size; return 0;
of_write_packet(of, pkt, ost, 1);
av_packet_free(&pkt); ret = avcodec_parameters_copy(ctx->par_in, ost->st->codecpar);
} if (ret < 0)
return ret;
ctx->time_base_in = ost->st->time_base;
ret = av_bsf_init(ctx);
if (ret < 0) {
av_log(ms, AV_LOG_ERROR, "Error initializing bitstream filter: %s\n",
ctx->filter->name);
return ret;
} }
ret = avcodec_parameters_copy(ost->st->codecpar, ctx->par_out);
if (ret < 0)
return ret;
ost->st->time_base = ctx->time_base_out;
return 0; return 0;
} }
int of_stream_init(OutputFile *of, OutputStream *ost)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
int ret;
if (ost->sq_idx_mux >= 0)
sq_set_tb(mux->sq_mux, ost->sq_idx_mux, ost->mux_timebase);
/* initialize bitstream filters for the output stream
* needs to be done here, because the codec id for streamcopy is not
* known until now */
ret = bsf_init(ms);
if (ret < 0)
return ret;
ost->initialized = 1;
return mux_check_init(mux);
}
int of_write_trailer(OutputFile *of) int of_write_trailer(OutputFile *of)
{ {
Muxer *mux = mux_from_of(of);
AVFormatContext *fc = mux->fc;
int ret; int ret;
if (!of->header_written) { if (!mux->tq) {
av_log(NULL, AV_LOG_ERROR, av_log(mux, AV_LOG_ERROR,
"Nothing was written into output file %d (%s), because " "Nothing was written into output file, because "
"at least one of its streams received no packets.\n", "at least one of its streams received no packets.\n");
of->index, of->ctx->url);
return AVERROR(EINVAL); return AVERROR(EINVAL);
} }
ret = av_write_trailer(of->ctx); ret = thread_stop(mux);
if (ret < 0)
main_ffmpeg_return_code = ret;
ret = av_write_trailer(fc);
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error writing trailer of %s: %s\n", of->ctx->url, av_err2str(ret)); av_log(mux, AV_LOG_ERROR, "Error writing trailer: %s\n", av_err2str(ret));
return ret; return ret;
} }
mux->last_filesize = filesize(fc->pb);
if (!(of->format->flags & AVFMT_NOFILE)) {
ret = avio_closep(&fc->pb);
if (ret < 0) {
av_log(mux, AV_LOG_ERROR, "Error closing file: %s\n", av_err2str(ret));
return ret;
}
}
return 0; return 0;
} }
static void ost_free(OutputStream **post)
{
OutputStream *ost = *post;
MuxStream *ms;
if (!ost)
return;
ms = ms_from_ost(ost);
if (ost->logfile) {
if (fclose(ost->logfile))
av_log(ms, AV_LOG_ERROR,
"Error closing logfile, loss of information possible: %s\n",
av_err2str(AVERROR(errno)));
ost->logfile = NULL;
}
if (ms->muxing_queue) {
AVPacket *pkt;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0)
av_packet_free(&pkt);
av_fifo_freep2(&ms->muxing_queue);
}
av_bsf_free(&ms->bsf_ctx);
av_frame_free(&ost->filtered_frame);
av_frame_free(&ost->sq_frame);
av_frame_free(&ost->last_frame);
av_packet_free(&ost->pkt);
av_dict_free(&ost->encoder_opts);
av_freep(&ost->kf.pts);
av_expr_free(ost->kf.pexpr);
av_freep(&ost->avfilter);
av_freep(&ost->logfile_prefix);
av_freep(&ost->apad);
#if FFMPEG_OPT_MAP_CHANNEL
av_freep(&ost->audio_channels_map);
ost->audio_channels_mapped = 0;
#endif
av_dict_free(&ost->sws_dict);
av_dict_free(&ost->swr_opts);
if (ost->enc_ctx)
av_freep(&ost->enc_ctx->stats_in);
avcodec_free_context(&ost->enc_ctx);
for (int i = 0; i < ost->enc_stats_pre.nb_components; i++)
av_freep(&ost->enc_stats_pre.components[i].str);
av_freep(&ost->enc_stats_pre.components);
for (int i = 0; i < ost->enc_stats_post.nb_components; i++)
av_freep(&ost->enc_stats_post.components[i].str);
av_freep(&ost->enc_stats_post.components);
for (int i = 0; i < ms->stats.nb_components; i++)
av_freep(&ms->stats.components[i].str);
av_freep(&ms->stats.components);
av_freep(post);
}
static void fc_close(AVFormatContext **pfc)
{
AVFormatContext *fc = *pfc;
if (!fc)
return;
if (!(fc->oformat->flags & AVFMT_NOFILE))
avio_closep(&fc->pb);
avformat_free_context(fc);
*pfc = NULL;
}
void of_close(OutputFile **pof) void of_close(OutputFile **pof)
{ {
OutputFile *of = *pof; OutputFile *of = *pof;
AVFormatContext *s; Muxer *mux;
if (!of) if (!of)
return; return;
mux = mux_from_of(of);
s = of->ctx; thread_stop(mux);
if (s && s->oformat && !(s->oformat->flags & AVFMT_NOFILE))
avio_closep(&s->pb); sq_free(&of->sq_encode);
avformat_free_context(s); sq_free(&mux->sq_mux);
av_dict_free(&of->opts);
for (int i = 0; i < of->nb_streams; i++)
ost_free(&of->streams[i]);
av_freep(&of->streams);
av_dict_free(&mux->opts);
av_packet_free(&mux->sq_pkt);
fc_close(&mux->fc);
av_freep(pof); av_freep(pof);
} }
int64_t of_filesize(OutputFile *of)
{
Muxer *mux = mux_from_of(of);
return atomic_load(&mux->last_filesize);
}

@ -0,0 +1,165 @@
/*
* Muxer internal APIs - should not be included outside of ffmpeg_mux*
* Copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
 * This file is the modified version of the ffmpeg_mux.h file living in the ffmpeg source code under the fftools folder. We
 * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp made thread-local
* - EncStatsFile declaration migrated from ffmpeg_mux_init.c
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated from
* ffmpeg.h
* - ms_from_ost migrated to ffmpeg_mux.c
*/
#ifndef FFTOOLS_FFMPEG_MUX_H
#define FFTOOLS_FFMPEG_MUX_H
#include <stdatomic.h>
#include <stdint.h>
#include "fftools_thread_queue.h"
#include "libavformat/avformat.h"
#include "libavcodec/packet.h"
#include "libavutil/dict.h"
#include "libavutil/fifo.h"
#include "libavutil/thread.h"
#define SPECIFIER_OPT_FMT_str "%s"
#define SPECIFIER_OPT_FMT_i "%i"
#define SPECIFIER_OPT_FMT_i64 "%"PRId64
#define SPECIFIER_OPT_FMT_ui64 "%"PRIu64
#define SPECIFIER_OPT_FMT_f "%f"
#define SPECIFIER_OPT_FMT_dbl "%lf"
#define WARN_MULTIPLE_OPT_USAGE(name, type, so, st)\
{\
char namestr[128] = "";\
const char *spec = so->specifier && so->specifier[0] ? so->specifier : "";\
for (int _i = 0; opt_name_##name[_i]; _i++)\
av_strlcatf(namestr, sizeof(namestr), "-%s%s", opt_name_##name[_i], opt_name_##name[_i+1] ? (opt_name_##name[_i+2] ? ", " : " or ") : "");\
av_log(NULL, AV_LOG_WARNING, "Multiple %s options specified for stream %d, only the last option '-%s%s%s "SPECIFIER_OPT_FMT_##type"' will be used.\n",\
namestr, st->index, opt_name_##name[0], spec[0] ? ":" : "", spec, so->u.type);\
}
#define MATCH_PER_STREAM_OPT(name, type, outvar, fmtctx, st)\
{\
int _ret, _matches = 0;\
SpecifierOpt *so;\
for (int _i = 0; _i < o->nb_ ## name; _i++) {\
char *spec = o->name[_i].specifier;\
if ((_ret = check_stream_specifier(fmtctx, st, spec)) > 0) {\
outvar = o->name[_i].u.type;\
so = &o->name[_i];\
_matches++;\
} else if (_ret < 0)\
exit_program(1);\
}\
if (_matches > 1)\
WARN_MULTIPLE_OPT_USAGE(name, type, so, st);\
}
#define MATCH_PER_TYPE_OPT(name, type, outvar, fmtctx, mediatype)\
{\
int i;\
for (i = 0; i < o->nb_ ## name; i++) {\
char *spec = o->name[i].specifier;\
if (!strcmp(spec, mediatype))\
outvar = o->name[i].u.type;\
}\
}
typedef struct MuxStream {
OutputStream ost;
// name used for logging
char log_name[32];
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
AVBSFContext *bsf_ctx;
EncStats stats;
int64_t max_frames;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
int max_muxing_queue_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* dts of the last packet sent to the muxer, in the stream timebase
* used for making up missing dts values */
int64_t last_mux_dts;
} MuxStream;
typedef struct Muxer {
OutputFile of;
// name used for logging
char log_name[32];
AVFormatContext *fc;
pthread_t thread;
ThreadQueue *tq;
AVDictionary *opts;
int thread_queue_size;
/* filesize limit expressed in bytes */
int64_t limit_filesize;
atomic_int_least64_t last_filesize;
int header_written;
SyncQueue *sq_mux;
AVPacket *sq_pkt;
} Muxer;
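MuxStream embeds OutputStream and Muxer embeds OutputFile as their first members, which is what makes the ms_from_ost() and mux_from_of() pointer casts in ffmpeg_mux.c well defined. A self-contained illustration of that pattern with hypothetical Base/Wrapper names:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct Base { int id; } Base;

    typedef struct Wrapper {
        Base base;      /* must remain the first member for the cast to be valid */
        int  extra;
    } Wrapper;

    /* Same idea as ms_from_ost(): the address of the first member
     * equals the address of the enclosing struct. */
    static Wrapper *wrapper_from_base(Base *b)
    {
        return (Wrapper*)b;
    }

    int main(void)
    {
        Wrapper *w = calloc(1, sizeof(*w));
        if (!w)
            return 1;

        w->base.id = 7;
        w->extra   = 42;

        Base *b = &w->base;
        printf("%d %d\n", b->id, wrapper_from_base(b)->extra);

        free(w);
        return 0;
    }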
typedef struct EncStatsFile {
char *path;
AVIOContext *io;
} EncStatsFile;
/* whether we want to print an SDP, set in of_open() */
extern __thread int want_sdp;
int mux_check_init(Muxer *mux);
#endif /* FFTOOLS_FFMPEG_MUX_H */

@ -1,6 +1,7 @@
/* /*
* Copyright (c) 2007-2010 Stefano Sabatini * Copyright (c) 2007-2010 Stefano Sabatini
* Copyright (c) 2020 Taner Sener * Copyright (c) 2020-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -29,6 +30,13 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -52,9 +60,11 @@
#include "libavutil/ffversion.h" #include "libavutil/ffversion.h"
#include <string.h> #include <string.h>
#include <math.h>
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h"
#include "libavutil/ambient_viewing_environment.h"
#include "libavutil/avassert.h" #include "libavutil/avassert.h"
#include "libavutil/avstring.h" #include "libavutil/avstring.h"
#include "libavutil/bprint.h" #include "libavutil/bprint.h"
@ -163,6 +173,8 @@ typedef struct ReadInterval {
__thread ReadInterval *read_intervals; __thread ReadInterval *read_intervals;
__thread int read_intervals_nb = 0; __thread int read_intervals_nb = 0;
__thread int find_stream_info = 1;
/* section structure definition */ /* section structure definition */
#define SECTION_MAX_NB_CHILDREN 10 #define SECTION_MAX_NB_CHILDREN 10
@ -626,6 +638,7 @@ static inline void writer_put_str_printf(WriterContext *wctx, const char *str)
static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...) static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...)
{ {
va_list ap; va_list ap;
va_start(ap, fmt); va_start(ap, fmt);
av_vlog(NULL, AV_LOG_STDERR, fmt, ap); av_vlog(NULL, AV_LOG_STDERR, fmt, ap);
va_end(ap); va_end(ap);
@ -671,7 +684,7 @@ static int writer_open(WriterContext **wctx, const Writer *writer, const char *a
goto fail; goto fail;
} }
while ((opt = av_dict_get(opts, "", opt, AV_DICT_IGNORE_SUFFIX))) { while ((opt = av_dict_iterate(opts, opt))) {
if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) { if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) {
av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n", av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n",
opt->key, opt->value); opt->key, opt->value);
@ -1907,13 +1920,15 @@ static void writer_register_all(void)
writer_print_string(w, k, pbuf.str, 0); \ writer_print_string(w, k, pbuf.str, 0); \
} while (0) } while (0)
#define print_list_fmt(k, f, n, ...) do { \ #define print_list_fmt(k, f, n, m, ...) do { \
av_bprint_clear(&pbuf); \ av_bprint_clear(&pbuf); \
for (int idx = 0; idx < n; idx++) { \ for (int idx = 0; idx < n; idx++) { \
if (idx > 0) \ for (int idx2 = 0; idx2 < m; idx2++) { \
if (idx > 0 || idx2 > 0) \
av_bprint_chars(&pbuf, ' ', 1); \ av_bprint_chars(&pbuf, ' ', 1); \
av_bprintf(&pbuf, f, __VA_ARGS__); \ av_bprintf(&pbuf, f, __VA_ARGS__); \
} \ } \
} \
writer_print_string(w, k, pbuf.str, 0); \ writer_print_string(w, k, pbuf.str, 0); \
} while (0) } while (0)
@ -1953,7 +1968,7 @@ static inline int show_tags(WriterContext *w, AVDictionary *tags, int section_id
return 0; return 0;
writer_print_section_header(w, section_id); writer_print_section_header(w, section_id);
while ((tag = av_dict_get(tags, "", tag, AV_DICT_IGNORE_SUFFIX))) { while ((tag = av_dict_iterate(tags, tag))) {
if ((ret = print_str_validate(tag->key, tag->value)) < 0) if ((ret = print_str_validate(tag->key, tag->value)) < 0)
break; break;
} }
@ -2023,7 +2038,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
const AVDOVIReshapingCurve *curve = &mapping->curves[c]; const AVDOVIReshapingCurve *curve = &mapping->curves[c];
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT);
print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, curve->pivots[idx]); print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, 1, curve->pivots[idx]);
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST);
for (int i = 0; i < curve->num_pivots - 1; i++) { for (int i = 0; i < curve->num_pivots - 1; i++) {
@ -2035,7 +2050,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_str("mapping_idc_name", "polynomial"); print_str("mapping_idc_name", "polynomial");
print_int("poly_order", curve->poly_order[i]); print_int("poly_order", curve->poly_order[i]);
print_list_fmt("poly_coef", "%"PRIi64, print_list_fmt("poly_coef", "%"PRIi64,
curve->poly_order[i] + 1, curve->poly_order[i] + 1, 1,
curve->poly_coef[i][idx]); curve->poly_coef[i][idx]);
break; break;
case AV_DOVI_MAPPING_MMR: case AV_DOVI_MAPPING_MMR:
@ -2043,8 +2058,8 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("mmr_order", curve->mmr_order[i]); print_int("mmr_order", curve->mmr_order[i]);
print_int("mmr_constant", curve->mmr_constant[i]); print_int("mmr_constant", curve->mmr_constant[i]);
print_list_fmt("mmr_coef", "%"PRIi64, print_list_fmt("mmr_coef", "%"PRIi64,
curve->mmr_order[i] * 7, curve->mmr_order[i], 7,
curve->mmr_coef[i][0][idx]); curve->mmr_coef[i][idx][idx2]);
break; break;
default: default:
print_str("mapping_idc_name", "unknown"); print_str("mapping_idc_name", "unknown");
@ -2082,15 +2097,15 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("dm_metadata_id", color->dm_metadata_id); print_int("dm_metadata_id", color->dm_metadata_id);
print_int("scene_refresh_flag", color->scene_refresh_flag); print_int("scene_refresh_flag", color->scene_refresh_flag);
print_list_fmt("ycc_to_rgb_matrix", "%d/%d", print_list_fmt("ycc_to_rgb_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), 1,
color->ycc_to_rgb_matrix[idx].num, color->ycc_to_rgb_matrix[idx].num,
color->ycc_to_rgb_matrix[idx].den); color->ycc_to_rgb_matrix[idx].den);
print_list_fmt("ycc_to_rgb_offset", "%d/%d", print_list_fmt("ycc_to_rgb_offset", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), 1,
color->ycc_to_rgb_offset[idx].num, color->ycc_to_rgb_offset[idx].num,
color->ycc_to_rgb_offset[idx].den); color->ycc_to_rgb_offset[idx].den);
print_list_fmt("rgb_to_lms_matrix", "%d/%d", print_list_fmt("rgb_to_lms_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), 1,
color->rgb_to_lms_matrix[idx].num, color->rgb_to_lms_matrix[idx].num,
color->rgb_to_lms_matrix[idx].den); color->rgb_to_lms_matrix[idx].den);
print_int("signal_eotf", color->signal_eotf); print_int("signal_eotf", color->signal_eotf);
@ -2276,6 +2291,17 @@ static void print_dynamic_hdr_vivid(WriterContext *w, const AVDynamicHDRVivid *m
} }
} }
static void print_ambient_viewing_environment(WriterContext *w,
const AVAmbientViewingEnvironment *env)
{
if (!env)
return;
print_q("ambient_illuminance", env->ambient_illuminance, '/');
print_q("ambient_light_x", env->ambient_light_x, '/');
print_q("ambient_light_y", env->ambient_light_y, '/');
}
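print_ambient_viewing_environment() above formats the AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT side data that FFmpeg 6.0 introduces. A short sketch of how such side data would be fetched from a decoded frame (dump_ambient_viewing_environment is an illustrative helper, not ffprobe code):

    #include <stdio.h>
    #include "libavutil/ambient_viewing_environment.h"
    #include "libavutil/frame.h"

    void dump_ambient_viewing_environment(const AVFrame *frame)
    {
        const AVFrameSideData *sd =
            av_frame_get_side_data(frame, AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT);
        if (!sd)
            return;

        const AVAmbientViewingEnvironment *env =
            (const AVAmbientViewingEnvironment *)sd->data;
        printf("ambient_illuminance %d/%d\n",
               env->ambient_illuminance.num, env->ambient_illuminance.den);
    }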
static void print_pkt_side_data(WriterContext *w, static void print_pkt_side_data(WriterContext *w,
AVCodecParameters *par, AVCodecParameters *par,
const AVPacketSideData *side_data, const AVPacketSideData *side_data,
@ -2293,8 +2319,11 @@ static void print_pkt_side_data(WriterContext *w,
writer_print_section_header(w, id_data); writer_print_section_header(w, id_data);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1); writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
print_int("rotation", av_display_rotation_get((int32_t *)sd->data)); print_int("rotation", rotation);
} else if (sd->type == AV_PKT_DATA_STEREO3D) { } else if (sd->type == AV_PKT_DATA_STEREO3D) {
const AVStereo3D *stereo = (AVStereo3D *)sd->data; const AVStereo3D *stereo = (AVStereo3D *)sd->data;
print_str("type", av_stereo3d_type_name(stereo->type)); print_str("type", av_stereo3d_type_name(stereo->type));
@ -2506,8 +2535,12 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
print_val("size", pkt->size, unit_byte_str); print_val("size", pkt->size, unit_byte_str);
if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos); if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos);
else print_str_opt("pos", "N/A"); else print_str_opt("pos", "N/A");
print_fmt("flags", "%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_', print_fmt("flags", "%c%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_',
pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_'); pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_',
pkt->flags & AV_PKT_FLAG_CORRUPT ? 'C' : '_');
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
if (pkt->side_data_elems) { if (pkt->side_data_elems) {
size_t size; size_t size;
@ -2526,9 +2559,6 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
SECTION_ID_PACKET_SIDE_DATA); SECTION_ID_PACKET_SIDE_DATA);
} }
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
writer_print_section_footer(w); writer_print_section_footer(w);
av_bprint_finalize(&pbuf, NULL); av_bprint_finalize(&pbuf, NULL);
@ -2581,8 +2611,14 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base); print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base);
print_ts ("best_effort_timestamp", frame->best_effort_timestamp); print_ts ("best_effort_timestamp", frame->best_effort_timestamp);
print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base); print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base);
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_duration_ts ("pkt_duration", frame->pkt_duration); print_duration_ts ("pkt_duration", frame->pkt_duration);
print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base); print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base);
)
#endif
print_duration_ts ("duration", frame->duration);
print_duration_time("duration_time", frame->duration, &stream->time_base);
if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos); if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos);
else print_str_opt("pkt_pos", "N/A"); else print_str_opt("pkt_pos", "N/A");
if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str); if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str);
@ -2604,8 +2640,12 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_str_opt("sample_aspect_ratio", "N/A"); print_str_opt("sample_aspect_ratio", "N/A");
} }
print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type)); print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type));
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_int("coded_picture_number", frame->coded_picture_number); print_int("coded_picture_number", frame->coded_picture_number);
print_int("display_picture_number", frame->display_picture_number); print_int("display_picture_number", frame->display_picture_number);
)
#endif
print_int("interlaced_frame", frame->interlaced_frame); print_int("interlaced_frame", frame->interlaced_frame);
print_int("top_field_first", frame->top_field_first); print_int("top_field_first", frame->top_field_first);
print_int("repeat_pict", frame->repeat_pict); print_int("repeat_pict", frame->repeat_pict);
@ -2644,8 +2684,11 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
name = av_frame_side_data_name(sd->type); name = av_frame_side_data_name(sd->type);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1); writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
print_int("rotation", av_display_rotation_get((int32_t *)sd->data)); print_int("rotation", rotation);
} else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) { } else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) {
print_int("active_format", *sd->data); print_int("active_format", *sd->data);
} else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) { } else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) {
@ -2700,6 +2743,9 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
} else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) { } else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) {
AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data; AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data;
print_dynamic_hdr_vivid(w, metadata); print_dynamic_hdr_vivid(w, metadata);
} else if (sd->type == AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT) {
print_ambient_viewing_environment(
w, (const AVAmbientViewingEnvironment *)sd->data);
} }
writer_print_section_footer(w); writer_print_section_footer(w);
} }
@ -2714,7 +2760,7 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
static av_always_inline int process_frame(WriterContext *w, static av_always_inline int process_frame(WriterContext *w,
InputFile *ifile, InputFile *ifile,
AVFrame *frame, AVPacket *pkt, AVFrame *frame, const AVPacket *pkt,
int *packet_new) int *packet_new)
{ {
AVFormatContext *fmt_ctx = ifile->fmt_ctx; AVFormatContext *fmt_ctx = ifile->fmt_ctx;
@ -2858,9 +2904,10 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
if (selected_streams[pkt->stream_index]) { if (selected_streams[pkt->stream_index]) {
AVRational tb = ifile->streams[pkt->stream_index].st->time_base; AVRational tb = ifile->streams[pkt->stream_index].st->time_base;
int64_t pts = pkt->pts != AV_NOPTS_VALUE ? pkt->pts : pkt->dts;
if (pkt->pts != AV_NOPTS_VALUE) if (pts != AV_NOPTS_VALUE)
*cur_ts = av_rescale_q(pkt->pts, tb, AV_TIME_BASE_Q); *cur_ts = av_rescale_q(pts, tb, AV_TIME_BASE_Q);
if (!has_start && *cur_ts != AV_NOPTS_VALUE) { if (!has_start && *cur_ts != AV_NOPTS_VALUE) {
start = *cur_ts; start = *cur_ts;
@ -2894,7 +2941,7 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
av_packet_unref(pkt); av_packet_unref(pkt);
//Flush remaining frames that are cached in the decoder //Flush remaining frames that are cached in the decoder
for (i = 0; i < fmt_ctx->nb_streams; i++) { for (i = 0; i < ifile->nb_streams; i++) {
pkt->stream_index = i; pkt->stream_index = i;
if (do_read_frames) { if (do_read_frames) {
while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0); while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0);
@ -3052,6 +3099,8 @@ static int show_stream(WriterContext *w, AVFormatContext *fmt_ctx, int stream_id
} }
print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id)); print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id));
print_int("initial_padding", par->initial_padding);
break; break;
case AVMEDIA_TYPE_SUBTITLE: case AVMEDIA_TYPE_SUBTITLE:
@ -3278,15 +3327,9 @@ static int show_format(WriterContext *w, InputFile *ifile)
static void show_error(WriterContext *w, int err) static void show_error(WriterContext *w, int err)
{ {
char errbuf[128];
const char *errbuf_ptr = errbuf;
if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
errbuf_ptr = strerror(AVUNERROR(err));
writer_print_section_header(w, SECTION_ID_ERROR); writer_print_section_header(w, SECTION_ID_ERROR);
print_int("code", err); print_int("code", err);
print_str("string", errbuf_ptr); print_str("string", av_err2str(err));
writer_print_section_footer(w); writer_print_section_footer(w);
} }
@ -3299,10 +3342,8 @@ static int open_input_file(InputFile *ifile, const char *filename,
int scan_all_pmts_set = 0; int scan_all_pmts_set = 0;
fmt_ctx = avformat_alloc_context(); fmt_ctx = avformat_alloc_context();
if (!fmt_ctx) { if (!fmt_ctx)
print_error(filename, AVERROR(ENOMEM)); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) { if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE); av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
@ -3320,7 +3361,7 @@ static int open_input_file(InputFile *ifile, const char *filename,
ifile->fmt_ctx = fmt_ctx; ifile->fmt_ctx = fmt_ctx;
if (scan_all_pmts_set) if (scan_all_pmts_set)
av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE); av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);
while ((t = av_dict_get(format_opts, "", t, AV_DICT_IGNORE_SUFFIX))) while ((t = av_dict_iterate(format_opts, t)))
av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key); av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key);
if (find_stream_info) { if (find_stream_info) {
@ -3718,7 +3759,7 @@ static void opt_input_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
input_filename = arg; input_filename = arg;
} }
@ -3737,7 +3778,7 @@ static void opt_output_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
output_filename = arg; output_filename = arg;
} }

@ -0,0 +1,145 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of objpool.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/common.h"
#include "libavutil/error.h"
#include "libavutil/frame.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
struct ObjPool {
void *pool[32];
unsigned int pool_count;
ObjPoolCBAlloc alloc;
ObjPoolCBReset reset;
ObjPoolCBFree free;
};
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free)
{
ObjPool *op = av_mallocz(sizeof(*op));
if (!op)
return NULL;
op->alloc = cb_alloc;
op->reset = cb_reset;
op->free = cb_free;
return op;
}
void objpool_free(ObjPool **pop)
{
ObjPool *op = *pop;
if (!op)
return;
for (unsigned int i = 0; i < op->pool_count; i++)
op->free(&op->pool[i]);
av_freep(pop);
}
int objpool_get(ObjPool *op, void **obj)
{
if (op->pool_count) {
*obj = op->pool[--op->pool_count];
op->pool[op->pool_count] = NULL;
} else
*obj = op->alloc();
return *obj ? 0 : AVERROR(ENOMEM);
}
void objpool_release(ObjPool *op, void **obj)
{
if (!*obj)
return;
op->reset(*obj);
if (op->pool_count < FF_ARRAY_ELEMS(op->pool))
op->pool[op->pool_count++] = *obj;
else
op->free(obj);
*obj = NULL;
}
static void *alloc_packet(void)
{
return av_packet_alloc();
}
static void *alloc_frame(void)
{
return av_frame_alloc();
}
static void reset_packet(void *obj)
{
av_packet_unref(obj);
}
static void reset_frame(void *obj)
{
av_frame_unref(obj);
}
static void free_packet(void **obj)
{
AVPacket *pkt = *obj;
av_packet_free(&pkt);
*obj = NULL;
}
static void free_frame(void **obj)
{
AVFrame *frame = *obj;
av_frame_free(&frame);
*obj = NULL;
}
ObjPool *objpool_alloc_packets(void)
{
return objpool_alloc(alloc_packet, reset_packet, free_packet);
}
ObjPool *objpool_alloc_frames(void)
{
return objpool_alloc(alloc_frame, reset_frame, free_frame);
}

@@ -0,0 +1,50 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of objpool.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_OBJPOOL_H
#define FFTOOLS_OBJPOOL_H
typedef struct ObjPool ObjPool;
typedef void* (*ObjPoolCBAlloc)(void);
typedef void (*ObjPoolCBReset)(void *);
typedef void (*ObjPoolCBFree)(void **);
void objpool_free(ObjPool **op);
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free);
ObjPool *objpool_alloc_packets(void);
ObjPool *objpool_alloc_frames(void);
int objpool_get(ObjPool *op, void **obj);
void objpool_release(ObjPool *op, void **obj);
#endif // FFTOOLS_OBJPOOL_H
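
For reference, a minimal usage sketch (not part of this commit) of the pool API declared above; the function name `objpool_demo` is illustrative:

```c
/* Hypothetical helper: round-trips one AVPacket through an object pool so
 * repeated get/release calls reuse the same allocation instead of calling
 * av_packet_alloc()/av_packet_free() each time. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_objpool.h"

static int objpool_demo(void)
{
    ObjPool *pool = objpool_alloc_packets();
    AVPacket *pkt = NULL;
    int ret;

    if (!pool)
        return AVERROR(ENOMEM);

    /* borrow a packet; an empty pool falls back to its alloc callback */
    ret = objpool_get(pool, (void**)&pkt);
    if (ret < 0) {
        objpool_free(&pool);
        return ret;
    }

    /* ... fill and consume the packet ... */

    /* unrefs the packet and caches it for reuse (frees it if the pool is full) */
    objpool_release(pool, (void**)&pkt);

    /* frees the pool and everything still cached inside it */
    objpool_free(&pool);
    return 0;
}
```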

@@ -1,6 +1,7 @@
/*
 * Option handlers shared between the tools.
 * Copyright (c) 2022 Taner Sener
 * Copyright (c) 2023 ARTHENICA LTD
 *
 * This file is part of FFmpeg.
 *
@@ -24,6 +25,12 @@
 * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
 *
 * ffmpeg-kit changes by ARTHENICA LTD
 *
 * 07.2023
 * --------------------------------------------------------
 * - time field in report_callback updated as double
 *
 * ffmpeg-kit changes by Taner Sener
 *
 * 09.2022
@@ -88,7 +95,7 @@ static __thread FILE *report_file = NULL;
static __thread int report_file_level = AV_LOG_DEBUG;
extern void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, va_list vargs);
extern void (*report_callback)(int, float, float, int64_t, int, double, double);
extern void (*report_callback)(int, float, float, int64_t, double, double, double);
extern __thread char *program_name;
int show_license(void *optctx, const char *opt, const char *arg)
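
For reference, a hedged sketch (not part of this commit) of a handler matching the widened `report_callback` signature above; the handler name is illustrative, and the parameter meanings follow the statistics callback documented later in this commit (frame number, fps, quality, size, time, bitrate, speed):

```c
/* Illustrative only: a handler with the updated report_callback signature,
 * where the time value is now a double. */
#include <inttypes.h>
#include <stdio.h>

static void my_report_handler(int frame_number, float fps, float quality,
                              int64_t size, double time, double bitrate,
                              double speed)
{
    /* print the statistics fields in the order they are delivered */
    printf("frame=%d fps=%.1f q=%.1f size=%" PRId64 " time=%.3f bitrate=%.1f speed=%.2fx\n",
           frame_number, fps, quality, size, time, bitrate, speed);
}
```

Per the change note above, an existing handler assigned to `report_callback` only needs its `time` parameter switched from `int` to `double`.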

@@ -0,0 +1,462 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of sync_queue.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/mathematics.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
typedef struct SyncQueueStream {
AVFifo *fifo;
AVRational tb;
/* stream head: largest timestamp seen */
int64_t head_ts;
int limiting;
/* no more frames will be sent for this stream */
int finished;
uint64_t frames_sent;
uint64_t frames_max;
} SyncQueueStream;
struct SyncQueue {
enum SyncQueueType type;
/* no more frames will be sent for any stream */
int finished;
/* sync head: the stream with the _smallest_ head timestamp
* this stream determines which frames can be output */
int head_stream;
/* the finished stream with the smallest finish timestamp or -1 */
int head_finished_stream;
// maximum buffering duration in microseconds
int64_t buf_size_us;
SyncQueueStream *streams;
unsigned int nb_streams;
// pool of preallocated frames to avoid constant allocations
ObjPool *pool;
};
static void frame_move(const SyncQueue *sq, SyncQueueFrame dst,
SyncQueueFrame src)
{
if (sq->type == SYNC_QUEUE_PACKETS)
av_packet_move_ref(dst.p, src.p);
else
av_frame_move_ref(dst.f, src.f);
}
static int64_t frame_ts(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ?
frame.p->pts + frame.p->duration :
frame.f->pts + frame.f->duration;
}
static int frame_null(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ? (frame.p == NULL) : (frame.f == NULL);
}
static void finish_stream(SyncQueue *sq, unsigned int stream_idx)
{
SyncQueueStream *st = &sq->streams[stream_idx];
st->finished = 1;
if (st->limiting && st->head_ts != AV_NOPTS_VALUE) {
/* check if this stream is the new finished head */
if (sq->head_finished_stream < 0 ||
av_compare_ts(st->head_ts, st->tb,
sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb) < 0) {
sq->head_finished_stream = stream_idx;
}
/* mark as finished all streams that should no longer receive new frames,
* due to them being ahead of some finished stream */
st = &sq->streams[sq->head_finished_stream];
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
if (st != st1 && st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st->head_ts, st->tb, st1->head_ts, st1->tb) <= 0)
st1->finished = 1;
}
}
/* mark the whole queue as finished if all streams are finished */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
if (!sq->streams[i].finished)
return;
}
sq->finished = 1;
}
static void queue_head_update(SyncQueue *sq)
{
if (sq->head_stream < 0) {
/* wait for one timestamp in each stream before determining
* the queue head */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st = &sq->streams[i];
if (st->limiting && st->head_ts == AV_NOPTS_VALUE)
return;
}
// placeholder value, correct one will be found below
sq->head_stream = 0;
}
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st_head = &sq->streams[sq->head_stream];
SyncQueueStream *st_other = &sq->streams[i];
if (st_other->limiting && st_other->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st_other->head_ts, st_other->tb,
st_head->head_ts, st_head->tb) < 0)
sq->head_stream = i;
}
}
/* update this stream's head timestamp */
static void stream_update_ts(SyncQueue *sq, unsigned int stream_idx, int64_t ts)
{
SyncQueueStream *st = &sq->streams[stream_idx];
if (ts == AV_NOPTS_VALUE ||
(st->head_ts != AV_NOPTS_VALUE && st->head_ts >= ts))
return;
st->head_ts = ts;
/* if this stream is now ahead of some finished stream, then
* this stream is also finished */
if (sq->head_finished_stream >= 0 &&
av_compare_ts(sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb,
ts, st->tb) <= 0)
finish_stream(sq, stream_idx);
/* update the overall head timestamp if it could have changed */
if (st->limiting &&
(sq->head_stream < 0 || sq->head_stream == stream_idx))
queue_head_update(sq);
}
/* If the queue for the given stream (or all streams when stream_idx=-1)
* is overflowing, trigger a fake heartbeat on lagging streams.
*
* @return 1 if heartbeat triggered, 0 otherwise
*/
static int overflow_heartbeat(SyncQueue *sq, int stream_idx)
{
SyncQueueStream *st;
SyncQueueFrame frame;
int64_t tail_ts = AV_NOPTS_VALUE;
/* if no stream specified, pick the one that is most ahead */
if (stream_idx < 0) {
int64_t ts = AV_NOPTS_VALUE;
for (int i = 0; i < sq->nb_streams; i++) {
st = &sq->streams[i];
if (st->head_ts != AV_NOPTS_VALUE &&
(ts == AV_NOPTS_VALUE ||
av_compare_ts(ts, sq->streams[stream_idx].tb,
st->head_ts, st->tb) < 0)) {
ts = st->head_ts;
stream_idx = i;
}
}
/* no stream has a timestamp yet -> nothing to do */
if (stream_idx < 0)
return 0;
}
st = &sq->streams[stream_idx];
/* get the chosen stream's tail timestamp */
for (size_t i = 0; tail_ts == AV_NOPTS_VALUE &&
av_fifo_peek(st->fifo, &frame, 1, i) >= 0; i++)
tail_ts = frame_ts(sq, frame);
/* overflow triggers when the tail is over specified duration behind the head */
if (tail_ts == AV_NOPTS_VALUE || tail_ts >= st->head_ts ||
av_rescale_q(st->head_ts - tail_ts, st->tb, AV_TIME_BASE_Q) < sq->buf_size_us)
return 0;
/* signal a fake timestamp for all streams that prevent tail_ts from being output */
tail_ts++;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
int64_t ts;
if (st == st1 || st1->finished ||
(st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(tail_ts, st->tb, st1->head_ts, st1->tb) <= 0))
continue;
ts = av_rescale_q(tail_ts, st->tb, st1->tb);
if (st1->head_ts != AV_NOPTS_VALUE)
ts = FFMAX(st1->head_ts + 1, ts);
stream_update_ts(sq, i, ts);
}
return 1;
}
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame)
{
SyncQueueStream *st;
SyncQueueFrame dst;
int64_t ts;
int ret;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(st->tb.num > 0 && st->tb.den > 0);
if (frame_null(sq, frame)) {
finish_stream(sq, stream_idx);
return 0;
}
if (st->finished)
return AVERROR_EOF;
ret = objpool_get(sq->pool, (void**)&dst);
if (ret < 0)
return ret;
frame_move(sq, dst, frame);
ts = frame_ts(sq, dst);
ret = av_fifo_write(st->fifo, &dst, 1);
if (ret < 0) {
frame_move(sq, frame, dst);
objpool_release(sq->pool, (void**)&dst);
return ret;
}
stream_update_ts(sq, stream_idx, ts);
st->frames_sent++;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
return 0;
}
static int receive_for_stream(SyncQueue *sq, unsigned int stream_idx,
SyncQueueFrame frame)
{
SyncQueueStream *st_head = sq->head_stream >= 0 ?
&sq->streams[sq->head_stream] : NULL;
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
if (av_fifo_can_read(st->fifo)) {
SyncQueueFrame peek;
int64_t ts;
int cmp = 1;
av_fifo_peek(st->fifo, &peek, 1, 0);
ts = frame_ts(sq, peek);
/* check if this stream's tail timestamp does not overtake
* the overall queue head */
if (ts != AV_NOPTS_VALUE && st_head)
cmp = av_compare_ts(ts, st->tb, st_head->head_ts, st_head->tb);
/* We can release frames that do not end after the queue head.
* Frames with no timestamps are just passed through with no conditions.
*/
if (cmp <= 0 || ts == AV_NOPTS_VALUE) {
frame_move(sq, frame, peek);
objpool_release(sq->pool, (void**)&peek);
av_fifo_drain2(st->fifo, 1);
return 0;
}
}
return (sq->finished || (st->finished && !av_fifo_can_read(st->fifo))) ?
AVERROR_EOF : AVERROR(EAGAIN);
}
static int receive_internal(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int nb_eof = 0;
int ret;
/* read a frame for a specific stream */
if (stream_idx >= 0) {
ret = receive_for_stream(sq, stream_idx, frame);
return (ret < 0) ? ret : stream_idx;
}
/* read a frame for any stream with available output */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
ret = receive_for_stream(sq, i, frame);
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
nb_eof += (ret == AVERROR_EOF);
continue;
}
return (ret < 0) ? ret : i;
}
return (nb_eof == sq->nb_streams) ? AVERROR_EOF : AVERROR(EAGAIN);
}
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int ret = receive_internal(sq, stream_idx, frame);
/* try again if the queue overflowed and triggered a fake heartbeat
* for lagging streams */
if (ret == AVERROR(EAGAIN) && overflow_heartbeat(sq, stream_idx))
ret = receive_internal(sq, stream_idx, frame);
return ret;
}
int sq_add_stream(SyncQueue *sq, int limiting)
{
SyncQueueStream *tmp, *st;
tmp = av_realloc_array(sq->streams, sq->nb_streams + 1, sizeof(*sq->streams));
if (!tmp)
return AVERROR(ENOMEM);
sq->streams = tmp;
st = &sq->streams[sq->nb_streams];
memset(st, 0, sizeof(*st));
st->fifo = av_fifo_alloc2(1, sizeof(SyncQueueFrame), AV_FIFO_FLAG_AUTO_GROW);
if (!st->fifo)
return AVERROR(ENOMEM);
/* we set a valid default, so that a pathological stream that never
* receives even a real timebase (and no frames) won't stall all other
* streams forever; cf. overflow_heartbeat() */
st->tb = (AVRational){ 1, 1 };
st->head_ts = AV_NOPTS_VALUE;
st->frames_max = UINT64_MAX;
st->limiting = limiting;
return sq->nb_streams++;
}
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(!av_fifo_can_read(st->fifo));
if (st->head_ts != AV_NOPTS_VALUE)
st->head_ts = av_rescale_q(st->head_ts, st->tb, tb);
st->tb = tb;
}
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx, uint64_t frames)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
st->frames_max = frames;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
}
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us)
{
SyncQueue *sq = av_mallocz(sizeof(*sq));
if (!sq)
return NULL;
sq->type = type;
sq->buf_size_us = buf_size_us;
sq->head_stream = -1;
sq->head_finished_stream = -1;
sq->pool = (type == SYNC_QUEUE_PACKETS) ? objpool_alloc_packets() :
objpool_alloc_frames();
if (!sq->pool) {
av_freep(&sq);
return NULL;
}
return sq;
}
void sq_free(SyncQueue **psq)
{
SyncQueue *sq = *psq;
if (!sq)
return;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueFrame frame;
while (av_fifo_read(sq->streams[i].fifo, &frame, 1) >= 0)
objpool_release(sq->pool, (void**)&frame);
av_fifo_freep2(&sq->streams[i].fifo);
}
av_freep(&sq->streams);
objpool_free(&sq->pool);
av_freep(psq);
}

@@ -0,0 +1,122 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of sync_queue.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_SYNC_QUEUE_H
#define FFTOOLS_SYNC_QUEUE_H
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/frame.h"
enum SyncQueueType {
SYNC_QUEUE_PACKETS,
SYNC_QUEUE_FRAMES,
};
typedef union SyncQueueFrame {
AVFrame *f;
AVPacket *p;
} SyncQueueFrame;
#define SQFRAME(frame) ((SyncQueueFrame){ .f = (frame) })
#define SQPKT(pkt) ((SyncQueueFrame){ .p = (pkt) })
typedef struct SyncQueue SyncQueue;
/**
* Allocate a sync queue of the given type.
*
* @param buf_size_us maximum duration that will be buffered in microseconds
*/
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us);
void sq_free(SyncQueue **sq);
/**
* Add a new stream to the sync queue.
*
* @param limiting whether the stream is limiting, i.e. no other stream can be
* longer than this one
* @return
* - a non-negative stream index on success
* - a negative error code on error
*/
int sq_add_stream(SyncQueue *sq, int limiting);
/**
* Set the timebase for the stream with index stream_idx. Should be called
* before sending any frames for this stream.
*/
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb);
/**
* Limit the number of output frames for stream with index stream_idx
* to max_frames.
*/
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx,
uint64_t max_frames);
/**
* Submit a frame for the stream with index stream_idx.
*
* On success, the sync queue takes ownership of the frame and will reset the
* contents of the supplied frame. On failure, the frame remains owned by the
* caller.
*
* Sending a frame with NULL contents marks the stream as finished.
*
* @return
* - 0 on success
* - AVERROR_EOF when no more frames should be submitted for this stream
 * - another negative error code on failure
*/
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame);
/**
* Read a frame from the queue.
*
* @param stream_idx index of the stream to read a frame for. May be -1, then
* try to read a frame from any stream that is ready for
* output.
* @param frame output frame will be written here on success. The frame is owned
* by the caller.
*
* @return
* - a non-negative index of the stream to which the returned frame belongs
* - AVERROR(EAGAIN) when more frames need to be submitted to the queue
* - AVERROR_EOF when no more frames will be available for this stream (for any
* stream if stream_idx is -1)
* - another negative error code on failure
*/
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame);
#endif // FFTOOLS_SYNC_QUEUE_H
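
For reference, a hypothetical sketch (not part of this commit) of mux-style synchronisation of two packet streams with the API above; the 90 kHz timebase and 10 s buffer limit are arbitrary:

```c
/* Packets go in per stream with sq_send(); sq_receive(sq, -1, ...) hands back
 * whichever packet may be output without running ahead of the other stream. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_sync_queue.h"

static int sync_queue_demo(AVPacket *in_pkt)
{
    SyncQueue *sq  = sq_alloc(SYNC_QUEUE_PACKETS, 10000000 /* 10 s in us */);
    AVPacket  *out = av_packet_alloc();
    int video, audio, ret;

    if (!sq || !out) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* both streams are "limiting": neither may overtake the other */
    video = sq_add_stream(sq, 1);
    audio = sq_add_stream(sq, 1);
    if (video < 0 || audio < 0) {
        ret = video < 0 ? video : audio;
        goto end;
    }
    sq_set_tb(sq, video, (AVRational){ 1, 90000 });
    sq_set_tb(sq, audio, (AVRational){ 1, 90000 });

    /* queue one packet for the video stream; the queue takes its contents */
    ret = sq_send(sq, video, SQPKT(in_pkt));
    if (ret < 0)
        goto end;

    /* EAGAIN here just means the audio stream has not caught up yet */
    ret = sq_receive(sq, -1, SQPKT(out));
    if (ret == AVERROR(EAGAIN))
        ret = 0;

end:
    av_packet_free(&out);
    sq_free(&sq);
    return ret < 0 ? ret : 0;
}
```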

@@ -0,0 +1,259 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of thread_queue.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"
#include "libavutil/thread.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"
enum {
FINISHED_SEND = (1 << 0),
FINISHED_RECV = (1 << 1),
};
typedef struct FifoElem {
void *obj;
unsigned int stream_idx;
} FifoElem;
struct ThreadQueue {
int *finished;
unsigned int nb_streams;
AVFifo *fifo;
ObjPool *obj_pool;
void (*obj_move)(void *dst, void *src);
pthread_mutex_t lock;
pthread_cond_t cond;
};
void tq_free(ThreadQueue **ptq)
{
ThreadQueue *tq = *ptq;
if (!tq)
return;
if (tq->fifo) {
FifoElem elem;
while (av_fifo_read(tq->fifo, &elem, 1) >= 0)
objpool_release(tq->obj_pool, &elem.obj);
}
av_fifo_freep2(&tq->fifo);
objpool_free(&tq->obj_pool);
av_freep(&tq->finished);
pthread_cond_destroy(&tq->cond);
pthread_mutex_destroy(&tq->lock);
av_freep(ptq);
}
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src))
{
ThreadQueue *tq;
int ret;
tq = av_mallocz(sizeof(*tq));
if (!tq)
return NULL;
ret = pthread_cond_init(&tq->cond, NULL);
if (ret) {
av_freep(&tq);
return NULL;
}
ret = pthread_mutex_init(&tq->lock, NULL);
if (ret) {
pthread_cond_destroy(&tq->cond);
av_freep(&tq);
return NULL;
}
tq->finished = av_calloc(nb_streams, sizeof(*tq->finished));
if (!tq->finished)
goto fail;
tq->nb_streams = nb_streams;
tq->fifo = av_fifo_alloc2(queue_size, sizeof(FifoElem), 0);
if (!tq->fifo)
goto fail;
tq->obj_pool = obj_pool;
tq->obj_move = obj_move;
return tq;
fail:
tq_free(&tq);
return NULL;
}
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data)
{
int *finished;
int ret;
av_assert0(stream_idx < tq->nb_streams);
finished = &tq->finished[stream_idx];
pthread_mutex_lock(&tq->lock);
if (*finished & FINISHED_SEND) {
ret = AVERROR(EINVAL);
goto finish;
}
while (!(*finished & FINISHED_RECV) && !av_fifo_can_write(tq->fifo))
pthread_cond_wait(&tq->cond, &tq->lock);
if (*finished & FINISHED_RECV) {
ret = AVERROR_EOF;
*finished |= FINISHED_SEND;
} else {
FifoElem elem = { .stream_idx = stream_idx };
ret = objpool_get(tq->obj_pool, &elem.obj);
if (ret < 0)
goto finish;
tq->obj_move(elem.obj, data);
ret = av_fifo_write(tq->fifo, &elem, 1);
av_assert0(ret >= 0);
pthread_cond_broadcast(&tq->cond);
}
finish:
pthread_mutex_unlock(&tq->lock);
return ret;
}
static int receive_locked(ThreadQueue *tq, int *stream_idx,
void *data)
{
FifoElem elem;
unsigned int nb_finished = 0;
if (av_fifo_read(tq->fifo, &elem, 1) >= 0) {
tq->obj_move(data, elem.obj);
objpool_release(tq->obj_pool, &elem.obj);
*stream_idx = elem.stream_idx;
return 0;
}
for (unsigned int i = 0; i < tq->nb_streams; i++) {
if (!(tq->finished[i] & FINISHED_SEND))
continue;
/* return EOF to the consumer at most once for each stream */
if (!(tq->finished[i] & FINISHED_RECV)) {
tq->finished[i] |= FINISHED_RECV;
*stream_idx = i;
return AVERROR_EOF;
}
nb_finished++;
}
return nb_finished == tq->nb_streams ? AVERROR_EOF : AVERROR(EAGAIN);
}
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data)
{
int ret;
*stream_idx = -1;
pthread_mutex_lock(&tq->lock);
while (1) {
ret = receive_locked(tq, stream_idx, data);
if (ret == AVERROR(EAGAIN)) {
pthread_cond_wait(&tq->cond, &tq->lock);
continue;
}
break;
}
if (ret == 0)
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
return ret;
}
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as send-finished;
* next time the consumer thread tries to read this stream it will get
* an EOF and recv-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_SEND;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as recv-finished;
* next time the producer thread tries to send for this stream, it will
* get an EOF and send-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_RECV;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}

@ -0,0 +1,94 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of thread_queue.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
 * by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_THREAD_QUEUE_H
#define FFTOOLS_THREAD_QUEUE_H
#include <string.h>
#include "fftools_objpool.h"
typedef struct ThreadQueue ThreadQueue;
/**
* Allocate a queue for sending data between threads.
*
* @param nb_streams number of streams for which a distinct EOF state is
* maintained
* @param queue_size number of items that can be stored in the queue without
* blocking
* @param obj_pool object pool that will be used to allocate items stored in the
* queue; the pool becomes owned by the queue
 * @param obj_move callback that moves the contents between two data pointers
*/
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src));
void tq_free(ThreadQueue **tq);
/**
* Send an item for the given stream to the queue.
*
* @param data the item to send, its contents will be moved using the callback
* provided to tq_alloc(); on failure the item will be left
* untouched
* @return
* - 0 the item was successfully sent
* - AVERROR(ENOMEM) could not allocate an item for writing to the FIFO
* - AVERROR(EINVAL) the sending side has previously been marked as finished
* - AVERROR_EOF the receiving side has marked the given stream as finished
*/
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data);
/**
* Mark the given stream finished from the sending side.
*/
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx);
/**
* Read the next item from the queue.
*
* @param stream_idx the index of the stream that was processed or -1 will be
* written here
* @param data the data item will be written here on success using the
* callback provided to tq_alloc()
* @return
* - 0 a data item was successfully read; *stream_idx contains a non-negative
* stream index
* - AVERROR_EOF When *stream_idx is non-negative, this signals that the sending
* side has marked the given stream as finished. This will happen at most once
* for each stream. When *stream_idx is -1, all streams are done.
*/
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data);
/**
* Mark the given stream finished from the receiving side.
*/
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx);
#endif // FFTOOLS_THREAD_QUEUE_H
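
For reference, a hypothetical sketch (not part of this commit) of building a bounded inter-thread packet queue from the primitives above; the helper names are illustrative:

```c
/* The pool supplies AVPackets; the move callback transfers ownership of
 * packet contents into and out of the queue without copying data. */
#include "libavcodec/packet.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"

/* move callback handed to tq_alloc() */
static void demo_pkt_move(void *dst, void *src)
{
    av_packet_move_ref(dst, src);
}

static ThreadQueue *demo_packet_queue_alloc(void)
{
    ObjPool *pool = objpool_alloc_packets();
    ThreadQueue *tq;

    if (!pool)
        return NULL;

    /* one logical stream, up to 8 queued packets before tq_send() blocks;
     * the queue takes ownership of the pool */
    tq = tq_alloc(1, 8, pool, demo_pkt_move);
    if (!tq)
        objpool_free(&pool);
    return tq;
}

/* Producer thread: tq_send(tq, 0, pkt) per packet, then tq_send_finish(tq, 0).
 * Consumer thread: loop on tq_receive(tq, &idx, pkt) until it returns AVERROR_EOF. */
```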

@@ -334,7 +334,7 @@ public class FFmpegKitConfig {
*/
private static void statistics(final long sessionId, final int videoFrameNumber,
final float videoFps, final float videoQuality, final long size,
final int time, final double bitrate, final double speed) {
final double time, final double bitrate, final double speed) {
final Statistics statistics = new Statistics(sessionId, videoFrameNumber, videoFps, videoQuality, size, time, bitrate, speed);
final Session session = getSession(sessionId);

@@ -85,7 +85,7 @@ public class NativeLoader {
}
static String loadVersion() {
final String version = "5.1";
final String version = "6.0";
if (isTestModeDisabled()) {
return FFmpegKitConfig.getVersion();

@@ -28,11 +28,11 @@ public class Statistics {
private float videoFps;
private float videoQuality;
private long size;
private int time;
private double time;
private double bitrate;
private double speed;
public Statistics(final long sessionId, final int videoFrameNumber, final float videoFps, final float videoQuality, final long size, final int time, final double bitrate, final double speed) {
public Statistics(final long sessionId, final int videoFrameNumber, final float videoFps, final float videoQuality, final long size, final double time, final double bitrate, final double speed) {
this.sessionId = sessionId;
this.videoFrameNumber = videoFrameNumber;
this.videoFps = videoFps;
@@ -83,11 +83,11 @@ public class Statistics {
this.size = size;
}
public int getTime() {
public double getTime() {
return time;
}
public void setTime(int time) {
public void setTime(double time) {
this.time = time;
}

@@ -69,7 +69,7 @@ include $(BUILD_SHARED_LIBRARY)
$(call import-module, cpu-features)
MY_SRC_FILES := ffmpegkit.c ffprobekit.c ffmpegkit_exception.c fftools_cmdutils.c fftools_ffmpeg.c fftools_ffprobe.c fftools_ffmpeg_mux.c fftools_ffmpeg_opt.c fftools_opt_common.c fftools_ffmpeg_hw.c fftools_ffmpeg_filter.c
MY_SRC_FILES := ffmpegkit.c ffprobekit.c ffmpegkit_exception.c fftools_cmdutils.c fftools_ffmpeg.c fftools_ffprobe.c fftools_ffmpeg_mux.c fftools_ffmpeg_mux_init.c fftools_ffmpeg_demux.c fftools_ffmpeg_opt.c fftools_opt_common.c fftools_ffmpeg_hw.c fftools_ffmpeg_filter.c fftools_objpool.c fftools_sync_queue.c fftools_thread_queue.c
ifeq ($(TARGET_PLATFORM),android-16)
MY_SRC_FILES += android_lts_support.c

@@ -38,7 +38,7 @@ PROJECT_NAME = "FFmpegKit iOS / macOS / tvOS API"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 5.1
PROJECT_NUMBER = 6.0
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

@@ -111,17 +111,17 @@ All libraries created can be found under the `prebuilt` directory.
- iOS
```yaml
pod 'ffmpeg-kit-ios-full', '~> 5.1'
pod 'ffmpeg-kit-ios-full', '~> 6.0'
```
- macOS
```yaml
pod 'ffmpeg-kit-macos-full', '~> 5.1'
pod 'ffmpeg-kit-macos-full', '~> 6.0'
```
- tvOS
```yaml
pod 'ffmpeg-kit-tvos-full', '~> 5.1'
pod 'ffmpeg-kit-tvos-full', '~> 6.0'
```
2. Execute synchronous `FFmpeg` commands.

@@ -1,6 +1,6 @@
# ffmpeg-kit 5.1 configure.ac
# ffmpeg-kit 6.0 configure.ac
AC_INIT([ffmpeg-kit], [5.1], [https://github.com/arthenica/ffmpeg-kit/issues/new])
AC_INIT([ffmpeg-kit], [6.0], [https://github.com/arthenica/ffmpeg-kit/issues/new])
AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_SRCDIR([src/FFmpegKit.m])

@@ -36,7 +36,7 @@
#import "SessionState.h"
/** Global library version */
NSString* const FFmpegKitVersion = @"5.1";
NSString* const FFmpegKitVersion = @"6.0";
/**
 * Prefix of named pipes created by ffmpeg-kit.
@@ -150,7 +150,7 @@ void addSessionToSessionHistory(id<Session> session) {
float _statisticsFps; // statistics fps
float _statisticsQuality; // statistics quality
int64_t _statisticsSize; // statistics size
int _statisticsTime; // statistics time
double _statisticsTime; // statistics time
double _statisticsBitrate; // statistics bitrate
double _statisticsSpeed; // statistics speed
}
@@ -173,7 +173,7 @@ void addSessionToSessionHistory(id<Session> session) {
fps:(float)videoFps
quality:(float)videoQuality
size:(int64_t)size
time:(int)time
time:(double)time
bitrate:(double)bitrate
speed:(double)speed {
self = [super init];
@@ -224,7 +224,7 @@ void addSessionToSessionHistory(id<Session> session) {
return _statisticsSize;
}
- (int)getStatisticsTime {
- (double)getStatisticsTime {
return _statisticsTime;
}
@@ -340,7 +340,7 @@ void logCallbackDataAdd(int level, AVBPrint *data) {
/**
 * Adds statistics data to the end of callback data list.
 */
void statisticsCallbackDataAdd(int frameNumber, float fps, float quality, int64_t size, int time, double bitrate, double speed) {
void statisticsCallbackDataAdd(int frameNumber, float fps, float quality, int64_t size, double time, double bitrate, double speed) {
CallbackData *callbackData = [[CallbackData alloc] init:globalSessionId videoFrameNumber:frameNumber fps:fps quality:quality size:size time:time bitrate:bitrate speed:speed];
[lock lock];
@@ -481,7 +481,7 @@ void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, v
 * @param bitrate output bit rate in kbits/s
 * @param speed processing speed = processed duration / operation duration
 */
void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, int time, double bitrate, double speed) {
void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, double time, double bitrate, double speed) {
statisticsCallbackDataAdd(frameNumber, fps, quality, size, time, bitrate, speed);
}
@@ -575,7 +575,7 @@ void process_log(long sessionId, int levelValue, AVBPrint* logMessage) {
}
}
void process_statistics(long sessionId, int videoFrameNumber, float videoFps, float videoQuality, long size, int time, double bitrate, double speed) {
void process_statistics(long sessionId, int videoFrameNumber, float videoFps, float videoQuality, long size, double time, double bitrate, double speed) {
Statistics *statistics = [[Statistics alloc] init:sessionId videoFrameNumber:videoFrameNumber videoFps:videoFps videoQuality:videoQuality size:size time:time bitrate:bitrate speed:speed];

@@ -23,12 +23,17 @@ libffmpegkit_la_SOURCES = \
ffmpegkit_exception.m \
fftools_cmdutils.c \
fftools_ffmpeg.c \
fftools_ffmpeg_demux.c \
fftools_ffmpeg_filter.c \
fftools_ffmpeg_hw.c \
fftools_ffmpeg_mux.c \
fftools_ffmpeg_mux_init.c \
fftools_ffmpeg_opt.c \
fftools_ffprobe.c \
fftools_opt_common.c
fftools_objpool.c \
fftools_opt_common.c \
fftools_sync_queue.c \
fftools_thread_queue.c
include_HEADERS = \
AbstractSession.h \
@@ -60,8 +65,12 @@ include_HEADERS = \
ffmpegkit_exception.h \
fftools_cmdutils.h \
fftools_ffmpeg.h \
fftools_ffmpeg_mux.h \
fftools_fopen_utf8.h \
fftools_opt_common.h
fftools_objpool.h \
fftools_opt_common.h \
fftools_sync_queue.h \
fftools_thread_queue.h
libffmpegkit_la_CFLAGS = $(CFLAGS)
libffmpegkit_la_OBJCFLAGS = $(CFLAGS)

@@ -147,12 +147,17 @@ am_libffmpegkit_la_OBJECTS = libffmpegkit_la-AbstractSession.lo \
libffmpegkit_la-ffmpegkit_exception.lo \
libffmpegkit_la-fftools_cmdutils.lo \
libffmpegkit_la-fftools_ffmpeg.lo \
libffmpegkit_la-fftools_ffmpeg_demux.lo \
libffmpegkit_la-fftools_ffmpeg_filter.lo \
libffmpegkit_la-fftools_ffmpeg_hw.lo \
libffmpegkit_la-fftools_ffmpeg_mux.lo \
libffmpegkit_la-fftools_ffmpeg_mux_init.lo \
libffmpegkit_la-fftools_ffmpeg_opt.lo \
libffmpegkit_la-fftools_ffprobe.lo \
libffmpegkit_la-fftools_opt_common.lo
libffmpegkit_la-fftools_objpool.lo \
libffmpegkit_la-fftools_opt_common.lo \
libffmpegkit_la-fftools_sync_queue.lo \
libffmpegkit_la-fftools_thread_queue.lo
libffmpegkit_la_OBJECTS = $(am_libffmpegkit_la_OBJECTS)
AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
@@ -197,12 +202,17 @@ am__depfiles_remade = ./$(DEPDIR)/libffmpegkit_la-AbstractSession.Plo \
./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
@@ -415,12 +425,17 @@ libffmpegkit_la_SOURCES = \
ffmpegkit_exception.m \
fftools_cmdutils.c \
fftools_ffmpeg.c \
fftools_ffmpeg_demux.c \
fftools_ffmpeg_filter.c \
fftools_ffmpeg_hw.c \
fftools_ffmpeg_mux.c \
fftools_ffmpeg_mux_init.c \
fftools_ffmpeg_opt.c \
fftools_ffprobe.c \
fftools_opt_common.c
fftools_objpool.c \
fftools_opt_common.c \
fftools_sync_queue.c \
fftools_thread_queue.c
include_HEADERS = \
AbstractSession.h \
@@ -452,8 +467,12 @@ include_HEADERS = \
ffmpegkit_exception.h \
fftools_cmdutils.h \
fftools_ffmpeg.h \
fftools_ffmpeg_mux.h \
fftools_fopen_utf8.h \
fftools_opt_common.h
fftools_objpool.h \
fftools_opt_common.h \
fftools_sync_queue.h \
fftools_thread_queue.h
libffmpegkit_la_CFLAGS = $(CFLAGS)
libffmpegkit_la_OBJCFLAGS = $(CFLAGS)
@@ -557,12 +576,17 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo@am__quote@ # am--include-marker
$(am__depfiles_remade):
@$(MKDIR_P) $(@D)
@@ -608,6 +632,13 @@ libffmpegkit_la-fftools_ffmpeg.lo: fftools_ffmpeg.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg.lo `test -f 'fftools_ffmpeg.c' || echo '$(srcdir)/'`fftools_ffmpeg.c
libffmpegkit_la-fftools_ffmpeg_demux.lo: fftools_ffmpeg_demux.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_demux.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_demux.lo `test -f 'fftools_ffmpeg_demux.c' || echo '$(srcdir)/'`fftools_ffmpeg_demux.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_ffmpeg_demux.c' object='libffmpegkit_la-fftools_ffmpeg_demux.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_demux.lo `test -f 'fftools_ffmpeg_demux.c' || echo '$(srcdir)/'`fftools_ffmpeg_demux.c
libffmpegkit_la-fftools_ffmpeg_filter.lo: fftools_ffmpeg_filter.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_filter.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_filter.lo `test -f 'fftools_ffmpeg_filter.c' || echo '$(srcdir)/'`fftools_ffmpeg_filter.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
@@ -629,6 +660,13 @@ libffmpegkit_la-fftools_ffmpeg_mux.lo: fftools_ffmpeg_mux.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_mux.lo `test -f 'fftools_ffmpeg_mux.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux.c
libffmpegkit_la-fftools_ffmpeg_mux_init.lo: fftools_ffmpeg_mux_init.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_mux_init.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_mux_init.lo `test -f 'fftools_ffmpeg_mux_init.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux_init.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_ffmpeg_mux_init.c' object='libffmpegkit_la-fftools_ffmpeg_mux_init.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_mux_init.lo `test -f 'fftools_ffmpeg_mux_init.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux_init.c
libffmpegkit_la-fftools_ffmpeg_opt.lo: fftools_ffmpeg_opt.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_opt.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_opt.lo `test -f 'fftools_ffmpeg_opt.c' || echo '$(srcdir)/'`fftools_ffmpeg_opt.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
@@ -643,6 +681,13 @@ libffmpegkit_la-fftools_ffprobe.lo: fftools_ffprobe.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffprobe.lo `test -f 'fftools_ffprobe.c' || echo '$(srcdir)/'`fftools_ffprobe.c
libffmpegkit_la-fftools_objpool.lo: fftools_objpool.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_objpool.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_objpool.Tpo -c -o libffmpegkit_la-fftools_objpool.lo `test -f 'fftools_objpool.c' || echo '$(srcdir)/'`fftools_objpool.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_objpool.Tpo $(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_objpool.c' object='libffmpegkit_la-fftools_objpool.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_objpool.lo `test -f 'fftools_objpool.c' || echo '$(srcdir)/'`fftools_objpool.c
libffmpegkit_la-fftools_opt_common.lo: fftools_opt_common.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_opt_common.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
@@ -650,6 +695,20 @@ libffmpegkit_la-fftools_opt_common.lo: fftools_opt_common.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c
libffmpegkit_la-fftools_sync_queue.lo: fftools_sync_queue.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_sync_queue.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Tpo -c -o libffmpegkit_la-fftools_sync_queue.lo `test -f 'fftools_sync_queue.c' || echo '$(srcdir)/'`fftools_sync_queue.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Tpo $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_sync_queue.c' object='libffmpegkit_la-fftools_sync_queue.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_sync_queue.lo `test -f 'fftools_sync_queue.c' || echo '$(srcdir)/'`fftools_sync_queue.c
libffmpegkit_la-fftools_thread_queue.lo: fftools_thread_queue.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_thread_queue.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Tpo -c -o libffmpegkit_la-fftools_thread_queue.lo `test -f 'fftools_thread_queue.c' || echo '$(srcdir)/'`fftools_thread_queue.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Tpo $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_thread_queue.c' object='libffmpegkit_la-fftools_thread_queue.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_thread_queue.lo `test -f 'fftools_thread_queue.c' || echo '$(srcdir)/'`fftools_thread_queue.c
.m.o:
@am__fastdepOBJC_TRUE@ $(AM_V_OBJC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
@am__fastdepOBJC_TRUE@ $(OBJCCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@ -974,12 +1033,17 @@ distclean: distclean-am
-rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
@ -1045,12 +1109,17 @@ maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic

@ -27,7 +27,7 @@
*/
@interface Statistics : NSObject
- (instancetype)init:(long)sessionId videoFrameNumber:(int)videoFrameNumber videoFps:(float)videoFps videoQuality:(float)videoQuality size:(int64_t)size time:(int)time bitrate:(double)bitrate speed:(double)speed;
- (instancetype)init:(long)sessionId videoFrameNumber:(int)videoFrameNumber videoFps:(float)videoFps videoQuality:(float)videoQuality size:(int64_t)size time:(double)time bitrate:(double)bitrate speed:(double)speed;
- (long)getSessionId;
@ -39,7 +39,7 @@
- (long)getSize;
- (int)getTime;
- (double)getTime;
- (double)getBitrate;

@ -25,12 +25,12 @@
float _videoFps;
float _videoQuality;
long _size;
int _time;
double _time;
double _bitrate;
double _speed;
}
- (instancetype)init:(long)sessionId videoFrameNumber:(int)videoFrameNumber videoFps:(float)videoFps videoQuality:(float)videoQuality size:(int64_t)size time:(int)time bitrate:(double)bitrate speed:(double)speed {
- (instancetype)init:(long)sessionId videoFrameNumber:(int)videoFrameNumber videoFps:(float)videoFps videoQuality:(float)videoQuality size:(int64_t)size time:(double)time bitrate:(double)bitrate speed:(double)speed {
self = [super init];
if (self) {
_sessionId = sessionId;
@ -66,7 +66,7 @@
return _size;
}
- (int)getTime {
- (double)getTime {
return _time;
}

@ -2,6 +2,7 @@
* Various utilities for command line tools
* Copyright (c) 2000-2003 Fabrice Bellard
* Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
*
* 09.2022
@ -129,11 +136,18 @@ void register_exit(void (*cb)(int ret))
program_exit = cb;
}
void report_and_exit(int ret)
{
av_log(NULL, AV_LOG_FATAL, "%s\n", av_err2str(ret));
exit_program(AVUNERROR(ret));
}
void exit_program(int ret)
{
if (program_exit)
program_exit(ret);
// FFmpegKit
// exit disabled and replaced with longjmp, exit value stored in longjmp_value
// exit(ret);
longjmp_value = ret;
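The hunk above keeps ffmpeg-kit's behaviour of not terminating the process: exit_program() records the exit code and control returns to the session runner instead of calling exit(). A minimal sketch of that pattern, assuming a hypothetical jump buffer and runner (session_jmp_buf, run_session_sketch) that are not part of the actual ffmpeg-kit code:

    #include <setjmp.h>

    static jmp_buf session_jmp_buf;       /* hypothetical per-session jump target */
    static int longjmp_value;

    static void exit_program_sketch(int ret)
    {
        longjmp_value = ret;              /* remember the would-be exit code */
        longjmp(session_jmp_buf, 1);      /* unwind back to the session runner */
    }

    static int run_session_sketch(void)
    {
        if (setjmp(session_jmp_buf) != 0)
            return longjmp_value;         /* a nested "exit" landed here */
        /* ... run the ffmpeg pipeline, which may call exit_program_sketch() ... */
        return 0;
    }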
@ -696,7 +710,7 @@ static void init_parse_context(OptionParseContext *octx,
octx->nb_groups = nb_groups;
octx->groups = av_calloc(octx->nb_groups, sizeof(*octx->groups));
if (!octx->groups)
exit_program(1);
report_and_exit(AVERROR(ENOMEM));
for (i = 0; i < octx->nb_groups; i++)
octx->groups[i].group_def = &groups[i];
@ -843,12 +857,7 @@ do { \
void print_error(const char *filename, int err)
{
char errbuf[128];
const char *errbuf_ptr = errbuf;
if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
errbuf_ptr = strerror(AVUNERROR(err));
av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, av_err2str(err));
}
int read_yesno(void) int read_yesno(void)
@ -971,7 +980,7 @@ AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
break;
}
while ((t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX))) {
while ((t = av_dict_iterate(opts, t))) {
const AVClass *priv_class;
char *p = strchr(t->key, ':');
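The hunk above is the FFmpeg 6.0 dictionary-iteration migration: the av_dict_get(..., "", prev, AV_DICT_IGNORE_SUFFIX) idiom is replaced by av_dict_iterate(). A small self-contained sketch of the new idiom (the dump_dict_sketch helper is illustrative, not part of the patch):

    #include <stdio.h>
    #include "libavutil/dict.h"

    static void dump_dict_sketch(const AVDictionary *opts)
    {
        const AVDictionaryEntry *t = NULL;
        /* av_dict_iterate() returns each entry in turn, then NULL */
        while ((t = av_dict_iterate(opts, t)))
            printf("%s=%s\n", t->key, t->value);
    }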
@ -1009,11 +1018,8 @@ AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
if (!s->nb_streams)
return NULL;
opts = av_calloc(s->nb_streams, sizeof(*opts));
if (!opts) {
av_log(NULL, AV_LOG_ERROR,
"Could not alloc memory for stream options.\n");
exit_program(1);
}
if (!opts)
report_and_exit(AVERROR(ENOMEM));
for (i = 0; i < s->nb_streams; i++)
opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
s, s->streams[i], NULL);
@ -1028,10 +1034,8 @@ void *grow_array(void *array, int elem_size, int *size, int new_size)
}
if (*size < new_size) {
uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
if (!tmp) {
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n");
exit_program(1);
}
if (!tmp)
report_and_exit(AVERROR(ENOMEM));
memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
*size = new_size;
return tmp;
@ -1044,10 +1048,8 @@ void *allocate_array_elem(void *ptr, size_t elem_size, int *nb_elems)
void *new_elem;
if (!(new_elem = av_mallocz(elem_size)) ||
av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0) {
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n");
exit_program(1);
}
av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0)
report_and_exit(AVERROR(ENOMEM));
return new_elem;
}

@ -2,6 +2,7 @@
* Various utilities for command line tools
* copyright (c) 2003 Fabrice Bellard
* copyright (c) 2018-2022 Taner Sener
* copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
*
* 09.2022
@ -95,6 +102,17 @@ extern __thread int find_stream_info;
*/
void register_exit(void (*cb)(int ret));
/**
* Reports an error corresponding to the provided
* AVERROR code and calls exit_program() with the
* corresponding POSIX error code.
* @note ret must be an AVERROR-value of a POSIX error code
* (i.e. AVERROR(EFOO) and not AVERROR_FOO).
* library functions can return both, so call this only
* with AVERROR(EFOO) of your own.
*/
void report_and_exit(int ret) av_noreturn;
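A quick illustration of the contract spelled out in the comment above, under the assumption that the caller produced the error itself (the alloc_or_die_sketch helper is purely illustrative):

    #include "libavutil/error.h"
    #include "libavutil/mem.h"

    static void alloc_or_die_sketch(void)
    {
        void *buf = av_malloc(4096);
        if (!buf)
            report_and_exit(AVERROR(ENOMEM)); /* AVERROR(EFOO) form, not AVERROR_FOO */
        av_free(buf);
    }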
/**
* Wraps exit with a program-specific cleanup routine.
*/
@ -232,11 +250,6 @@ void show_help_children(const AVClass *clazz, int flags);
void show_help_default_ffmpeg(const char *opt, const char *arg);
void show_help_default_ffprobe(const char *opt, const char *arg);
/**
* Generic -h handler common to all fftools.
*/
int show_help(void *optctx, const char *opt, const char *arg);
/**
* Parse the command line arguments.
*

File diff suppressed because it is too large

@ -1,6 +1,7 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
@ -22,6 +23,16 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated to
* ffmpeg_mux.h
* - "class" member field renamed as clazz
* - time field in set_report_callback updated as double
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
*
* 09.2022
@ -59,11 +70,13 @@
#include "config.h" #include "config.h"
#include <stdatomic.h>
#include <stdint.h> #include <stdint.h>
#include <stdio.h> #include <stdio.h>
#include <signal.h> #include <signal.h>
#include "fftools_cmdutils.h" #include "fftools_cmdutils.h"
#include "fftools_sync_queue.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
@ -85,6 +98,12 @@
#include "libswresample/swresample.h" #include "libswresample/swresample.h"
// deprecated features
#define FFMPEG_OPT_PSNR 1
#define FFMPEG_OPT_MAP_CHANNEL 1
#define FFMPEG_OPT_MAP_SYNC 1
#define FFMPEG_ROTATION_METADATA 1
enum VideoSyncMethod { enum VideoSyncMethod {
VSYNC_AUTO = -1, VSYNC_AUTO = -1,
VSYNC_PASSTHROUGH, VSYNC_PASSTHROUGH,
@ -113,15 +132,15 @@ typedef struct StreamMap {
int disabled; /* 1 is this mapping is disabled by a negative map */ int disabled; /* 1 is this mapping is disabled by a negative map */
int file_index; int file_index;
int stream_index; int stream_index;
int sync_file_index;
int sync_stream_index;
char *linklabel; /* name of an output link, for mapping lavfi outputs */ char *linklabel; /* name of an output link, for mapping lavfi outputs */
} StreamMap; } StreamMap;
#if FFMPEG_OPT_MAP_CHANNEL
typedef struct { typedef struct {
int file_idx, stream_idx, channel_idx; // input int file_idx, stream_idx, channel_idx; // input
int ofile_idx, ostream_idx; // output int ofile_idx, ostream_idx; // output
} AudioChannelMap; } AudioChannelMap;
#endif
typedef struct OptionsContext { typedef struct OptionsContext {
OptionGroup *g; OptionGroup *g;
@ -157,6 +176,7 @@ typedef struct OptionsContext {
int accurate_seek; int accurate_seek;
int thread_queue_size; int thread_queue_size;
int input_sync_ref; int input_sync_ref;
int find_stream_info;
SpecifierOpt *ts_scale; SpecifierOpt *ts_scale;
int nb_ts_scale; int nb_ts_scale;
@ -174,11 +194,10 @@ typedef struct OptionsContext {
/* output options */ /* output options */
StreamMap *stream_maps; StreamMap *stream_maps;
int nb_stream_maps; int nb_stream_maps;
#if FFMPEG_OPT_MAP_CHANNEL
AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */ AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */
int nb_audio_channel_maps; /* number of (valid) -map_channel settings */ int nb_audio_channel_maps; /* number of (valid) -map_channel settings */
int metadata_global_manual; #endif
int metadata_streams_manual;
int metadata_chapters_manual;
const char **attachments; const char **attachments;
int nb_attachments; int nb_attachments;
@ -186,9 +205,10 @@ typedef struct OptionsContext {
int64_t recording_time; int64_t recording_time;
int64_t stop_time; int64_t stop_time;
uint64_t limit_filesize; int64_t limit_filesize;
float mux_preload; float mux_preload;
float mux_max_delay; float mux_max_delay;
float shortest_buf_duration;
int shortest; int shortest;
int bitexact; int bitexact;
@ -221,6 +241,12 @@ typedef struct OptionsContext {
int nb_force_fps; int nb_force_fps;
SpecifierOpt *frame_aspect_ratios; SpecifierOpt *frame_aspect_ratios;
int nb_frame_aspect_ratios; int nb_frame_aspect_ratios;
SpecifierOpt *display_rotations;
int nb_display_rotations;
SpecifierOpt *display_hflips;
int nb_display_hflips;
SpecifierOpt *display_vflips;
int nb_display_vflips;
SpecifierOpt *rc_overrides; SpecifierOpt *rc_overrides;
int nb_rc_overrides; int nb_rc_overrides;
SpecifierOpt *intra_matrices; SpecifierOpt *intra_matrices;
@ -247,6 +273,8 @@ typedef struct OptionsContext {
int nb_reinit_filters; int nb_reinit_filters;
SpecifierOpt *fix_sub_duration; SpecifierOpt *fix_sub_duration;
int nb_fix_sub_duration; int nb_fix_sub_duration;
SpecifierOpt *fix_sub_duration_heartbeat;
int nb_fix_sub_duration_heartbeat;
SpecifierOpt *canvas_sizes; SpecifierOpt *canvas_sizes;
int nb_canvas_sizes; int nb_canvas_sizes;
SpecifierOpt *pass; SpecifierOpt *pass;
@ -275,6 +303,18 @@ typedef struct OptionsContext {
int nb_autoscale; int nb_autoscale;
SpecifierOpt *bits_per_raw_sample; SpecifierOpt *bits_per_raw_sample;
int nb_bits_per_raw_sample; int nb_bits_per_raw_sample;
SpecifierOpt *enc_stats_pre;
int nb_enc_stats_pre;
SpecifierOpt *enc_stats_post;
int nb_enc_stats_post;
SpecifierOpt *mux_stats;
int nb_mux_stats;
SpecifierOpt *enc_stats_pre_fmt;
int nb_enc_stats_pre_fmt;
SpecifierOpt *enc_stats_post_fmt;
int nb_enc_stats_post_fmt;
SpecifierOpt *mux_stats_fmt;
int nb_mux_stats_fmt;
} OptionsContext; } OptionsContext;
typedef struct InputFilter { typedef struct InputFilter {
@ -350,12 +390,22 @@ typedef struct InputStream {
#define DECODING_FOR_OST 1 #define DECODING_FOR_OST 1
#define DECODING_FOR_FILTER 2 #define DECODING_FOR_FILTER 2
int processing_needed; /* non zero if the packets must be processed */ int processing_needed; /* non zero if the packets must be processed */
// should attach FrameData as opaque_ref after decoding
int want_frame_data;
/**
* Codec parameters - to be used by the decoding/streamcopy code.
* st->codecpar should not be accessed, because it may be modified
* concurrently by the demuxing thread.
*/
AVCodecParameters *par;
AVCodecContext *dec_ctx; AVCodecContext *dec_ctx;
const AVCodec *dec; const AVCodec *dec;
AVFrame *decoded_frame; AVFrame *decoded_frame;
AVPacket *pkt; AVPacket *pkt;
AVRational framerate_guessed;
int64_t prev_pkt_pts; int64_t prev_pkt_pts;
int64_t start; /* time when read started */ int64_t start; /* time when read started */
/* predicted dts of the next packet read for this stream or (when there are /* predicted dts of the next packet read for this stream or (when there are
@ -368,6 +418,12 @@ typedef struct InputStream {
int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units) int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units)
int wrap_correction_done; int wrap_correction_done;
// the value of AVCodecParserContext.repeat_pict from the AVStream parser
// for the last packet returned from ifile_get_packet()
// -1 if unknown
// FIXME: this is a hack, the avstream parser should not be used
int last_pkt_repeat_pict;
int64_t filter_in_rescale_delta_last; int64_t filter_in_rescale_delta_last;
int64_t min_pts; /* pts with the smallest value in a current stream */ int64_t min_pts; /* pts with the smallest value in a current stream */
@ -417,12 +473,8 @@ typedef struct InputStream {
char *hwaccel_device; char *hwaccel_device;
enum AVPixelFormat hwaccel_output_format; enum AVPixelFormat hwaccel_output_format;
/* hwaccel context */
void *hwaccel_ctx;
void (*hwaccel_uninit)(AVCodecContext *s);
int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame); int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
enum AVPixelFormat hwaccel_pix_fmt; enum AVPixelFormat hwaccel_pix_fmt;
enum AVPixelFormat hwaccel_retrieved_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets read // combined size of all the packets read
@ -439,38 +491,46 @@ typedef struct InputStream {
int got_output; int got_output;
} InputStream; } InputStream;
typedef struct LastFrameDuration {
int stream_idx;
int64_t duration;
} LastFrameDuration;
typedef struct InputFile { typedef struct InputFile {
int index;
AVFormatContext *ctx; AVFormatContext *ctx;
int eof_reached; /* true if eof reached */ int eof_reached; /* true if eof reached */
int eagain; /* true if last read attempt returned EAGAIN */ int eagain; /* true if last read attempt returned EAGAIN */
int ist_index; /* index of first stream in input_streams */
int loop; /* set number of times input stream should be looped */
int64_t duration; /* actual duration of the longest stream in a file
at the moment when looping happens */
AVRational time_base; /* time base of the duration */
int64_t input_ts_offset; int64_t input_ts_offset;
int input_sync_ref; int input_sync_ref;
/**
* Effective format start time based on enabled streams.
*/
int64_t start_time_effective;
int64_t ts_offset; int64_t ts_offset;
/**
* Extra timestamp offset added by discontinuity handling.
*/
int64_t ts_offset_discont;
int64_t last_ts; int64_t last_ts;
int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */ int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */
int64_t recording_time; int64_t recording_time;
int nb_streams; /* number of stream that ffmpeg is aware of; may be different
from ctx.nb_streams if new streams appear during av_read_frame() */ /* streams that ffmpeg is aware of;
int nb_streams_warn; /* number of streams that the user was warned of */ * there may be extra streams in ctx that are not mapped to an InputStream
* if new streams appear dynamically during demuxing */
InputStream **streams;
int nb_streams;
int rate_emu; int rate_emu;
float readrate; float readrate;
int accurate_seek; int accurate_seek;
AVPacket *pkt; /* when looping the input file, this queue is used by decoders to report
* the last frame duration back to the demuxer thread */
#if HAVE_THREADS AVThreadMessageQueue *audio_duration_queue;
AVThreadMessageQueue *in_thread_queue; int audio_duration_queue_size;
pthread_t thread; /* thread reading from this file */
int non_blocking; /* reading packets from the thread should not block */
int joined; /* the thread has been joined */
int thread_queue_size; /* maximum number of queued packets */
#endif
} InputFile; } InputFile;
enum forced_keyframes_const { enum forced_keyframes_const {
@ -485,6 +545,41 @@ enum forced_keyframes_const {
#define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0) #define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0)
#define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1) #define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1)
enum EncStatsType {
ENC_STATS_LITERAL = 0,
ENC_STATS_FILE_IDX,
ENC_STATS_STREAM_IDX,
ENC_STATS_FRAME_NUM,
ENC_STATS_FRAME_NUM_IN,
ENC_STATS_TIMEBASE,
ENC_STATS_TIMEBASE_IN,
ENC_STATS_PTS,
ENC_STATS_PTS_TIME,
ENC_STATS_PTS_IN,
ENC_STATS_PTS_TIME_IN,
ENC_STATS_DTS,
ENC_STATS_DTS_TIME,
ENC_STATS_SAMPLE_NUM,
ENC_STATS_NB_SAMPLES,
ENC_STATS_PKT_SIZE,
ENC_STATS_BITRATE,
ENC_STATS_AVG_BITRATE,
};
typedef struct EncStatsComponent {
enum EncStatsType type;
uint8_t *str;
size_t str_len;
} EncStatsComponent;
typedef struct EncStats {
EncStatsComponent *components;
int nb_components;
AVIOContext *io;
} EncStats;
extern const char *const forced_keyframes_const_names[]; extern const char *const forced_keyframes_const_names[];
typedef enum { typedef enum {
@ -492,68 +587,92 @@ typedef enum {
MUXER_FINISHED = 2, MUXER_FINISHED = 2,
} OSTFinished ; } OSTFinished ;
enum {
KF_FORCE_SOURCE = 1,
KF_FORCE_SOURCE_NO_DROP = 2,
};
typedef struct KeyframeForceCtx {
int type;
int64_t ref_pts;
// timestamps of the forced keyframes, in AV_TIME_BASE_Q
int64_t *pts;
int nb_pts;
int index;
AVExpr *pexpr;
double expr_const_values[FKF_NB];
int dropped_keyframe;
} KeyframeForceCtx;
typedef struct OutputStream { typedef struct OutputStream {
const AVClass *clazz;
int file_index; /* file index */ int file_index; /* file index */
int index; /* stream index in the output file */ int index; /* stream index in the output file */
int source_index; /* InputStream index */
/* input stream that is the source for this output stream;
* may be NULL for streams with no well-defined source, e.g.
* attachments or outputs from complex filtergraphs */
InputStream *ist;
AVStream *st; /* stream in the output file */ AVStream *st; /* stream in the output file */
int encoding_needed; /* true if encoding needed for this stream */ /* number of frames emitted by the video-encoding sync code */
int64_t frame_number; int64_t vsync_frame_number;
/* input pts and corresponding output pts /* predicted pts of the next frame to be encoded
for A/V sync */ * audio/video encoding only */
struct InputStream *sync_ist; /* input stream to sync against */ int64_t next_pts;
int64_t sync_opts; /* output frame counter, could be changed to some true timestamp */ // FIXME look at frame_number /* dts of the last packet sent to the muxing queue, in AV_TIME_BASE_Q */
/* pts of the first frame encoded for this stream, used for limiting
* recording time */
int64_t first_pts;
/* dts of the last packet sent to the muxer */
int64_t last_mux_dts; int64_t last_mux_dts;
/* pts of the last frame received from the filters, in AV_TIME_BASE_Q */
int64_t last_filter_pts;
// timestamp from which the streamcopied streams should start,
// in AV_TIME_BASE_Q;
// everything before it should be discarded
int64_t ts_copy_start;
// the timebase of the packets sent to the muxer // the timebase of the packets sent to the muxer
AVRational mux_timebase; AVRational mux_timebase;
AVRational enc_timebase; AVRational enc_timebase;
AVBSFContext *bsf_ctx;
AVCodecContext *enc_ctx; AVCodecContext *enc_ctx;
AVCodecParameters *ref_par; /* associated input codec parameters with encoders options applied */
const AVCodec *enc;
int64_t max_frames;
AVFrame *filtered_frame; AVFrame *filtered_frame;
AVFrame *last_frame; AVFrame *last_frame;
AVFrame *sq_frame;
AVPacket *pkt; AVPacket *pkt;
int64_t last_dropped; int64_t last_dropped;
int64_t last_nb0_frames[3]; int64_t last_nb0_frames[3];
void *hwaccel_ctx;
/* video only */ /* video only */
AVRational frame_rate; AVRational frame_rate;
AVRational max_frame_rate; AVRational max_frame_rate;
enum VideoSyncMethod vsync_method; enum VideoSyncMethod vsync_method;
int is_cfr; int is_cfr;
const char *fps_mode;
int force_fps; int force_fps;
int top_field_first; int top_field_first;
#if FFMPEG_ROTATION_METADATA
int rotate_overridden; int rotate_overridden;
#endif
int autoscale; int autoscale;
int bitexact;
int bits_per_raw_sample; int bits_per_raw_sample;
#if FFMPEG_ROTATION_METADATA
double rotate_override_value; double rotate_override_value;
#endif
AVRational frame_aspect_ratio; AVRational frame_aspect_ratio;
/* forced key frames */ KeyframeForceCtx kf;
int64_t forced_kf_ref_pts;
int64_t *forced_kf_pts;
int forced_kf_count;
int forced_kf_index;
char *forced_keyframes;
AVExpr *forced_keyframes_pexpr;
double forced_keyframes_expr_const_values[FKF_NB];
int dropped_keyframe;
/* audio only */ /* audio only */
#if FFMPEG_OPT_MAP_CHANNEL
int *audio_channels_map; /* list of the channels id to pick from the source stream */ int *audio_channels_map; /* list of the channels id to pick from the source stream */
int audio_channels_mapped; /* number of channels in audio_channels_map */ int audio_channels_mapped; /* number of channels in audio_channels_map */
#endif
char *logfile_prefix; char *logfile_prefix;
FILE *logfile; FILE *logfile;
@ -569,7 +688,6 @@ typedef struct OutputStream {
char *apad; char *apad;
OSTFinished finished; /* no more packets should be written for this stream */ OSTFinished finished; /* no more packets should be written for this stream */
int unavailable; /* true if the steram is unavailable (possibly temporarily) */ int unavailable; /* true if the steram is unavailable (possibly temporarily) */
int stream_copy;
// init_output_stream() has been called for this stream // init_output_stream() has been called for this stream
// The encoder and the bitstream filters have been initialized and the stream // The encoder and the bitstream filters have been initialized and the stream
@ -582,15 +700,16 @@ typedef struct OutputStream {
int streamcopy_started; int streamcopy_started;
int copy_initial_nonkeyframes; int copy_initial_nonkeyframes;
int copy_prior_start; int copy_prior_start;
char *disposition;
int keep_pix_fmt; int keep_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets written // combined size of all the packets sent to the muxer
uint64_t data_size; uint64_t data_size_mux;
// combined size of all the packets received from the encoder
uint64_t data_size_enc;
// number of packets send to the muxer // number of packets send to the muxer
uint64_t packets_written; atomic_uint_least64_t packets_written;
// number of frames/samples sent to the encoder // number of frames/samples sent to the encoder
uint64_t frames_encoded; uint64_t frames_encoded;
uint64_t samples_encoded; uint64_t samples_encoded;
@ -600,51 +719,48 @@ typedef struct OutputStream {
/* packet quality factor */ /* packet quality factor */
int quality; int quality;
int max_muxing_queue_size;
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* packet picture type */ /* packet picture type */
int pict_type; int pict_type;
/* frame encode sum of squared error values */ /* frame encode sum of squared error values */
int64_t error[4]; int64_t error[4];
int sq_idx_encode;
int sq_idx_mux;
EncStats enc_stats_pre;
EncStats enc_stats_post;
/*
* bool on whether this stream should be utilized for splitting
* subtitles utilizing fix_sub_duration at random access points.
*/
unsigned int fix_sub_duration_heartbeat;
} OutputStream; } OutputStream;
typedef struct OutputFile { typedef struct OutputFile {
const AVClass *clazz;
int index; int index;
const AVOutputFormat *format; const AVOutputFormat *format;
const char *url;
OutputStream **streams;
int nb_streams;
SyncQueue *sq_encode;
AVFormatContext *ctx;
AVDictionary *opts;
int ost_index; /* index of the first stream in output_streams */
int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units
int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units
uint64_t limit_filesize; /* filesize limit expressed in bytes */
int shortest; int shortest;
int bitexact;
int header_written;
} OutputFile; } OutputFile;
extern __thread InputStream **input_streams;
extern __thread int nb_input_streams;
extern __thread InputFile **input_files; extern __thread InputFile **input_files;
extern __thread int nb_input_files; extern __thread int nb_input_files;
extern __thread OutputStream **output_streams;
extern __thread int nb_output_streams;
extern __thread OutputFile **output_files; extern __thread OutputFile **output_files;
extern __thread int nb_output_files; extern __thread int nb_output_files;
@ -658,13 +774,10 @@ extern __thread float audio_drift_threshold;
extern __thread float dts_delta_threshold; extern __thread float dts_delta_threshold;
extern __thread float dts_error_threshold; extern __thread float dts_error_threshold;
extern __thread int audio_volume;
extern __thread int audio_sync_method;
extern __thread enum VideoSyncMethod video_sync_method; extern __thread enum VideoSyncMethod video_sync_method;
extern __thread float frame_drop_threshold; extern __thread float frame_drop_threshold;
extern __thread int do_benchmark; extern __thread int do_benchmark;
extern __thread int do_benchmark_all; extern __thread int do_benchmark_all;
extern __thread int do_deinterlace;
extern __thread int do_hex_dump; extern __thread int do_hex_dump;
extern __thread int do_pkt_dump; extern __thread int do_pkt_dump;
extern __thread int copy_ts; extern __thread int copy_ts;
@ -677,7 +790,6 @@ extern __thread int print_stats;
extern __thread int64_t stats_period; extern __thread int64_t stats_period;
extern __thread int qp_hist; extern __thread int qp_hist;
extern __thread int stdin_interaction; extern __thread int stdin_interaction;
extern __thread int frame_bits_per_raw_sample;
extern __thread AVIOContext *progress_avio; extern __thread AVIOContext *progress_avio;
extern __thread float max_error_rate; extern __thread float max_error_rate;
@ -688,15 +800,20 @@ extern __thread int auto_conversion_filters;
extern __thread const AVIOInterruptCB int_cb; extern __thread const AVIOInterruptCB int_cb;
#if CONFIG_QSV
extern __thread char *qsv_device;
#endif
extern __thread HWDevice *filter_hw_device; extern __thread HWDevice *filter_hw_device;
extern __thread int want_sdp;
extern __thread unsigned nb_output_dumped; extern __thread unsigned nb_output_dumped;
extern __thread int main_ffmpeg_return_code; extern __thread int main_ffmpeg_return_code;
extern __thread int ignore_unknown_streams;
extern __thread int copy_unknown_streams;
extern __thread int recast_media;
#if FFMPEG_OPT_PSNR
extern __thread int do_psnr;
#endif
void term_init(void); void term_init(void);
void term_exit(void); void term_exit(void);
@ -705,7 +822,12 @@ void show_usage(void);
void remove_avoptions(AVDictionary **a, AVDictionary *b); void remove_avoptions(AVDictionary **a, AVDictionary *b);
void assert_avoptions(AVDictionary *m); void assert_avoptions(AVDictionary *m);
int guess_input_channel_layout(InputStream *ist); void assert_file_overwrite(const char *filename);
char *file_read(const char *filename);
AVDictionary *strip_specifiers(const AVDictionary *dict);
const AVCodec *find_codec_or_die(void *logctx, const char *name,
enum AVMediaType type, int encoder);
int parse_and_set_vsync(const char *arg, int *vsync_var, int file_idx, int st_idx, int is_global);
int configure_filtergraph(FilterGraph *fg); int configure_filtergraph(FilterGraph *fg);
void check_filter_outputs(void); void check_filter_outputs(void);
@ -719,8 +841,9 @@ int ifilter_parameters_from_frame(InputFilter *ifilter, const AVFrame *frame);
int ffmpeg_parse_options(int argc, char **argv); int ffmpeg_parse_options(int argc, char **argv);
int videotoolbox_init(AVCodecContext *s); void enc_stats_write(OutputStream *ost, EncStats *es,
int qsv_init(AVCodecContext *s); const AVFrame *frame, const AVPacket *pkt,
uint64_t frame_num);
HWDevice *hw_device_get_by_name(const char *name); HWDevice *hw_device_get_by_name(const char *name);
int hw_device_init_from_string(const char *arg, HWDevice **dev); int hw_device_init_from_string(const char *arg, HWDevice **dev);
@ -732,15 +855,58 @@ int hw_device_setup_for_filter(FilterGraph *fg);
int hwaccel_decode_init(AVCodecContext *avctx); int hwaccel_decode_init(AVCodecContext *avctx);
/* open the muxer when all the streams are initialized */ /*
int of_check_init(OutputFile *of); * Initialize muxing state for the given stream, should be called
* after the codec/streamcopy setup has been done.
*
* Open the muxer once all the streams have been initialized.
*/
int of_stream_init(OutputFile *of, OutputStream *ost);
int of_write_trailer(OutputFile *of);
int of_open(const OptionsContext *o, const char *filename);
void of_close(OutputFile **pof);
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
int unqueue);
void of_enc_stats_close(void);
/*
* Send a single packet to the output, applying any bitstream filters
* associated with the output stream. This may result in any number
* of packets actually being written, depending on what bitstream
* filters are applied. The supplied packet is consumed and will be
* blank (as if newly-allocated) when this function returns.
*
* If eof is set, instead indicate EOF to all bitstream filters and
* therefore flush any delayed packets to the output. A blank packet
* must be supplied in this case.
*/
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof);
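A short sketch of the EOF contract described in the comment above: when flushing, the packet handed in must be blank and eof must be set (the flush_output_sketch wrapper is illustrative only, not part of the patch):

    #include "libavcodec/packet.h"

    static void flush_output_sketch(OutputFile *of, OutputStream *ost, AVPacket *pkt)
    {
        av_packet_unref(pkt);                 /* ensure the packet is blank */
        of_output_packet(of, pkt, ost, 1);    /* eof = 1 flushes the bitstream filters */
    }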
int64_t of_filesize(OutputFile *of);
int ifile_open(const OptionsContext *o, const char *filename);
void ifile_close(InputFile **f);
/**
* Get next input packet from the demuxer.
*
* @param pkt the packet is written here when this function returns 0
* @return
* - 0 when a packet has been read successfully
* - 1 when stream end was reached, but the stream is looped;
* caller should flush decoders and read from this demuxer again
* - a negative error code on failure
*/
int ifile_get_packet(InputFile *f, AVPacket **pkt);
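The return-value convention documented above can be exercised with a loop like the following sketch; flush_decoders_sketch is a hypothetical helper standing in for the caller's flush logic, and packet ownership details are simplified:

    static void flush_decoders_sketch(InputFile *f);

    static int demux_loop_sketch(InputFile *f)
    {
        for (;;) {
            AVPacket *pkt;
            int ret = ifile_get_packet(f, &pkt);
            if (ret < 0)
                return ret;                 /* error, including end of input */
            if (ret == 1) {                 /* end reached but the input loops */
                flush_decoders_sketch(f);
                continue;
            }
            /* ... forward *pkt to decoding/streamcopy ... */
        }
    }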
/* iterate over all input streams in all input files;
* pass NULL to start iteration */
InputStream *ist_iter(InputStream *prev);
extern const char * const opt_name_codec_names[];
extern const char * const opt_name_codec_tags[];
extern const char * const opt_name_frame_rates[];
extern const char * const opt_name_top_field_first[];
void set_report_callback(void (*callback)(int, float, float, int64_t, int, double, double));
void set_report_callback(void (*callback)(int, float, float, int64_t, double, double, double));
void cancel_operation(long id);
#endif /* FFTOOLS_FFMPEG_H */
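The set_report_callback() change mirrors the Statistics change earlier in the commit: the time value reported to the statistics callback is now a double instead of an int. A sketch of a callback matching the new prototype (the parameter names follow the Statistics fields and are assumptions, not part of the header):

    #include <stdint.h>

    static void statistics_callback_sketch(int frame_number, float fps, float quality,
                                           int64_t size, double time,
                                           double bitrate, double speed)
    {
        /* forward the values to the platform Statistics object or log them */
        (void)frame_number; (void)fps; (void)quality;
        (void)size; (void)time; (void)bitrate; (void)speed;
    }

    /* registered once at startup: set_report_callback(statistics_callback_sketch); */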

File diff suppressed because it is too large

@ -1,6 +1,7 @@
/*
* ffmpeg filter configuration
* Copyright (c) 2018 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
@ -24,6 +25,12 @@
* We manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
*
* 08.2018
@ -69,8 +76,9 @@ static const enum AVPixelFormat *get_compliance_normal_pix_fmts(const AVCodec *c
} }
} }
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx, enum AVPixelFormat
const AVCodec *codec, enum AVPixelFormat target) choose_pixel_fmt(const AVCodec *codec, enum AVPixelFormat target,
int strict_std_compliance)
{ {
if (codec && codec->pix_fmts) { if (codec && codec->pix_fmts) {
const enum AVPixelFormat *p = codec->pix_fmts; const enum AVPixelFormat *p = codec->pix_fmts;
@ -79,7 +87,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
int has_alpha = desc ? desc->nb_components % 2 == 0 : 0; int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
enum AVPixelFormat best= AV_PIX_FMT_NONE; enum AVPixelFormat best= AV_PIX_FMT_NONE;
if (enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(codec, p); p = get_compliance_normal_pix_fmts(codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -106,6 +114,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint) static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
{ {
OutputStream *ost = ofilter->ost; OutputStream *ost = ofilter->ost;
AVCodecContext *enc = ost->enc_ctx;
const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0); const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
if (strict_dict) if (strict_dict)
// used by choose_pixel_fmt() and below // used by choose_pixel_fmt() and below
@ -119,13 +128,14 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt); return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt);
} }
if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) { if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) {
return av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)); return av_get_pix_fmt_name(choose_pixel_fmt(enc->codec, enc->pix_fmt,
} else if (ost->enc && ost->enc->pix_fmts) { ost->enc_ctx->strict_std_compliance));
} else if (enc->codec->pix_fmts) {
const enum AVPixelFormat *p; const enum AVPixelFormat *p;
p = ost->enc->pix_fmts; p = enc->codec->pix_fmts;
if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(ost->enc, p); p = get_compliance_normal_pix_fmts(enc->codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -133,7 +143,7 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|'); av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|');
} }
if (!av_bprint_is_complete(bprint)) if (!av_bprint_is_complete(bprint))
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return bprint->str; return bprint->str;
} else } else
return NULL; return NULL;
@ -197,7 +207,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
InputFilter *ifilter; InputFilter *ifilter;
if (!fg) if (!fg)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
fg->index = nb_filtergraphs; fg->index = nb_filtergraphs;
ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs); ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs);
@ -214,7 +224,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
@ -238,7 +248,7 @@ static char *describe_filter_link(FilterGraph *fg, AVFilterInOut *inout, int in)
res = av_asprintf("%s:%s", ctx->filter->name, res = av_asprintf("%s:%s", ctx->filter->name,
avfilter_pad_get_name(pads, inout->pad_idx)); avfilter_pad_get_name(pads, inout->pad_idx));
if (!res) if (!res)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return res; return res;
} }
@ -285,7 +295,7 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
"matches no streams.\n", p, fg->graph_desc); "matches no streams.\n", p, fg->graph_desc);
exit_program(1); exit_program(1);
} }
ist = input_streams[input_files[file_idx]->ist_index + st->index]; ist = input_files[file_idx]->streams[st->index];
if (ist->user_set_discard == AVDISCARD_ALL) { if (ist->user_set_discard == AVDISCARD_ALL) {
av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s " av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s "
"matches a disabled input stream.\n", p, fg->graph_desc); "matches a disabled input stream.\n", p, fg->graph_desc);
@ -293,14 +303,13 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
} }
} else { } else {
/* find the first unused stream of corresponding type */ /* find the first unused stream of corresponding type */
for (i = 0; i < nb_input_streams; i++) { for (ist = ist_iter(NULL); ist; ist = ist_iter(ist)) {
ist = input_streams[i];
if (ist->user_set_discard == AVDISCARD_ALL) if (ist->user_set_discard == AVDISCARD_ALL)
continue; continue;
if (ist->dec_ctx->codec_type == type && ist->discard) if (ist->dec_ctx->codec_type == type && ist->discard)
break; break;
} }
if (i == nb_input_streams) { if (!ist) {
av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for " av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for "
"unlabeled input pad %d on filter %s\n", in->pad_idx, "unlabeled input pad %d on filter %s\n", in->pad_idx,
in->filter_ctx->name); in->filter_ctx->name);
@ -323,12 +332,162 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
} }
static int read_binary(const char *path, uint8_t **data, int *len)
{
AVIOContext *io = NULL;
int64_t fsize;
int ret;
*data = NULL;
*len = 0;
ret = avio_open2(&io, path, AVIO_FLAG_READ, &int_cb, NULL);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s': %s\n",
path, av_err2str(ret));
return ret;
}
fsize = avio_size(io);
if (fsize < 0 || fsize > INT_MAX) {
av_log(NULL, AV_LOG_ERROR, "Cannot obtain size of file %s\n", path);
ret = AVERROR(EIO);
goto fail;
}
*data = av_malloc(fsize);
if (!*data) {
ret = AVERROR(ENOMEM);
goto fail;
}
ret = avio_read(io, *data, fsize);
if (ret != fsize) {
av_log(NULL, AV_LOG_ERROR, "Error reading file %s\n", path);
ret = ret < 0 ? ret : AVERROR(EIO);
goto fail;
}
*len = fsize;
return 0;
fail:
avio_close(io);
av_freep(data);
*len = 0;
return ret;
}
static int filter_opt_apply(AVFilterContext *f, const char *key, const char *val)
{
const AVOption *o = NULL;
int ret;
ret = av_opt_set(f, key, val, AV_OPT_SEARCH_CHILDREN);
if (ret >= 0)
return 0;
if (ret == AVERROR_OPTION_NOT_FOUND && key[0] == '/')
o = av_opt_find(f, key + 1, NULL, 0, AV_OPT_SEARCH_CHILDREN);
if (!o)
goto err_apply;
// key is a valid option name prefixed with '/'
// interpret value as a path from which to load the actual option value
key++;
if (o->type == AV_OPT_TYPE_BINARY) {
uint8_t *data;
int len;
ret = read_binary(val, &data, &len);
if (ret < 0)
goto err_load;
ret = av_opt_set_bin(f, key, data, len, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
} else {
char *data = file_read(val);
if (!data) {
ret = AVERROR(EIO);
goto err_load;
}
ret = av_opt_set(f, key, data, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
}
if (ret < 0)
goto err_apply;
return 0;
err_apply:
av_log(NULL, AV_LOG_ERROR,
"Error applying option '%s' to filter '%s': %s\n",
key, f->filter->name, av_err2str(ret));
return ret;
err_load:
av_log(NULL, AV_LOG_ERROR,
"Error loading value for option '%s' from file '%s'\n",
key, val);
return ret;
}
static int graph_opts_apply(AVFilterGraphSegment *seg)
{
for (size_t i = 0; i < seg->nb_chains; i++) {
AVFilterChain *ch = seg->chains[i];
for (size_t j = 0; j < ch->nb_filters; j++) {
AVFilterParams *p = ch->filters[j];
const AVDictionaryEntry *e = NULL;
av_assert0(p->filter);
while ((e = av_dict_iterate(p->opts, e))) {
int ret = filter_opt_apply(p->filter, e->key, e->value);
if (ret < 0)
return ret;
}
av_dict_free(&p->opts);
}
}
return 0;
}
static int graph_parse(AVFilterGraph *graph, const char *desc,
AVFilterInOut **inputs, AVFilterInOut **outputs)
{
AVFilterGraphSegment *seg;
int ret;
ret = avfilter_graph_segment_parse(graph, desc, 0, &seg);
if (ret < 0)
return ret;
ret = avfilter_graph_segment_create_filters(seg, 0);
if (ret < 0)
goto fail;
ret = graph_opts_apply(seg);
if (ret < 0)
goto fail;
ret = avfilter_graph_segment_apply(seg, 0, inputs, outputs);
fail:
avfilter_graph_segment_free(&seg);
return ret;
}
int init_complex_filtergraph(FilterGraph *fg) int init_complex_filtergraph(FilterGraph *fg)
{ {
AVFilterInOut *inputs, *outputs, *cur; AVFilterInOut *inputs, *outputs, *cur;
@ -342,7 +501,7 @@ int init_complex_filtergraph(FilterGraph *fg)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
graph->nb_threads = 1; graph->nb_threads = 1;
ret = avfilter_graph_parse2(graph, fg->graph_desc, &inputs, &outputs); ret = graph_parse(graph, fg->graph_desc, &inputs, &outputs);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
@ -467,8 +626,7 @@ static int configure_output_video_filter(FilterGraph *fg, OutputFilter *ofilter,
snprintf(args, sizeof(args), "%d:%d", snprintf(args, sizeof(args), "%d:%d",
ofilter->width, ofilter->height); ofilter->width, ofilter->height);
while ((e = av_dict_get(ost->sws_dict, "", e, while ((e = av_dict_iterate(ost->sws_dict, e))) {
AV_DICT_IGNORE_SUFFIX))) {
av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value); av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value);
} }
@ -575,6 +733,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
pad_idx = 0; \ pad_idx = 0; \
} while (0) } while (0)
av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED); av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED);
#if FFMPEG_OPT_MAP_CHANNEL
if (ost->audio_channels_mapped) { if (ost->audio_channels_mapped) {
AVChannelLayout mapped_layout = { 0 }; AVChannelLayout mapped_layout = { 0 };
int i; int i;
@ -587,6 +746,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
AUTO_INSERT_FILTER("-map_channel", "pan", args.str); AUTO_INSERT_FILTER("-map_channel", "pan", args.str);
av_bprint_clear(&args); av_bprint_clear(&args);
} }
#endif
if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC) if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC)
av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels); av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels);
@ -620,11 +780,11 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
if (ost->apad && of->shortest) { if (ost->apad && of->shortest) {
int i; int i;
for (i=0; i<of->ctx->nb_streams; i++) for (i = 0; i < of->nb_streams; i++)
if (of->ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) if (of->streams[i]->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
break; break;
if (i<of->ctx->nb_streams) { if (i < of->nb_streams) {
AUTO_INSERT_FILTER("-apad", "apad", ost->apad); AUTO_INSERT_FILTER("-apad", "apad", ost->apad);
} }
} }
@ -751,7 +911,7 @@ static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
} }
if (!fr.num) if (!fr.num)
fr = av_guess_frame_rate(input_files[ist->file_index]->ctx, ist->st, NULL); fr = ist->framerate_guessed;
if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) { if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) {
ret = sub2video_prepare(ist, ifilter); ret = sub2video_prepare(ist, ifilter);
@ -904,40 +1064,6 @@ static int configure_input_audio_filter(FilterGraph *fg, InputFilter *ifilter,
last_filter = filt_ctx; \ last_filter = filt_ctx; \
} while (0) } while (0)
if (audio_sync_method > 0) {
char args[256] = {0};
av_strlcatf(args, sizeof(args), "async=%d", audio_sync_method);
if (audio_drift_threshold != 0.1)
av_strlcatf(args, sizeof(args), ":min_hard_comp=%f", audio_drift_threshold);
if (!fg->reconfiguration)
av_strlcatf(args, sizeof(args), ":first_pts=0");
AUTO_INSERT_FILTER_INPUT("-async", "aresample", args);
}
// if (ost->audio_channels_mapped) {
// int i;
// AVBPrint pan_buf;
// av_bprint_init(&pan_buf, 256, 8192);
// av_bprintf(&pan_buf, "0x%"PRIx64,
// av_get_default_channel_layout(ost->audio_channels_mapped));
// for (i = 0; i < ost->audio_channels_mapped; i++)
// if (ost->audio_channels_map[i] != -1)
// av_bprintf(&pan_buf, ":c%d=c%d", i, ost->audio_channels_map[i]);
// AUTO_INSERT_FILTER_INPUT("-map_channel", "pan", pan_buf.str);
// av_bprint_finalize(&pan_buf, NULL);
// }
if (audio_volume != 256) {
char args[256];
av_log(NULL, AV_LOG_WARNING, "-vol has been deprecated. Use the volume "
"audio filter instead.\n");
snprintf(args, sizeof(args), "%f", audio_volume / 256.);
AUTO_INSERT_FILTER_INPUT("-vol", "volume", args);
}
snprintf(name, sizeof(name), "trim for input stream %d:%d", snprintf(name, sizeof(name), "trim for input stream %d:%d",
ist->file_index, ist->st->index); ist->file_index, ist->st->index);
if (copy_ts) { if (copy_ts) {
@ -1020,44 +1146,39 @@ int configure_filtergraph(FilterGraph *fg)
if (simple) { if (simple) {
OutputStream *ost = fg->outputs[0]->ost; OutputStream *ost = fg->outputs[0]->ost;
char args[512];
const AVDictionaryEntry *e = NULL;
if (filter_nbthreads) { if (filter_nbthreads) {
ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0); ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
} else { } else {
const AVDictionaryEntry *e = NULL;
e = av_dict_get(ost->encoder_opts, "threads", NULL, 0); e = av_dict_get(ost->encoder_opts, "threads", NULL, 0);
if (e) if (e)
av_opt_set(fg->graph, "threads", e->value, 0); av_opt_set(fg->graph, "threads", e->value, 0);
} }
args[0] = 0; if (av_dict_count(ost->sws_dict)) {
e = NULL; ret = av_dict_get_string(ost->sws_dict,
while ((e = av_dict_get(ost->sws_dict, "", e, &fg->graph->scale_sws_opts,
AV_DICT_IGNORE_SUFFIX))) { '=', ':');
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); if (ret < 0)
} goto fail;
if (strlen(args)) {
args[strlen(args)-1] = 0;
fg->graph->scale_sws_opts = av_strdup(args);
} }
args[0] = 0; if (av_dict_count(ost->swr_opts)) {
e = NULL; char *args;
while ((e = av_dict_get(ost->swr_opts, "", e, ret = av_dict_get_string(ost->swr_opts, &args, '=', ':');
AV_DICT_IGNORE_SUFFIX))) { if (ret < 0)
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); goto fail;
}
if (strlen(args))
args[strlen(args)-1] = 0;
av_opt_set(fg->graph, "aresample_swr_opts", args, 0); av_opt_set(fg->graph, "aresample_swr_opts", args, 0);
av_free(args);
}
} else { } else {
fg->graph->nb_threads = filter_complex_nbthreads; fg->graph->nb_threads = filter_complex_nbthreads;
} }
if ((ret = avfilter_graph_parse2(fg->graph, graph_desc, &inputs, &outputs)) < 0) if ((ret = graph_parse(fg->graph, graph_desc, &inputs, &outputs)) < 0)
goto fail; goto fail;
ret = hw_device_setup_for_filter(fg); ret = hw_device_setup_for_filter(fg);
@ -1131,16 +1252,8 @@ int configure_filtergraph(FilterGraph *fg)
for (i = 0; i < fg->nb_outputs; i++) { for (i = 0; i < fg->nb_outputs; i++) {
OutputStream *ost = fg->outputs[i]->ost; OutputStream *ost = fg->outputs[i]->ost;
if (!ost->enc) { if (ost->enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO &&
/* identical to the same check in ffmpeg.c, needed because !(ost->enc_ctx->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
complex filter graphs are initialized earlier */
av_log(NULL, AV_LOG_ERROR, "Encoder (codec %s) not found for output stream #%d:%d\n",
avcodec_get_name(ost->st->codecpar->codec_id), ost->file_index, ost->index);
ret = AVERROR(EINVAL);
goto fail;
}
if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
!(ost->enc->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
av_buffersink_set_frame_size(ost->filter->filter, av_buffersink_set_frame_size(ost->filter->filter,
ost->enc_ctx->frame_size); ost->enc_ctx->frame_size);
} }
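
The hunks above drop the hand-rolled "key=value:" concatenation for the sws/swr options in favour of av_dict_get_string(). A minimal sketch of that call in isolation (only libavutil is assumed; the option values are made up for the example):

/* Serialize an AVDictionary the way the new configure_filtergraph() code does. */
#include <stdio.h>
#include "libavutil/dict.h"
#include "libavutil/mem.h"

int main(void)
{
    AVDictionary *sws_dict = NULL;
    char *serialized = NULL;

    av_dict_set(&sws_dict, "flags", "bicubic", 0);
    av_dict_set(&sws_dict, "threads", "4", 0);

    if (av_dict_count(sws_dict)) {
        /* '=' separates key from value, ':' separates pairs, matching scale_sws_opts. */
        if (av_dict_get_string(sws_dict, &serialized, '=', ':') >= 0) {
            printf("%s\n", serialized);   /* e.g. flags=bicubic:threads=4 */
            av_freep(&serialized);
        }
    }
    av_dict_free(&sws_dict);
    return 0;
}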

@ -1,5 +1,6 @@
/* /*
* Copyright (c) 2018-2019 Taner Sener * Copyright (c) 2018-2019 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -23,6 +24,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 12.2019 * 12.2019
@ -357,7 +364,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (ist->hwaccel_id == HWACCEL_AUTO) { if (ist->hwaccel_id == HWACCEL_AUTO) {
ist->hwaccel_device_type = dev->type; ist->hwaccel_device_type = dev->type;
} else if (ist->hwaccel_device_type != dev->type) { } else if (ist->hwaccel_device_type != dev->type) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "Invalid hwaccel device " av_log(NULL, AV_LOG_ERROR, "Invalid hwaccel device "
"specified for decoder: device %s of type %s is not " "specified for decoder: device %s of type %s is not "
"usable with hwaccel %s.\n", dev->name, "usable with hwaccel %s.\n", dev->name,
av_hwdevice_get_type_name(dev->type), av_hwdevice_get_type_name(dev->type),
@ -408,7 +415,7 @@ int hw_device_setup_for_decode(InputStream *ist)
type = config->device_type; type = config->device_type;
dev = hw_device_get_by_type(type); dev = hw_device_get_by_type(type);
if (dev) { if (dev) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with existing device %s.\n", "hwaccel type %s with existing device %s.\n",
av_hwdevice_get_type_name(type), dev->name); av_hwdevice_get_type_name(type), dev->name);
} }
@ -426,12 +433,12 @@ int hw_device_setup_for_decode(InputStream *ist)
continue; continue;
} }
if (ist->hwaccel_device) { if (ist->hwaccel_device) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new device created " "hwaccel type %s with new device created "
"from %s.\n", av_hwdevice_get_type_name(type), "from %s.\n", av_hwdevice_get_type_name(type),
ist->hwaccel_device); ist->hwaccel_device);
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new default device.\n", "hwaccel type %s with new default device.\n",
av_hwdevice_get_type_name(type)); av_hwdevice_get_type_name(type));
} }
@ -439,7 +446,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (dev) { if (dev) {
ist->hwaccel_device_type = type; ist->hwaccel_device_type = type;
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Auto hwaccel " av_log(NULL, AV_LOG_INFO, "Auto hwaccel "
"disabled: no device found.\n"); "disabled: no device found.\n");
ist->hwaccel_id = HWACCEL_NONE; ist->hwaccel_id = HWACCEL_NONE;
return 0; return 0;
@ -447,7 +454,7 @@ int hw_device_setup_for_decode(InputStream *ist)
} }
if (!dev) { if (!dev) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "No device available " av_log(NULL, AV_LOG_ERROR, "No device available "
"for decoder: device type %s needed for codec %s.\n", "for decoder: device type %s needed for codec %s.\n",
av_hwdevice_get_type_name(type), ist->dec->name); av_hwdevice_get_type_name(type), ist->dec->name);
return err; return err;
@ -479,7 +486,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
} }
for (i = 0;; i++) { for (i = 0;; i++) {
config = avcodec_get_hw_config(ost->enc, i); config = avcodec_get_hw_config(ost->enc_ctx->codec, i);
if (!config) if (!config)
break; break;
@ -490,7 +497,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input "
"frames context (format %s) with %s encoder.\n", "frames context (format %s) with %s encoder.\n",
av_get_pix_fmt_name(ost->enc_ctx->pix_fmt), av_get_pix_fmt_name(ost->enc_ctx->pix_fmt),
ost->enc->name); ost->enc_ctx->codec->name);
ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref); ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref);
if (!ost->enc_ctx->hw_frames_ctx) if (!ost->enc_ctx->hw_frames_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
@ -505,7 +512,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
if (dev) { if (dev) {
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s "
"(type %s) with %s encoder.\n", dev->name, "(type %s) with %s encoder.\n", dev->name,
av_hwdevice_get_type_name(dev->type), ost->enc->name); av_hwdevice_get_type_name(dev->type), ost->enc_ctx->codec->name);
ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref); ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref);
if (!ost->enc_ctx->hw_device_ctx) if (!ost->enc_ctx->hw_device_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
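
The encoder-side hunks above now reach the codec through ost->enc_ctx->codec when walking avcodec_get_hw_config(). A self-contained sketch of that enumeration loop (the encoder name is only an example and may not exist in a given build):

/* List the device types an encoder advertises via avcodec_get_hw_config(). */
#include <stdio.h>
#include "libavcodec/avcodec.h"
#include "libavutil/hwcontext.h"

static void list_hw_configs(const char *enc_name)
{
    const AVCodec *codec = avcodec_find_encoder_by_name(enc_name);
    if (!codec) {
        printf("encoder %s not available\n", enc_name);
        return;
    }
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i);
        if (!config)
            break;                         /* no more configs */
        if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
            printf("%s can use a %s device\n", enc_name,
                   av_hwdevice_get_type_name(config->device_type));
    }
}

int main(void)
{
    list_hw_configs("h264_videotoolbox");  /* example name only */
    return 0;
}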

@ -1,6 +1,7 @@
/* /*
* This file is part of FFmpeg. * This file is part of FFmpeg.
* Copyright (c) 2022 Taner Sener * Copyright (c) 2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* FFmpeg is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
@ -22,6 +23,15 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library. * by us to develop the ffmpeg-kit library.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp marked as thread-local
* - ms_from_ost migrated from ffmpeg_mux.h and marked as non-static * - ms_from_ost migrated from ffmpeg_mux.h and marked as non-static
*
* ffmpeg-kit changes by Taner Sener * ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -31,100 +41,83 @@
* - printf replaced with av_log statements * - printf replaced with av_log statements
*/ */
#include <stdatomic.h>
#include <stdio.h> #include <stdio.h>
#include <string.h> #include <string.h>
#include "fftools_ffmpeg.h" #include "fftools_ffmpeg.h"
#include "fftools_ffmpeg_mux.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
#include "fftools_thread_queue.h"
#include "libavutil/fifo.h" #include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/log.h" #include "libavutil/log.h"
#include "libavutil/mem.h" #include "libavutil/mem.h"
#include "libavutil/timestamp.h" #include "libavutil/timestamp.h"
#include "libavutil/thread.h"
#include "libavcodec/packet.h" #include "libavcodec/packet.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
static void close_all_output_streams(OutputStream *ost, OSTFinished this_stream, OSTFinished others) __thread int want_sdp = 1;
MuxStream *ms_from_ost(OutputStream *ost)
{ {
int i; return (MuxStream*)ost;
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost2 = output_streams[i];
ost2->finished |= ost == ost2 ? this_stream : others;
}
} }
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, static Muxer *mux_from_of(OutputFile *of)
int unqueue)
{ {
AVFormatContext *s = of->ctx; return (Muxer*)of;
AVStream *st = ost->st; }
int ret;
/* static int64_t filesize(AVIOContext *pb)
* Audio encoders may split the packets -- #frames in != #packets out. {
* But there is no reordering, so we can limit the number of output packets int64_t ret = -1;
* by simply dropping them here.
* Counting encoded video frames needs to be done separately because of if (pb) {
* reordering, see do_video_out(). ret = avio_size(pb);
* Do not count the packet when unqueued because it has been counted when queued. if (ret <= 0) // FIXME improve avio_size() so it works with non seekable output too
*/ ret = avio_tell(pb);
if (!(st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->encoding_needed) && !unqueue) {
if (ost->frame_number >= ost->max_frames) {
av_packet_unref(pkt);
return;
}
ost->frame_number++;
} }
if (!of->header_written) { return ret;
AVPacket *tmp_pkt; }
/* the muxer is not initialized yet, buffer the packet */
if (!av_fifo_can_write(ost->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ost->muxing_queue);
unsigned int are_we_over_size =
(ost->muxing_queue_data_size + pkt->size) > ost->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ost->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) { static int write_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
av_log(NULL, AV_LOG_ERROR, {
"Too many packets buffered for output stream %d:%d.\n", MuxStream *ms = ms_from_ost(ost);
ost->file_index, ost->st->index); AVFormatContext *s = mux->fc;
exit_program(1); AVStream *st = ost->st;
} int64_t fs;
ret = av_fifo_grow2(ost->muxing_queue, new_size - cur_size); uint64_t frame_num;
if (ret < 0) int ret;
exit_program(1);
} fs = filesize(s->pb);
ret = av_packet_make_refcounted(pkt); atomic_store(&mux->last_filesize, fs);
if (ret < 0) if (fs >= mux->limit_filesize) {
exit_program(1); ret = AVERROR_EOF;
tmp_pkt = av_packet_alloc(); goto fail;
if (!tmp_pkt)
exit_program(1);
av_packet_move_ref(tmp_pkt, pkt);
ost->muxing_queue_data_size += tmp_pkt->size;
av_fifo_write(ost->muxing_queue, &tmp_pkt, 1);
return;
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP) || if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP)
(st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audio_sync_method < 0))
pkt->pts = pkt->dts = AV_NOPTS_VALUE; pkt->pts = pkt->dts = AV_NOPTS_VALUE;
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
if (ost->frame_rate.num && ost->is_cfr) { if (ost->frame_rate.num && ost->is_cfr) {
if (pkt->duration > 0) if (pkt->duration > 0)
av_log(NULL, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n"); av_log(ost, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n");
pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate), pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate),
ost->mux_timebase); pkt->time_base);
} }
} }
av_packet_rescale_ts(pkt, ost->mux_timebase, ost->st->time_base); av_packet_rescale_ts(pkt, pkt->time_base, ost->st->time_base);
pkt->time_base = ost->st->time_base;
if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) { if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) {
if (pkt->dts != AV_NOPTS_VALUE && if (pkt->dts != AV_NOPTS_VALUE &&
@ -134,25 +127,26 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
pkt->dts, pkt->pts, pkt->dts, pkt->pts,
ost->file_index, ost->st->index); ost->file_index, ost->st->index);
pkt->pts = pkt->pts =
pkt->dts = pkt->pts + pkt->dts + ost->last_mux_dts + 1 pkt->dts = pkt->pts + pkt->dts + ms->last_mux_dts + 1
- FFMIN3(pkt->pts, pkt->dts, ost->last_mux_dts + 1) - FFMIN3(pkt->pts, pkt->dts, ms->last_mux_dts + 1)
- FFMAX3(pkt->pts, pkt->dts, ost->last_mux_dts + 1); - FFMAX3(pkt->pts, pkt->dts, ms->last_mux_dts + 1);
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) && if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) &&
pkt->dts != AV_NOPTS_VALUE && pkt->dts != AV_NOPTS_VALUE &&
ost->last_mux_dts != AV_NOPTS_VALUE) { ms->last_mux_dts != AV_NOPTS_VALUE) {
int64_t max = ost->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT); int64_t max = ms->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT);
if (pkt->dts < max) { if (pkt->dts < max) {
int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG; int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG;
if (exit_on_error) if (exit_on_error)
loglevel = AV_LOG_ERROR; loglevel = AV_LOG_ERROR;
av_log(s, loglevel, "Non-monotonous DTS in output stream " av_log(s, loglevel, "Non-monotonous DTS in output stream "
"%d:%d; previous: %"PRId64", current: %"PRId64"; ", "%d:%d; previous: %"PRId64", current: %"PRId64"; ",
ost->file_index, ost->st->index, ost->last_mux_dts, pkt->dts); ost->file_index, ost->st->index, ms->last_mux_dts, pkt->dts);
if (exit_on_error) { if (exit_on_error) {
av_log(NULL, AV_LOG_FATAL, "aborting.\n"); ret = AVERROR(EINVAL);
exit_program(1); goto fail;
} }
av_log(s, loglevel, "changing to %"PRId64". This may result " av_log(s, loglevel, "changing to %"PRId64". This may result "
"in incorrect timestamps in the output file.\n", "in incorrect timestamps in the output file.\n",
max); max);
@ -162,17 +156,17 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
} }
} }
} }
ost->last_mux_dts = pkt->dts; ms->last_mux_dts = pkt->dts;
ost->data_size += pkt->size; ost->data_size_mux += pkt->size;
ost->packets_written++; frame_num = atomic_fetch_add(&ost->packets_written, 1);
pkt->stream_index = ost->index; pkt->stream_index = ost->index;
if (debug_ts) { if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "muxer <- type:%s " av_log(ost, AV_LOG_INFO, "muxer <- type:%s "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n", "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n",
av_get_media_type_string(ost->enc_ctx->codec_type), av_get_media_type_string(st->codecpar->codec_type),
av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base), av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base),
av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base), av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base),
av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base), av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base),
@ -180,12 +174,307 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
); );
} }
if (ms->stats.io)
enc_stats_write(ost, &ms->stats, NULL, pkt, frame_num);
ret = av_interleaved_write_frame(s, pkt); ret = av_interleaved_write_frame(s, pkt);
if (ret < 0) { if (ret < 0) {
print_error("av_interleaved_write_frame()", ret); print_error("av_interleaved_write_frame()", ret);
main_ffmpeg_return_code = 1; goto fail;
close_all_output_streams(ost, MUXER_FINISHED | ENCODER_FINISHED, ENCODER_FINISHED); }
return 0;
fail:
av_packet_unref(pkt);
return ret;
}
static int sync_queue_process(Muxer *mux, OutputStream *ost, AVPacket *pkt, int *stream_eof)
{
OutputFile *of = &mux->of;
if (ost->sq_idx_mux >= 0) {
int ret = sq_send(mux->sq_mux, ost->sq_idx_mux, SQPKT(pkt));
if (ret < 0) {
if (ret == AVERROR_EOF)
*stream_eof = 1;
return ret;
}
while (1) {
ret = sq_receive(mux->sq_mux, -1, SQPKT(mux->sq_pkt));
if (ret < 0)
return (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) ? 0 : ret;
ret = write_packet(mux, of->streams[ret],
mux->sq_pkt);
if (ret < 0)
return ret;
}
} else if (pkt)
return write_packet(mux, ost, pkt);
return 0;
}
static void thread_set_name(OutputFile *of)
{
char name[16];
snprintf(name, sizeof(name), "mux%d:%s", of->index, of->format->name);
ff_thread_setname(name);
}
static void *muxer_thread(void *arg)
{
Muxer *mux = arg;
OutputFile *of = &mux->of;
AVPacket *pkt = NULL;
int ret = 0;
pkt = av_packet_alloc();
if (!pkt) {
ret = AVERROR(ENOMEM);
goto finish;
}
thread_set_name(of);
while (1) {
OutputStream *ost;
int stream_idx, stream_eof = 0;
ret = tq_receive(mux->tq, &stream_idx, pkt);
if (stream_idx < 0) {
av_log(mux, AV_LOG_VERBOSE, "All streams finished\n");
ret = 0;
break;
}
ost = of->streams[stream_idx];
ret = sync_queue_process(mux, ost, ret < 0 ? NULL : pkt, &stream_eof);
av_packet_unref(pkt);
if (ret == AVERROR_EOF && stream_eof)
tq_receive_finish(mux->tq, stream_idx);
else if (ret < 0) {
av_log(mux, AV_LOG_ERROR, "Error muxing a packet\n");
break;
}
}
finish:
av_packet_free(&pkt);
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_receive_finish(mux->tq, i);
av_log(mux, AV_LOG_VERBOSE, "Terminating muxer thread\n");
return (void*)(intptr_t)ret;
}
static int thread_submit_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
int ret = 0;
if (!pkt || ost->finished & MUXER_FINISHED)
goto finish;
ret = tq_send(mux->tq, ost->index, pkt);
if (ret < 0)
goto finish;
return 0;
finish:
if (pkt)
av_packet_unref(pkt);
ost->finished |= MUXER_FINISHED;
tq_send_finish(mux->tq, ost->index);
return ret == AVERROR_EOF ? 0 : ret;
}
static int queue_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
MuxStream *ms = ms_from_ost(ost);
AVPacket *tmp_pkt = NULL;
int ret;
if (!av_fifo_can_write(ms->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ms->muxing_queue);
size_t pkt_size = pkt ? pkt->size : 0;
unsigned int are_we_over_size =
(ms->muxing_queue_data_size + pkt_size) > ms->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ms->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) {
av_log(ost, AV_LOG_ERROR,
"Too many packets buffered for output stream %d:%d.\n",
ost->file_index, ost->st->index);
return AVERROR(ENOSPC);
}
ret = av_fifo_grow2(ms->muxing_queue, new_size - cur_size);
if (ret < 0)
return ret;
}
if (pkt) {
ret = av_packet_make_refcounted(pkt);
if (ret < 0)
return ret;
tmp_pkt = av_packet_alloc();
if (!tmp_pkt)
return AVERROR(ENOMEM);
av_packet_move_ref(tmp_pkt, pkt);
ms->muxing_queue_data_size += tmp_pkt->size;
}
av_fifo_write(ms->muxing_queue, &tmp_pkt, 1);
return 0;
}
static int submit_packet(Muxer *mux, AVPacket *pkt, OutputStream *ost)
{
int ret;
if (mux->tq) {
return thread_submit_packet(mux, ost, pkt);
} else {
/* the muxer is not initialized yet, buffer the packet */
ret = queue_packet(mux, ost, pkt);
if (ret < 0) {
if (pkt)
av_packet_unref(pkt);
return ret;
}
} }
return 0;
}
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
const char *err_msg;
int ret = 0;
if (!eof && pkt->dts != AV_NOPTS_VALUE)
ost->last_mux_dts = av_rescale_q(pkt->dts, pkt->time_base, AV_TIME_BASE_Q);
/* apply the output bitstream filters */
if (ms->bsf_ctx) {
int bsf_eof = 0;
ret = av_bsf_send_packet(ms->bsf_ctx, eof ? NULL : pkt);
if (ret < 0) {
err_msg = "submitting a packet for bitstream filtering";
goto fail;
}
while (!bsf_eof) {
ret = av_bsf_receive_packet(ms->bsf_ctx, pkt);
if (ret == AVERROR(EAGAIN))
return;
else if (ret == AVERROR_EOF)
bsf_eof = 1;
else if (ret < 0) {
err_msg = "applying bitstream filters to a packet";
goto fail;
}
ret = submit_packet(mux, bsf_eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
} else {
ret = submit_packet(mux, eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
return;
mux_fail:
err_msg = "submitting a packet to the muxer";
fail:
av_log(ost, AV_LOG_ERROR, "Error %s\n", err_msg);
if (exit_on_error)
exit_program(1);
}
static int thread_stop(Muxer *mux)
{
void *ret;
if (!mux || !mux->tq)
return 0;
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_send_finish(mux->tq, i);
pthread_join(mux->thread, &ret);
tq_free(&mux->tq);
return (int)(intptr_t)ret;
}
static void pkt_move(void *dst, void *src)
{
av_packet_move_ref(dst, src);
}
static int thread_start(Muxer *mux)
{
AVFormatContext *fc = mux->fc;
ObjPool *op;
int ret;
op = objpool_alloc_packets();
if (!op)
return AVERROR(ENOMEM);
mux->tq = tq_alloc(fc->nb_streams, mux->thread_queue_size, op, pkt_move);
if (!mux->tq) {
objpool_free(&op);
return AVERROR(ENOMEM);
}
ret = pthread_create(&mux->thread, NULL, muxer_thread, (void*)mux);
if (ret) {
tq_free(&mux->tq);
return AVERROR(ret);
}
/* flush the muxing queues */
for (int i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = mux->of.streams[i];
MuxStream *ms = ms_from_ost(ost);
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */
if (!av_fifo_can_read(ms->muxing_queue))
ost->mux_timebase = ost->st->time_base;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0) {
ret = thread_submit_packet(mux, ost, pkt);
if (pkt) {
ms->muxing_queue_data_size -= pkt->size;
av_packet_free(&pkt);
}
if (ret < 0)
return ret;
}
}
return 0;
} }
static int print_sdp(void) static int print_sdp(void)
@ -197,16 +486,16 @@ static int print_sdp(void)
AVFormatContext **avc; AVFormatContext **avc;
for (i = 0; i < nb_output_files; i++) { for (i = 0; i < nb_output_files; i++) {
if (!output_files[i]->header_written) if (!mux_from_of(output_files[i])->header_written)
return 0; return 0;
} }
avc = av_malloc_array(nb_output_files, sizeof(*avc)); avc = av_malloc_array(nb_output_files, sizeof(*avc));
if (!avc) if (!avc)
exit_program(1); return AVERROR(ENOMEM);
for (i = 0, j = 0; i < nb_output_files; i++) { for (i = 0, j = 0; i < nb_output_files; i++) {
if (!strcmp(output_files[i]->ctx->oformat->name, "rtp")) { if (!strcmp(output_files[i]->format->name, "rtp")) {
avc[j] = output_files[i]->ctx; avc[j] = mux_from_of(output_files[i])->fc;
j++; j++;
} }
} }
@ -236,34 +525,36 @@ static int print_sdp(void)
av_freep(&sdp_filename); av_freep(&sdp_filename);
} }
// SDP successfully written, allow muxer threads to start
ret = 1;
fail: fail:
av_freep(&avc); av_freep(&avc);
return ret; return ret;
} }
/* open the muxer when all the streams are initialized */ int mux_check_init(Muxer *mux)
int of_check_init(OutputFile *of)
{ {
OutputFile *of = &mux->of;
AVFormatContext *fc = mux->fc;
int ret, i; int ret, i;
for (i = 0; i < of->ctx->nb_streams; i++) { for (i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = output_streams[of->ost_index + i]; OutputStream *ost = of->streams[i];
if (!ost->initialized) if (!ost->initialized)
return 0; return 0;
} }
ret = avformat_write_header(of->ctx, &of->opts); ret = avformat_write_header(fc, &mux->opts);
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, av_log(mux, AV_LOG_ERROR, "Could not write header (incorrect codec "
"Could not write header for output file #%d " "parameters ?): %s\n", av_err2str(ret));
"(incorrect codec parameters ?): %s\n",
of->index, av_err2str(ret));
return ret; return ret;
} }
//assert_avoptions(of->opts); //assert_avoptions(of->opts);
of->header_written = 1; mux->header_written = 1;
av_dump_format(of->ctx, of->index, of->ctx->url, 1); av_dump_format(fc, of->index, fc->url, 1);
nb_output_dumped++; nb_output_dumped++;
if (sdp_filename || want_sdp) { if (sdp_filename || want_sdp) {
@ -271,62 +562,220 @@ int of_check_init(OutputFile *of)
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n"); av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n");
return ret; return ret;
} else if (ret == 1) {
/* SDP is written only after all the muxers are ready, so now we
* start ALL the threads */
for (i = 0; i < nb_output_files; i++) {
ret = thread_start(mux_from_of(output_files[i]));
if (ret < 0)
return ret;
}
} }
} else {
ret = thread_start(mux_from_of(of));
if (ret < 0)
return ret;
} }
/* flush the muxing queues */ return 0;
for (i = 0; i < of->ctx->nb_streams; i++) { }
OutputStream *ost = output_streams[of->ost_index + i];
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */ static int bsf_init(MuxStream *ms)
if (!av_fifo_can_read(ost->muxing_queue)) {
ost->mux_timebase = ost->st->time_base; OutputStream *ost = &ms->ost;
AVBSFContext *ctx = ms->bsf_ctx;
int ret;
while (av_fifo_read(ost->muxing_queue, &pkt, 1) >= 0) { if (!ctx)
ost->muxing_queue_data_size -= pkt->size; return 0;
of_write_packet(of, pkt, ost, 1);
av_packet_free(&pkt); ret = avcodec_parameters_copy(ctx->par_in, ost->st->codecpar);
} if (ret < 0)
return ret;
ctx->time_base_in = ost->st->time_base;
ret = av_bsf_init(ctx);
if (ret < 0) {
av_log(ms, AV_LOG_ERROR, "Error initializing bitstream filter: %s\n",
ctx->filter->name);
return ret;
} }
ret = avcodec_parameters_copy(ost->st->codecpar, ctx->par_out);
if (ret < 0)
return ret;
ost->st->time_base = ctx->time_base_out;
return 0; return 0;
} }
int of_stream_init(OutputFile *of, OutputStream *ost)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
int ret;
if (ost->sq_idx_mux >= 0)
sq_set_tb(mux->sq_mux, ost->sq_idx_mux, ost->mux_timebase);
/* initialize bitstream filters for the output stream
* needs to be done here, because the codec id for streamcopy is not
* known until now */
ret = bsf_init(ms);
if (ret < 0)
return ret;
ost->initialized = 1;
return mux_check_init(mux);
}
int of_write_trailer(OutputFile *of) int of_write_trailer(OutputFile *of)
{ {
Muxer *mux = mux_from_of(of);
AVFormatContext *fc = mux->fc;
int ret; int ret;
if (!of->header_written) { if (!mux->tq) {
av_log(NULL, AV_LOG_ERROR, av_log(mux, AV_LOG_ERROR,
"Nothing was written into output file %d (%s), because " "Nothing was written into output file, because "
"at least one of its streams received no packets.\n", "at least one of its streams received no packets.\n");
of->index, of->ctx->url);
return AVERROR(EINVAL); return AVERROR(EINVAL);
} }
ret = av_write_trailer(of->ctx); ret = thread_stop(mux);
if (ret < 0)
main_ffmpeg_return_code = ret;
ret = av_write_trailer(fc);
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error writing trailer of %s: %s\n", of->ctx->url, av_err2str(ret)); av_log(mux, AV_LOG_ERROR, "Error writing trailer: %s\n", av_err2str(ret));
return ret; return ret;
} }
mux->last_filesize = filesize(fc->pb);
if (!(of->format->flags & AVFMT_NOFILE)) {
ret = avio_closep(&fc->pb);
if (ret < 0) {
av_log(mux, AV_LOG_ERROR, "Error closing file: %s\n", av_err2str(ret));
return ret;
}
}
return 0; return 0;
} }
static void ost_free(OutputStream **post)
{
OutputStream *ost = *post;
MuxStream *ms;
if (!ost)
return;
ms = ms_from_ost(ost);
if (ost->logfile) {
if (fclose(ost->logfile))
av_log(ms, AV_LOG_ERROR,
"Error closing logfile, loss of information possible: %s\n",
av_err2str(AVERROR(errno)));
ost->logfile = NULL;
}
if (ms->muxing_queue) {
AVPacket *pkt;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0)
av_packet_free(&pkt);
av_fifo_freep2(&ms->muxing_queue);
}
av_bsf_free(&ms->bsf_ctx);
av_frame_free(&ost->filtered_frame);
av_frame_free(&ost->sq_frame);
av_frame_free(&ost->last_frame);
av_packet_free(&ost->pkt);
av_dict_free(&ost->encoder_opts);
av_freep(&ost->kf.pts);
av_expr_free(ost->kf.pexpr);
av_freep(&ost->avfilter);
av_freep(&ost->logfile_prefix);
av_freep(&ost->apad);
#if FFMPEG_OPT_MAP_CHANNEL
av_freep(&ost->audio_channels_map);
ost->audio_channels_mapped = 0;
#endif
av_dict_free(&ost->sws_dict);
av_dict_free(&ost->swr_opts);
if (ost->enc_ctx)
av_freep(&ost->enc_ctx->stats_in);
avcodec_free_context(&ost->enc_ctx);
for (int i = 0; i < ost->enc_stats_pre.nb_components; i++)
av_freep(&ost->enc_stats_pre.components[i].str);
av_freep(&ost->enc_stats_pre.components);
for (int i = 0; i < ost->enc_stats_post.nb_components; i++)
av_freep(&ost->enc_stats_post.components[i].str);
av_freep(&ost->enc_stats_post.components);
for (int i = 0; i < ms->stats.nb_components; i++)
av_freep(&ms->stats.components[i].str);
av_freep(&ms->stats.components);
av_freep(post);
}
static void fc_close(AVFormatContext **pfc)
{
AVFormatContext *fc = *pfc;
if (!fc)
return;
if (!(fc->oformat->flags & AVFMT_NOFILE))
avio_closep(&fc->pb);
avformat_free_context(fc);
*pfc = NULL;
}
void of_close(OutputFile **pof) void of_close(OutputFile **pof)
{ {
OutputFile *of = *pof; OutputFile *of = *pof;
AVFormatContext *s; Muxer *mux;
if (!of) if (!of)
return; return;
mux = mux_from_of(of);
s = of->ctx; thread_stop(mux);
if (s && s->oformat && !(s->oformat->flags & AVFMT_NOFILE))
avio_closep(&s->pb); sq_free(&of->sq_encode);
avformat_free_context(s); sq_free(&mux->sq_mux);
av_dict_free(&of->opts);
for (int i = 0; i < of->nb_streams; i++)
ost_free(&of->streams[i]);
av_freep(&of->streams);
av_dict_free(&mux->opts);
av_packet_free(&mux->sq_pkt);
fc_close(&mux->fc);
av_freep(pof); av_freep(pof);
} }
int64_t of_filesize(OutputFile *of)
{
Muxer *mux = mux_from_of(of);
return atomic_load(&mux->last_filesize);
}
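
The rewritten muxer above moves interleaving and av_interleaved_write_frame() into a per-output thread: of_output_packet() hands packets to submit_packet()/thread_submit_packet(), and muxer_thread() drains them until tq_receive() reports that every stream finished. The fftools ThreadQueue itself is defined elsewhere; the following is only a stripped-down pthread sketch of the same producer/consumer shape, with all names invented for illustration:

/* Simplified illustration, not ffmpeg code: one muxer thread per output,
 * fed through a tiny blocking queue (single slot instead of a ThreadQueue). */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

typedef struct PktQueue {
    pthread_mutex_t lock;
    pthread_cond_t  cond;
    int  pkt;              /* stand-in for an AVPacket */
    bool full;
    bool finished;         /* like tq_send_finish(): no more packets will arrive */
} PktQueue;

static void *mux_thread(void *arg)                 /* muxer_thread() stand-in */
{
    PktQueue *q = arg;
    for (;;) {
        pthread_mutex_lock(&q->lock);
        while (!q->full && !q->finished)
            pthread_cond_wait(&q->cond, &q->lock);
        if (!q->full) {                            /* drained and closed */
            pthread_mutex_unlock(&q->lock);
            break;
        }
        int pkt = q->pkt;
        q->full = false;
        pthread_cond_signal(&q->cond);
        pthread_mutex_unlock(&q->lock);
        printf("muxing packet %d\n", pkt);         /* write_packet() stand-in */
    }
    return NULL;
}

int main(void)
{
    PktQueue q = { .lock = PTHREAD_MUTEX_INITIALIZER,
                   .cond = PTHREAD_COND_INITIALIZER };
    pthread_t th;
    pthread_create(&th, NULL, mux_thread, &q);     /* thread_start() stand-in */

    for (int i = 0; i < 3; i++) {                  /* thread_submit_packet() stand-in */
        pthread_mutex_lock(&q.lock);
        while (q.full)
            pthread_cond_wait(&q.cond, &q.lock);
        q.pkt = i;
        q.full = true;
        pthread_cond_signal(&q.cond);
        pthread_mutex_unlock(&q.lock);
    }

    pthread_mutex_lock(&q.lock);                   /* thread_stop() stand-in */
    q.finished = true;
    pthread_cond_signal(&q.cond);
    pthread_mutex_unlock(&q.lock);
    pthread_join(th, NULL);
    return 0;
}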

@ -0,0 +1,165 @@
/*
* Muxer internal APIs - should not be included outside of ffmpeg_mux*
* Copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of ffmpeg_mux.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp made thread-local
* - EncStatsFile declaration migrated from ffmpeg_mux_init.c
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated from
* ffmpeg.h
* - ms_from_ost migrated to ffmpeg_mux.c
*/
#ifndef FFTOOLS_FFMPEG_MUX_H
#define FFTOOLS_FFMPEG_MUX_H
#include <stdatomic.h>
#include <stdint.h>
#include "fftools_thread_queue.h"
#include "libavformat/avformat.h"
#include "libavcodec/packet.h"
#include "libavutil/dict.h"
#include "libavutil/fifo.h"
#include "libavutil/thread.h"
#define SPECIFIER_OPT_FMT_str "%s"
#define SPECIFIER_OPT_FMT_i "%i"
#define SPECIFIER_OPT_FMT_i64 "%"PRId64
#define SPECIFIER_OPT_FMT_ui64 "%"PRIu64
#define SPECIFIER_OPT_FMT_f "%f"
#define SPECIFIER_OPT_FMT_dbl "%lf"
#define WARN_MULTIPLE_OPT_USAGE(name, type, so, st)\
{\
char namestr[128] = "";\
const char *spec = so->specifier && so->specifier[0] ? so->specifier : "";\
for (int _i = 0; opt_name_##name[_i]; _i++)\
av_strlcatf(namestr, sizeof(namestr), "-%s%s", opt_name_##name[_i], opt_name_##name[_i+1] ? (opt_name_##name[_i+2] ? ", " : " or ") : "");\
av_log(NULL, AV_LOG_WARNING, "Multiple %s options specified for stream %d, only the last option '-%s%s%s "SPECIFIER_OPT_FMT_##type"' will be used.\n",\
namestr, st->index, opt_name_##name[0], spec[0] ? ":" : "", spec, so->u.type);\
}
#define MATCH_PER_STREAM_OPT(name, type, outvar, fmtctx, st)\
{\
int _ret, _matches = 0;\
SpecifierOpt *so;\
for (int _i = 0; _i < o->nb_ ## name; _i++) {\
char *spec = o->name[_i].specifier;\
if ((_ret = check_stream_specifier(fmtctx, st, spec)) > 0) {\
outvar = o->name[_i].u.type;\
so = &o->name[_i];\
_matches++;\
} else if (_ret < 0)\
exit_program(1);\
}\
if (_matches > 1)\
WARN_MULTIPLE_OPT_USAGE(name, type, so, st);\
}
#define MATCH_PER_TYPE_OPT(name, type, outvar, fmtctx, mediatype)\
{\
int i;\
for (i = 0; i < o->nb_ ## name; i++) {\
char *spec = o->name[i].specifier;\
if (!strcmp(spec, mediatype))\
outvar = o->name[i].u.type;\
}\
}
typedef struct MuxStream {
OutputStream ost;
// name used for logging
char log_name[32];
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
AVBSFContext *bsf_ctx;
EncStats stats;
int64_t max_frames;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
int max_muxing_queue_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* dts of the last packet sent to the muxer, in the stream timebase
* used for making up missing dts values */
int64_t last_mux_dts;
} MuxStream;
typedef struct Muxer {
OutputFile of;
// name used for logging
char log_name[32];
AVFormatContext *fc;
pthread_t thread;
ThreadQueue *tq;
AVDictionary *opts;
int thread_queue_size;
/* filesize limit expressed in bytes */
int64_t limit_filesize;
atomic_int_least64_t last_filesize;
int header_written;
SyncQueue *sq_mux;
AVPacket *sq_pkt;
} Muxer;
typedef struct EncStatsFile {
char *path;
AVIOContext *io;
} EncStatsFile;
/* whether we want to print an SDP, set in of_open() */
extern __thread int want_sdp;
int mux_check_init(Muxer *mux);
#endif /* FFTOOLS_FFMPEG_MUX_H */
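
MuxStream and Muxer above embed OutputStream and OutputFile as their first members, which is what lets ms_from_ost() and mux_from_of() in ffmpeg_mux.c recover the wrapper with a plain cast. A tiny sketch of that first-member idiom with invented names:

/* First-member embedding: a pointer to the embedded struct and a pointer to
 * the wrapper share the same address, so the cast below is well defined. */
#include <stdio.h>

typedef struct Base {        /* plays the role of OutputStream */
    int index;
} Base;

typedef struct Wrapper {     /* plays the role of MuxStream */
    Base base;               /* must stay the first member */
    int extra_state;
} Wrapper;

static Wrapper *wrapper_from_base(Base *b)
{
    return (Wrapper *)b;
}

int main(void)
{
    Wrapper w = { .base = { .index = 3 }, .extra_state = 42 };
    Base *b = &w.base;       /* code that only knows about Base */
    printf("%d %d\n", b->index, wrapper_from_base(b)->extra_state);
    return 0;
}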

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -1,6 +1,7 @@
/* /*
* Copyright (c) 2007-2010 Stefano Sabatini * Copyright (c) 2007-2010 Stefano Sabatini
* Copyright (c) 2020-2022 Taner Sener * Copyright (c) 2020-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -29,6 +30,13 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -52,9 +60,11 @@
#include "libavutil/ffversion.h" #include "libavutil/ffversion.h"
#include <string.h> #include <string.h>
#include <math.h>
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h"
#include "libavutil/ambient_viewing_environment.h"
#include "libavutil/avassert.h" #include "libavutil/avassert.h"
#include "libavutil/avstring.h" #include "libavutil/avstring.h"
#include "libavutil/bprint.h" #include "libavutil/bprint.h"
@ -163,6 +173,8 @@ typedef struct ReadInterval {
__thread ReadInterval *read_intervals; __thread ReadInterval *read_intervals;
__thread int read_intervals_nb = 0; __thread int read_intervals_nb = 0;
__thread int find_stream_info = 1;
/* section structure definition */ /* section structure definition */
#define SECTION_MAX_NB_CHILDREN 10 #define SECTION_MAX_NB_CHILDREN 10
@ -626,6 +638,7 @@ static inline void writer_put_str_printf(WriterContext *wctx, const char *str)
static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...) static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...)
{ {
va_list ap; va_list ap;
va_start(ap, fmt); va_start(ap, fmt);
av_vlog(NULL, AV_LOG_STDERR, fmt, ap); av_vlog(NULL, AV_LOG_STDERR, fmt, ap);
va_end(ap); va_end(ap);
@ -671,7 +684,7 @@ static int writer_open(WriterContext **wctx, const Writer *writer, const char *a
goto fail; goto fail;
} }
while ((opt = av_dict_get(opts, "", opt, AV_DICT_IGNORE_SUFFIX))) { while ((opt = av_dict_iterate(opts, opt))) {
if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) { if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) {
av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n", av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n",
opt->key, opt->value); opt->key, opt->value);
@ -1907,13 +1920,15 @@ static void writer_register_all(void)
writer_print_string(w, k, pbuf.str, 0); \ writer_print_string(w, k, pbuf.str, 0); \
} while (0) } while (0)
#define print_list_fmt(k, f, n, ...) do { \ #define print_list_fmt(k, f, n, m, ...) do { \
av_bprint_clear(&pbuf); \ av_bprint_clear(&pbuf); \
for (int idx = 0; idx < n; idx++) { \ for (int idx = 0; idx < n; idx++) { \
if (idx > 0) \ for (int idx2 = 0; idx2 < m; idx2++) { \
if (idx > 0 || idx2 > 0) \
av_bprint_chars(&pbuf, ' ', 1); \ av_bprint_chars(&pbuf, ' ', 1); \
av_bprintf(&pbuf, f, __VA_ARGS__); \ av_bprintf(&pbuf, f, __VA_ARGS__); \
} \ } \
} \
writer_print_string(w, k, pbuf.str, 0); \ writer_print_string(w, k, pbuf.str, 0); \
} while (0) } while (0)
@ -1953,7 +1968,7 @@ static inline int show_tags(WriterContext *w, AVDictionary *tags, int section_id
return 0; return 0;
writer_print_section_header(w, section_id); writer_print_section_header(w, section_id);
while ((tag = av_dict_get(tags, "", tag, AV_DICT_IGNORE_SUFFIX))) { while ((tag = av_dict_iterate(tags, tag))) {
if ((ret = print_str_validate(tag->key, tag->value)) < 0) if ((ret = print_str_validate(tag->key, tag->value)) < 0)
break; break;
} }
@ -2023,7 +2038,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
const AVDOVIReshapingCurve *curve = &mapping->curves[c]; const AVDOVIReshapingCurve *curve = &mapping->curves[c];
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT);
print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, curve->pivots[idx]); print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, 1, curve->pivots[idx]);
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST);
for (int i = 0; i < curve->num_pivots - 1; i++) { for (int i = 0; i < curve->num_pivots - 1; i++) {
@ -2035,7 +2050,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_str("mapping_idc_name", "polynomial"); print_str("mapping_idc_name", "polynomial");
print_int("poly_order", curve->poly_order[i]); print_int("poly_order", curve->poly_order[i]);
print_list_fmt("poly_coef", "%"PRIi64, print_list_fmt("poly_coef", "%"PRIi64,
curve->poly_order[i] + 1, curve->poly_order[i] + 1, 1,
curve->poly_coef[i][idx]); curve->poly_coef[i][idx]);
break; break;
case AV_DOVI_MAPPING_MMR: case AV_DOVI_MAPPING_MMR:
@ -2043,8 +2058,8 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("mmr_order", curve->mmr_order[i]); print_int("mmr_order", curve->mmr_order[i]);
print_int("mmr_constant", curve->mmr_constant[i]); print_int("mmr_constant", curve->mmr_constant[i]);
print_list_fmt("mmr_coef", "%"PRIi64, print_list_fmt("mmr_coef", "%"PRIi64,
curve->mmr_order[i] * 7, curve->mmr_order[i], 7,
curve->mmr_coef[i][0][idx]); curve->mmr_coef[i][idx][idx2]);
break; break;
default: default:
print_str("mapping_idc_name", "unknown"); print_str("mapping_idc_name", "unknown");
@ -2082,15 +2097,15 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("dm_metadata_id", color->dm_metadata_id); print_int("dm_metadata_id", color->dm_metadata_id);
print_int("scene_refresh_flag", color->scene_refresh_flag); print_int("scene_refresh_flag", color->scene_refresh_flag);
print_list_fmt("ycc_to_rgb_matrix", "%d/%d", print_list_fmt("ycc_to_rgb_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), 1,
color->ycc_to_rgb_matrix[idx].num, color->ycc_to_rgb_matrix[idx].num,
color->ycc_to_rgb_matrix[idx].den); color->ycc_to_rgb_matrix[idx].den);
print_list_fmt("ycc_to_rgb_offset", "%d/%d", print_list_fmt("ycc_to_rgb_offset", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), 1,
color->ycc_to_rgb_offset[idx].num, color->ycc_to_rgb_offset[idx].num,
color->ycc_to_rgb_offset[idx].den); color->ycc_to_rgb_offset[idx].den);
print_list_fmt("rgb_to_lms_matrix", "%d/%d", print_list_fmt("rgb_to_lms_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), 1,
color->rgb_to_lms_matrix[idx].num, color->rgb_to_lms_matrix[idx].num,
color->rgb_to_lms_matrix[idx].den); color->rgb_to_lms_matrix[idx].den);
print_int("signal_eotf", color->signal_eotf); print_int("signal_eotf", color->signal_eotf);
@ -2276,6 +2291,17 @@ static void print_dynamic_hdr_vivid(WriterContext *w, const AVDynamicHDRVivid *m
} }
} }
static void print_ambient_viewing_environment(WriterContext *w,
const AVAmbientViewingEnvironment *env)
{
if (!env)
return;
print_q("ambient_illuminance", env->ambient_illuminance, '/');
print_q("ambient_light_x", env->ambient_light_x, '/');
print_q("ambient_light_y", env->ambient_light_y, '/');
}
static void print_pkt_side_data(WriterContext *w, static void print_pkt_side_data(WriterContext *w,
AVCodecParameters *par, AVCodecParameters *par,
const AVPacketSideData *side_data, const AVPacketSideData *side_data,
@ -2293,8 +2319,11 @@ static void print_pkt_side_data(WriterContext *w,
writer_print_section_header(w, id_data); writer_print_section_header(w, id_data);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1); writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
print_int("rotation", av_display_rotation_get((int32_t *)sd->data)); print_int("rotation", rotation);
} else if (sd->type == AV_PKT_DATA_STEREO3D) { } else if (sd->type == AV_PKT_DATA_STEREO3D) {
const AVStereo3D *stereo = (AVStereo3D *)sd->data; const AVStereo3D *stereo = (AVStereo3D *)sd->data;
print_str("type", av_stereo3d_type_name(stereo->type)); print_str("type", av_stereo3d_type_name(stereo->type));
@ -2506,8 +2535,12 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
print_val("size", pkt->size, unit_byte_str); print_val("size", pkt->size, unit_byte_str);
if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos); if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos);
else print_str_opt("pos", "N/A"); else print_str_opt("pos", "N/A");
print_fmt("flags", "%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_', print_fmt("flags", "%c%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_',
pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_'); pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_',
pkt->flags & AV_PKT_FLAG_CORRUPT ? 'C' : '_');
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
if (pkt->side_data_elems) { if (pkt->side_data_elems) {
size_t size; size_t size;
@ -2526,9 +2559,6 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
SECTION_ID_PACKET_SIDE_DATA); SECTION_ID_PACKET_SIDE_DATA);
} }
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
writer_print_section_footer(w); writer_print_section_footer(w);
av_bprint_finalize(&pbuf, NULL); av_bprint_finalize(&pbuf, NULL);
@ -2581,8 +2611,14 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base); print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base);
print_ts ("best_effort_timestamp", frame->best_effort_timestamp); print_ts ("best_effort_timestamp", frame->best_effort_timestamp);
print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base); print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base);
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_duration_ts ("pkt_duration", frame->pkt_duration); print_duration_ts ("pkt_duration", frame->pkt_duration);
print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base); print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base);
)
#endif
print_duration_ts ("duration", frame->duration);
print_duration_time("duration_time", frame->duration, &stream->time_base);
if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos); if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos);
else print_str_opt("pkt_pos", "N/A"); else print_str_opt("pkt_pos", "N/A");
if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str); if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str);
@ -2604,8 +2640,12 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_str_opt("sample_aspect_ratio", "N/A"); print_str_opt("sample_aspect_ratio", "N/A");
} }
print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type)); print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type));
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_int("coded_picture_number", frame->coded_picture_number); print_int("coded_picture_number", frame->coded_picture_number);
print_int("display_picture_number", frame->display_picture_number); print_int("display_picture_number", frame->display_picture_number);
)
#endif
print_int("interlaced_frame", frame->interlaced_frame); print_int("interlaced_frame", frame->interlaced_frame);
print_int("top_field_first", frame->top_field_first); print_int("top_field_first", frame->top_field_first);
print_int("repeat_pict", frame->repeat_pict); print_int("repeat_pict", frame->repeat_pict);
@ -2644,8 +2684,11 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
name = av_frame_side_data_name(sd->type); name = av_frame_side_data_name(sd->type);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1); writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
print_int("rotation", av_display_rotation_get((int32_t *)sd->data)); print_int("rotation", rotation);
} else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) { } else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) {
print_int("active_format", *sd->data); print_int("active_format", *sd->data);
} else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) { } else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) {
@ -2700,6 +2743,9 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
} else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) { } else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) {
AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data; AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data;
print_dynamic_hdr_vivid(w, metadata); print_dynamic_hdr_vivid(w, metadata);
} else if (sd->type == AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT) {
print_ambient_viewing_environment(
w, (const AVAmbientViewingEnvironment *)sd->data);
} }
writer_print_section_footer(w); writer_print_section_footer(w);
} }
@ -2714,7 +2760,7 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
static av_always_inline int process_frame(WriterContext *w, static av_always_inline int process_frame(WriterContext *w,
InputFile *ifile, InputFile *ifile,
AVFrame *frame, AVPacket *pkt, AVFrame *frame, const AVPacket *pkt,
int *packet_new) int *packet_new)
{ {
AVFormatContext *fmt_ctx = ifile->fmt_ctx; AVFormatContext *fmt_ctx = ifile->fmt_ctx;
@ -2858,9 +2904,10 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
if (selected_streams[pkt->stream_index]) { if (selected_streams[pkt->stream_index]) {
AVRational tb = ifile->streams[pkt->stream_index].st->time_base; AVRational tb = ifile->streams[pkt->stream_index].st->time_base;
int64_t pts = pkt->pts != AV_NOPTS_VALUE ? pkt->pts : pkt->dts;
if (pkt->pts != AV_NOPTS_VALUE) if (pts != AV_NOPTS_VALUE)
*cur_ts = av_rescale_q(pkt->pts, tb, AV_TIME_BASE_Q); *cur_ts = av_rescale_q(pts, tb, AV_TIME_BASE_Q);
if (!has_start && *cur_ts != AV_NOPTS_VALUE) { if (!has_start && *cur_ts != AV_NOPTS_VALUE) {
start = *cur_ts; start = *cur_ts;
@ -2894,7 +2941,7 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
av_packet_unref(pkt); av_packet_unref(pkt);
//Flush remaining frames that are cached in the decoder //Flush remaining frames that are cached in the decoder
for (i = 0; i < fmt_ctx->nb_streams; i++) { for (i = 0; i < ifile->nb_streams; i++) {
pkt->stream_index = i; pkt->stream_index = i;
if (do_read_frames) { if (do_read_frames) {
while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0); while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0);
@ -3052,6 +3099,8 @@ static int show_stream(WriterContext *w, AVFormatContext *fmt_ctx, int stream_id
} }
print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id)); print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id));
print_int("initial_padding", par->initial_padding);
break; break;
case AVMEDIA_TYPE_SUBTITLE: case AVMEDIA_TYPE_SUBTITLE:
@ -3278,15 +3327,9 @@ static int show_format(WriterContext *w, InputFile *ifile)
static void show_error(WriterContext *w, int err) static void show_error(WriterContext *w, int err)
{ {
char errbuf[128];
const char *errbuf_ptr = errbuf;
if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
errbuf_ptr = strerror(AVUNERROR(err));
writer_print_section_header(w, SECTION_ID_ERROR); writer_print_section_header(w, SECTION_ID_ERROR);
print_int("code", err); print_int("code", err);
print_str("string", errbuf_ptr); print_str("string", av_err2str(err));
writer_print_section_footer(w); writer_print_section_footer(w);
} }
@ -3299,10 +3342,8 @@ static int open_input_file(InputFile *ifile, const char *filename,
int scan_all_pmts_set = 0; int scan_all_pmts_set = 0;
fmt_ctx = avformat_alloc_context(); fmt_ctx = avformat_alloc_context();
if (!fmt_ctx) { if (!fmt_ctx)
print_error(filename, AVERROR(ENOMEM)); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) { if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE); av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
@ -3320,7 +3361,7 @@ static int open_input_file(InputFile *ifile, const char *filename,
ifile->fmt_ctx = fmt_ctx; ifile->fmt_ctx = fmt_ctx;
if (scan_all_pmts_set) if (scan_all_pmts_set)
av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE); av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);
while ((t = av_dict_get(format_opts, "", t, AV_DICT_IGNORE_SUFFIX))) while ((t = av_dict_iterate(format_opts, t)))
av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key); av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key);
if (find_stream_info) { if (find_stream_info) {
@ -3718,7 +3759,7 @@ static void opt_input_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
input_filename = arg; input_filename = arg;
} }
@ -3737,7 +3778,7 @@ static void opt_output_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
output_filename = arg; output_filename = arg;
} }

@ -0,0 +1,145 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of objpool.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/common.h"
#include "libavutil/error.h"
#include "libavutil/frame.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
struct ObjPool {
void *pool[32];
unsigned int pool_count;
ObjPoolCBAlloc alloc;
ObjPoolCBReset reset;
ObjPoolCBFree free;
};
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free)
{
ObjPool *op = av_mallocz(sizeof(*op));
if (!op)
return NULL;
op->alloc = cb_alloc;
op->reset = cb_reset;
op->free = cb_free;
return op;
}
void objpool_free(ObjPool **pop)
{
ObjPool *op = *pop;
if (!op)
return;
for (unsigned int i = 0; i < op->pool_count; i++)
op->free(&op->pool[i]);
av_freep(pop);
}
int objpool_get(ObjPool *op, void **obj)
{
if (op->pool_count) {
*obj = op->pool[--op->pool_count];
op->pool[op->pool_count] = NULL;
} else
*obj = op->alloc();
return *obj ? 0 : AVERROR(ENOMEM);
}
void objpool_release(ObjPool *op, void **obj)
{
if (!*obj)
return;
op->reset(*obj);
if (op->pool_count < FF_ARRAY_ELEMS(op->pool))
op->pool[op->pool_count++] = *obj;
else
op->free(obj);
*obj = NULL;
}
static void *alloc_packet(void)
{
return av_packet_alloc();
}
static void *alloc_frame(void)
{
return av_frame_alloc();
}
static void reset_packet(void *obj)
{
av_packet_unref(obj);
}
static void reset_frame(void *obj)
{
av_frame_unref(obj);
}
static void free_packet(void **obj)
{
AVPacket *pkt = *obj;
av_packet_free(&pkt);
*obj = NULL;
}
static void free_frame(void **obj)
{
AVFrame *frame = *obj;
av_frame_free(&frame);
*obj = NULL;
}
ObjPool *objpool_alloc_packets(void)
{
return objpool_alloc(alloc_packet, reset_packet, free_packet);
}
ObjPool *objpool_alloc_frames(void)
{
return objpool_alloc(alloc_frame, reset_frame, free_frame);
}
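The pool above is a small, fixed-capacity free list driven by the three callbacks. A minimal usage sketch for the packet flavour of the pool follows; the function name and the loop are illustrative only, not part of this commit, and error handling is trimmed.
/* Illustrative sketch only: recycle AVPackets through the pool defined above. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_objpool.h"
static int objpool_usage_sketch(void)
{
    ObjPool *pool = objpool_alloc_packets();
    if (!pool)
        return AVERROR(ENOMEM);
    for (int i = 0; i < 4; i++) {
        AVPacket *pkt = NULL;
        int ret = objpool_get(pool, (void**)&pkt);  /* reuse a pooled packet or allocate one */
        if (ret < 0) {
            objpool_free(&pool);
            return ret;
        }
        /* ... fill and consume the packet ... */
        objpool_release(pool, (void**)&pkt);        /* unref it and hand it back to the pool */
    }
    objpool_free(&pool);                            /* also frees any packets still held in the pool */
    return 0;
}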

@ -0,0 +1,50 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of the objpool.h file living in the ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_OBJPOOL_H
#define FFTOOLS_OBJPOOL_H
typedef struct ObjPool ObjPool;
typedef void* (*ObjPoolCBAlloc)(void);
typedef void (*ObjPoolCBReset)(void *);
typedef void (*ObjPoolCBFree)(void **);
void objpool_free(ObjPool **op);
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free);
ObjPool *objpool_alloc_packets(void);
ObjPool *objpool_alloc_frames(void);
int objpool_get(ObjPool *op, void **obj);
void objpool_release(ObjPool *op, void **obj);
#endif // FFTOOLS_OBJPOOL_H
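Besides the packet and frame helpers, the callback typedefs above allow pools over arbitrary objects. A hedged sketch with a made-up buffer type; MyBuf and the my_buf_* callbacks are hypothetical names, not part of fftools.
/* Illustrative sketch only: an ObjPool over a custom type via the typedefs above. */
#include <stdlib.h>
#include <string.h>
#include "fftools_objpool.h"
typedef struct MyBuf { char data[256]; } MyBuf;
static void *my_buf_alloc(void)      { return calloc(1, sizeof(MyBuf)); }  /* ObjPoolCBAlloc */
static void  my_buf_reset(void *obj) { memset(obj, 0, sizeof(MyBuf)); }    /* ObjPoolCBReset */
static void  my_buf_free(void **obj) { free(*obj); *obj = NULL; }          /* ObjPoolCBFree */
static ObjPool *my_buf_pool_create(void)
{
    return objpool_alloc(my_buf_alloc, my_buf_reset, my_buf_free);
}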

@ -1,6 +1,7 @@
/* /*
* Option handlers shared between the tools. * Option handlers shared between the tools.
* Copyright (c) 2022 Taner Sener * Copyright (c) 2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -24,6 +25,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library. * by us to develop the ffmpeg-kit library.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - time field in report_callback updated to double
*
* ffmpeg-kit changes by Taner Sener * ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -88,7 +95,7 @@ static __thread FILE *report_file = NULL;
static __thread int report_file_level = AV_LOG_DEBUG; static __thread int report_file_level = AV_LOG_DEBUG;
extern void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, va_list vargs); extern void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, va_list vargs);
extern void (*report_callback)(int, float, float, int64_t, int, double, double); extern void (*report_callback)(int, float, float, int64_t, double, double, double);
extern __thread char *program_name; extern __thread char *program_name;
int show_license(void *optctx, const char *opt, const char *arg) int show_license(void *optctx, const char *opt, const char *arg)

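For reference, a handler matching the widened report_callback signature above now receives the time value as a double. A hedged sketch; the handler name and the print format are illustrative only.
/* Illustrative sketch only: a handler for the updated callback signature. */
#include <inttypes.h>
#include <stdio.h>
static void my_statistics_handler(int frame_number, float fps, float quality,
                                  int64_t size, double time, double bitrate,
                                  double speed)
{
    /* the time argument is now a double instead of an int */
    printf("frame=%d fps=%.1f q=%.1f size=%" PRId64 " time=%.3f bitrate=%.2f speed=%.2fx\n",
           frame_number, fps, quality, size, time, bitrate, speed);
}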
@ -0,0 +1,462 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of the sync_queue.c file living in the ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/mathematics.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
typedef struct SyncQueueStream {
AVFifo *fifo;
AVRational tb;
/* stream head: largest timestamp seen */
int64_t head_ts;
int limiting;
/* no more frames will be sent for this stream */
int finished;
uint64_t frames_sent;
uint64_t frames_max;
} SyncQueueStream;
struct SyncQueue {
enum SyncQueueType type;
/* no more frames will be sent for any stream */
int finished;
/* sync head: the stream with the _smallest_ head timestamp
* this stream determines which frames can be output */
int head_stream;
/* the finished stream with the smallest finish timestamp or -1 */
int head_finished_stream;
// maximum buffering duration in microseconds
int64_t buf_size_us;
SyncQueueStream *streams;
unsigned int nb_streams;
// pool of preallocated frames to avoid constant allocations
ObjPool *pool;
};
static void frame_move(const SyncQueue *sq, SyncQueueFrame dst,
SyncQueueFrame src)
{
if (sq->type == SYNC_QUEUE_PACKETS)
av_packet_move_ref(dst.p, src.p);
else
av_frame_move_ref(dst.f, src.f);
}
static int64_t frame_ts(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ?
frame.p->pts + frame.p->duration :
frame.f->pts + frame.f->duration;
}
static int frame_null(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ? (frame.p == NULL) : (frame.f == NULL);
}
static void finish_stream(SyncQueue *sq, unsigned int stream_idx)
{
SyncQueueStream *st = &sq->streams[stream_idx];
st->finished = 1;
if (st->limiting && st->head_ts != AV_NOPTS_VALUE) {
/* check if this stream is the new finished head */
if (sq->head_finished_stream < 0 ||
av_compare_ts(st->head_ts, st->tb,
sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb) < 0) {
sq->head_finished_stream = stream_idx;
}
/* mark as finished all streams that should no longer receive new frames,
* due to them being ahead of some finished stream */
st = &sq->streams[sq->head_finished_stream];
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
if (st != st1 && st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st->head_ts, st->tb, st1->head_ts, st1->tb) <= 0)
st1->finished = 1;
}
}
/* mark the whole queue as finished if all streams are finished */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
if (!sq->streams[i].finished)
return;
}
sq->finished = 1;
}
static void queue_head_update(SyncQueue *sq)
{
if (sq->head_stream < 0) {
/* wait for one timestamp in each stream before determining
* the queue head */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st = &sq->streams[i];
if (st->limiting && st->head_ts == AV_NOPTS_VALUE)
return;
}
// placeholder value, correct one will be found below
sq->head_stream = 0;
}
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st_head = &sq->streams[sq->head_stream];
SyncQueueStream *st_other = &sq->streams[i];
if (st_other->limiting && st_other->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st_other->head_ts, st_other->tb,
st_head->head_ts, st_head->tb) < 0)
sq->head_stream = i;
}
}
/* update this stream's head timestamp */
static void stream_update_ts(SyncQueue *sq, unsigned int stream_idx, int64_t ts)
{
SyncQueueStream *st = &sq->streams[stream_idx];
if (ts == AV_NOPTS_VALUE ||
(st->head_ts != AV_NOPTS_VALUE && st->head_ts >= ts))
return;
st->head_ts = ts;
/* if this stream is now ahead of some finished stream, then
* this stream is also finished */
if (sq->head_finished_stream >= 0 &&
av_compare_ts(sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb,
ts, st->tb) <= 0)
finish_stream(sq, stream_idx);
/* update the overall head timestamp if it could have changed */
if (st->limiting &&
(sq->head_stream < 0 || sq->head_stream == stream_idx))
queue_head_update(sq);
}
/* If the queue for the given stream (or all streams when stream_idx=-1)
* is overflowing, trigger a fake heartbeat on lagging streams.
*
* @return 1 if heartbeat triggered, 0 otherwise
*/
static int overflow_heartbeat(SyncQueue *sq, int stream_idx)
{
SyncQueueStream *st;
SyncQueueFrame frame;
int64_t tail_ts = AV_NOPTS_VALUE;
/* if no stream specified, pick the one that is most ahead */
if (stream_idx < 0) {
int64_t ts = AV_NOPTS_VALUE;
for (int i = 0; i < sq->nb_streams; i++) {
st = &sq->streams[i];
if (st->head_ts != AV_NOPTS_VALUE &&
(ts == AV_NOPTS_VALUE ||
av_compare_ts(ts, sq->streams[stream_idx].tb,
st->head_ts, st->tb) < 0)) {
ts = st->head_ts;
stream_idx = i;
}
}
/* no stream has a timestamp yet -> nothing to do */
if (stream_idx < 0)
return 0;
}
st = &sq->streams[stream_idx];
/* get the chosen stream's tail timestamp */
for (size_t i = 0; tail_ts == AV_NOPTS_VALUE &&
av_fifo_peek(st->fifo, &frame, 1, i) >= 0; i++)
tail_ts = frame_ts(sq, frame);
/* overflow triggers when the tail is over specified duration behind the head */
if (tail_ts == AV_NOPTS_VALUE || tail_ts >= st->head_ts ||
av_rescale_q(st->head_ts - tail_ts, st->tb, AV_TIME_BASE_Q) < sq->buf_size_us)
return 0;
/* signal a fake timestamp for all streams that prevent tail_ts from being output */
tail_ts++;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
int64_t ts;
if (st == st1 || st1->finished ||
(st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(tail_ts, st->tb, st1->head_ts, st1->tb) <= 0))
continue;
ts = av_rescale_q(tail_ts, st->tb, st1->tb);
if (st1->head_ts != AV_NOPTS_VALUE)
ts = FFMAX(st1->head_ts + 1, ts);
stream_update_ts(sq, i, ts);
}
return 1;
}
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame)
{
SyncQueueStream *st;
SyncQueueFrame dst;
int64_t ts;
int ret;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(st->tb.num > 0 && st->tb.den > 0);
if (frame_null(sq, frame)) {
finish_stream(sq, stream_idx);
return 0;
}
if (st->finished)
return AVERROR_EOF;
ret = objpool_get(sq->pool, (void**)&dst);
if (ret < 0)
return ret;
frame_move(sq, dst, frame);
ts = frame_ts(sq, dst);
ret = av_fifo_write(st->fifo, &dst, 1);
if (ret < 0) {
frame_move(sq, frame, dst);
objpool_release(sq->pool, (void**)&dst);
return ret;
}
stream_update_ts(sq, stream_idx, ts);
st->frames_sent++;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
return 0;
}
static int receive_for_stream(SyncQueue *sq, unsigned int stream_idx,
SyncQueueFrame frame)
{
SyncQueueStream *st_head = sq->head_stream >= 0 ?
&sq->streams[sq->head_stream] : NULL;
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
if (av_fifo_can_read(st->fifo)) {
SyncQueueFrame peek;
int64_t ts;
int cmp = 1;
av_fifo_peek(st->fifo, &peek, 1, 0);
ts = frame_ts(sq, peek);
/* check if this stream's tail timestamp does not overtake
* the overall queue head */
if (ts != AV_NOPTS_VALUE && st_head)
cmp = av_compare_ts(ts, st->tb, st_head->head_ts, st_head->tb);
/* We can release frames that do not end after the queue head.
* Frames with no timestamps are just passed through with no conditions.
*/
if (cmp <= 0 || ts == AV_NOPTS_VALUE) {
frame_move(sq, frame, peek);
objpool_release(sq->pool, (void**)&peek);
av_fifo_drain2(st->fifo, 1);
return 0;
}
}
return (sq->finished || (st->finished && !av_fifo_can_read(st->fifo))) ?
AVERROR_EOF : AVERROR(EAGAIN);
}
static int receive_internal(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int nb_eof = 0;
int ret;
/* read a frame for a specific stream */
if (stream_idx >= 0) {
ret = receive_for_stream(sq, stream_idx, frame);
return (ret < 0) ? ret : stream_idx;
}
/* read a frame for any stream with available output */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
ret = receive_for_stream(sq, i, frame);
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
nb_eof += (ret == AVERROR_EOF);
continue;
}
return (ret < 0) ? ret : i;
}
return (nb_eof == sq->nb_streams) ? AVERROR_EOF : AVERROR(EAGAIN);
}
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int ret = receive_internal(sq, stream_idx, frame);
/* try again if the queue overflowed and triggered a fake heartbeat
* for lagging streams */
if (ret == AVERROR(EAGAIN) && overflow_heartbeat(sq, stream_idx))
ret = receive_internal(sq, stream_idx, frame);
return ret;
}
int sq_add_stream(SyncQueue *sq, int limiting)
{
SyncQueueStream *tmp, *st;
tmp = av_realloc_array(sq->streams, sq->nb_streams + 1, sizeof(*sq->streams));
if (!tmp)
return AVERROR(ENOMEM);
sq->streams = tmp;
st = &sq->streams[sq->nb_streams];
memset(st, 0, sizeof(*st));
st->fifo = av_fifo_alloc2(1, sizeof(SyncQueueFrame), AV_FIFO_FLAG_AUTO_GROW);
if (!st->fifo)
return AVERROR(ENOMEM);
/* we set a valid default, so that a pathological stream that never
* receives even a real timebase (and no frames) won't stall all other
* streams forever; cf. overflow_heartbeat() */
st->tb = (AVRational){ 1, 1 };
st->head_ts = AV_NOPTS_VALUE;
st->frames_max = UINT64_MAX;
st->limiting = limiting;
return sq->nb_streams++;
}
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(!av_fifo_can_read(st->fifo));
if (st->head_ts != AV_NOPTS_VALUE)
st->head_ts = av_rescale_q(st->head_ts, st->tb, tb);
st->tb = tb;
}
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx, uint64_t frames)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
st->frames_max = frames;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
}
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us)
{
SyncQueue *sq = av_mallocz(sizeof(*sq));
if (!sq)
return NULL;
sq->type = type;
sq->buf_size_us = buf_size_us;
sq->head_stream = -1;
sq->head_finished_stream = -1;
sq->pool = (type == SYNC_QUEUE_PACKETS) ? objpool_alloc_packets() :
objpool_alloc_frames();
if (!sq->pool) {
av_freep(&sq);
return NULL;
}
return sq;
}
void sq_free(SyncQueue **psq)
{
SyncQueue *sq = *psq;
if (!sq)
return;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueFrame frame;
while (av_fifo_read(sq->streams[i].fifo, &frame, 1) >= 0)
objpool_release(sq->pool, (void**)&frame);
av_fifo_freep2(&sq->streams[i].fifo);
}
av_freep(&sq->streams);
objpool_free(&sq->pool);
av_freep(psq);
}
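The queue above only releases a packet once every limiting stream has advanced past its end timestamp, and it buffers at most buf_size_us before the overflow heartbeat forces lagging streams forward. A rough sketch of the call pattern for two packet streams; the timebases, buffer size and function name are illustrative, and most error handling is trimmed.
/* Illustrative sketch only: interleave two packet streams through a SyncQueue. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "libavutil/rational.h"
#include "fftools_sync_queue.h"
static void sync_queue_usage_sketch(AVPacket *video_pkt, AVPacket *audio_pkt)
{
    SyncQueue *sq = sq_alloc(SYNC_QUEUE_PACKETS, 10 * 1000000LL);  /* buffer up to ~10 seconds */
    int vi = sq_add_stream(sq, 1);                /* limiting stream */
    int ai = sq_add_stream(sq, 1);
    sq_set_tb(sq, vi, (AVRational){ 1, 90000 });  /* must be set before sq_send() */
    sq_set_tb(sq, ai, (AVRational){ 1, 48000 });
    sq_send(sq, vi, SQPKT(video_pkt));            /* the queue takes ownership of the contents */
    sq_send(sq, ai, SQPKT(audio_pkt));
    for (;;) {
        AVPacket *out = av_packet_alloc();
        int idx;
        if (!out)
            break;
        idx = sq_receive(sq, -1, SQPKT(out));     /* read from any stream that is ready */
        if (idx < 0) {                            /* AVERROR(EAGAIN) or AVERROR_EOF */
            av_packet_free(&out);
            break;
        }
        /* ... write the packet, which belongs to stream idx ... */
        av_packet_free(&out);
    }
    sq_free(&sq);
}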

@ -0,0 +1,122 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of the sync_queue.h file living in the ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_SYNC_QUEUE_H
#define FFTOOLS_SYNC_QUEUE_H
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/frame.h"
enum SyncQueueType {
SYNC_QUEUE_PACKETS,
SYNC_QUEUE_FRAMES,
};
typedef union SyncQueueFrame {
AVFrame *f;
AVPacket *p;
} SyncQueueFrame;
#define SQFRAME(frame) ((SyncQueueFrame){ .f = (frame) })
#define SQPKT(pkt) ((SyncQueueFrame){ .p = (pkt) })
typedef struct SyncQueue SyncQueue;
/**
* Allocate a sync queue of the given type.
*
* @param buf_size_us maximum duration that will be buffered in microseconds
*/
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us);
void sq_free(SyncQueue **sq);
/**
* Add a new stream to the sync queue.
*
* @param limiting whether the stream is limiting, i.e. no other stream can be
* longer than this one
* @return
* - a non-negative stream index on success
* - a negative error code on error
*/
int sq_add_stream(SyncQueue *sq, int limiting);
/**
* Set the timebase for the stream with index stream_idx. Should be called
* before sending any frames for this stream.
*/
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb);
/**
* Limit the number of output frames for stream with index stream_idx
* to max_frames.
*/
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx,
uint64_t max_frames);
/**
* Submit a frame for the stream with index stream_idx.
*
* On success, the sync queue takes ownership of the frame and will reset the
* contents of the supplied frame. On failure, the frame remains owned by the
* caller.
*
* Sending a frame with NULL contents marks the stream as finished.
*
* @return
* - 0 on success
* - AVERROR_EOF when no more frames should be submitted for this stream
* - another negative error code on failure
*/
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame);
/**
* Read a frame from the queue.
*
* @param stream_idx index of the stream to read a frame for. May be -1, then
* try to read a frame from any stream that is ready for
* output.
* @param frame output frame will be written here on success. The frame is owned
* by the caller.
*
* @return
* - a non-negative index of the stream to which the returned frame belongs
* - AVERROR(EAGAIN) when more frames need to be submitted to the queue
* - AVERROR_EOF when no more frames will be available for this stream (for any
* stream if stream_idx is -1)
* - another negative error code on failure
*/
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame);
#endif // FFTOOLS_SYNC_QUEUE_H
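The sq_send() contract above, where NULL contents mark a stream finished, can be exercised directly; the wrapper function below is illustrative only.
/* Illustrative sketch only: end a stream using the NULL-contents convention. */
#include "fftools_sync_queue.h"
static int sync_queue_close_stream_sketch(SyncQueue *sq, unsigned int stream_idx)
{
    /* SQFRAME(NULL) carries NULL contents, so sq_send() marks the stream as
     * finished instead of queuing data; later sends return AVERROR_EOF. */
    return sq_send(sq, stream_idx, SQFRAME(NULL));
}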

@ -0,0 +1,259 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of the thread_queue.c file living in the ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"
#include "libavutil/thread.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"
enum {
FINISHED_SEND = (1 << 0),
FINISHED_RECV = (1 << 1),
};
typedef struct FifoElem {
void *obj;
unsigned int stream_idx;
} FifoElem;
struct ThreadQueue {
int *finished;
unsigned int nb_streams;
AVFifo *fifo;
ObjPool *obj_pool;
void (*obj_move)(void *dst, void *src);
pthread_mutex_t lock;
pthread_cond_t cond;
};
void tq_free(ThreadQueue **ptq)
{
ThreadQueue *tq = *ptq;
if (!tq)
return;
if (tq->fifo) {
FifoElem elem;
while (av_fifo_read(tq->fifo, &elem, 1) >= 0)
objpool_release(tq->obj_pool, &elem.obj);
}
av_fifo_freep2(&tq->fifo);
objpool_free(&tq->obj_pool);
av_freep(&tq->finished);
pthread_cond_destroy(&tq->cond);
pthread_mutex_destroy(&tq->lock);
av_freep(ptq);
}
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src))
{
ThreadQueue *tq;
int ret;
tq = av_mallocz(sizeof(*tq));
if (!tq)
return NULL;
ret = pthread_cond_init(&tq->cond, NULL);
if (ret) {
av_freep(&tq);
return NULL;
}
ret = pthread_mutex_init(&tq->lock, NULL);
if (ret) {
pthread_cond_destroy(&tq->cond);
av_freep(&tq);
return NULL;
}
tq->finished = av_calloc(nb_streams, sizeof(*tq->finished));
if (!tq->finished)
goto fail;
tq->nb_streams = nb_streams;
tq->fifo = av_fifo_alloc2(queue_size, sizeof(FifoElem), 0);
if (!tq->fifo)
goto fail;
tq->obj_pool = obj_pool;
tq->obj_move = obj_move;
return tq;
fail:
tq_free(&tq);
return NULL;
}
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data)
{
int *finished;
int ret;
av_assert0(stream_idx < tq->nb_streams);
finished = &tq->finished[stream_idx];
pthread_mutex_lock(&tq->lock);
if (*finished & FINISHED_SEND) {
ret = AVERROR(EINVAL);
goto finish;
}
while (!(*finished & FINISHED_RECV) && !av_fifo_can_write(tq->fifo))
pthread_cond_wait(&tq->cond, &tq->lock);
if (*finished & FINISHED_RECV) {
ret = AVERROR_EOF;
*finished |= FINISHED_SEND;
} else {
FifoElem elem = { .stream_idx = stream_idx };
ret = objpool_get(tq->obj_pool, &elem.obj);
if (ret < 0)
goto finish;
tq->obj_move(elem.obj, data);
ret = av_fifo_write(tq->fifo, &elem, 1);
av_assert0(ret >= 0);
pthread_cond_broadcast(&tq->cond);
}
finish:
pthread_mutex_unlock(&tq->lock);
return ret;
}
static int receive_locked(ThreadQueue *tq, int *stream_idx,
void *data)
{
FifoElem elem;
unsigned int nb_finished = 0;
if (av_fifo_read(tq->fifo, &elem, 1) >= 0) {
tq->obj_move(data, elem.obj);
objpool_release(tq->obj_pool, &elem.obj);
*stream_idx = elem.stream_idx;
return 0;
}
for (unsigned int i = 0; i < tq->nb_streams; i++) {
if (!(tq->finished[i] & FINISHED_SEND))
continue;
/* return EOF to the consumer at most once for each stream */
if (!(tq->finished[i] & FINISHED_RECV)) {
tq->finished[i] |= FINISHED_RECV;
*stream_idx = i;
return AVERROR_EOF;
}
nb_finished++;
}
return nb_finished == tq->nb_streams ? AVERROR_EOF : AVERROR(EAGAIN);
}
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data)
{
int ret;
*stream_idx = -1;
pthread_mutex_lock(&tq->lock);
while (1) {
ret = receive_locked(tq, stream_idx, data);
if (ret == AVERROR(EAGAIN)) {
pthread_cond_wait(&tq->cond, &tq->lock);
continue;
}
break;
}
if (ret == 0)
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
return ret;
}
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as send-finished;
* next time the consumer thread tries to read this stream it will get
* an EOF and recv-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_SEND;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as recv-finished;
* next time the producer thread tries to send for this stream, it will
* get an EOF and send-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_RECV;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}
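tq_send() blocks while the FIFO is full and tq_receive() blocks while it is empty, so the queue is designed to sit between a producer thread and a consumer thread. A rough sketch of queue creation plus the consumer-side loop, assuming packets; the function names outside this file are illustrative and error handling is trimmed.
/* Illustrative sketch only: a packet ThreadQueue and its consumer-side loop. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"
static void pkt_move_sketch(void *dst, void *src)
{
    av_packet_move_ref(dst, src);              /* obj_move callback for AVPackets */
}
static ThreadQueue *packet_queue_create_sketch(unsigned int nb_streams)
{
    /* the queue takes ownership of the object pool */
    return tq_alloc(nb_streams, 8, objpool_alloc_packets(), pkt_move_sketch);
}
static void packet_consumer_sketch(ThreadQueue *tq)
{
    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return;
    for (;;) {
        int stream_idx;
        int ret = tq_receive(tq, &stream_idx, pkt);
        if (stream_idx < 0)
            break;                             /* every stream has reached EOF */
        if (ret == AVERROR_EOF)
            continue;                          /* this stream is done; reported at most once */
        /* ... process pkt, which belongs to stream_idx ... */
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}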

@ -0,0 +1,94 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of the thread_queue.h file living in the ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_THREAD_QUEUE_H
#define FFTOOLS_THREAD_QUEUE_H
#include <string.h>
#include "fftools_objpool.h"
typedef struct ThreadQueue ThreadQueue;
/**
* Allocate a queue for sending data between threads.
*
* @param nb_streams number of streams for which a distinct EOF state is
* maintained
* @param queue_size number of items that can be stored in the queue without
* blocking
* @param obj_pool object pool that will be used to allocate items stored in the
* queue; the pool becomes owned by the queue
* @param obj_move callback that moves the contents between two data pointers
*/
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src));
void tq_free(ThreadQueue **tq);
/**
* Send an item for the given stream to the queue.
*
* @param data the item to send, its contents will be moved using the callback
* provided to tq_alloc(); on failure the item will be left
* untouched
* @return
* - 0 the item was successfully sent
* - AVERROR(ENOMEM) could not allocate an item for writing to the FIFO
* - AVERROR(EINVAL) the sending side has previously been marked as finished
* - AVERROR_EOF the receiving side has marked the given stream as finished
*/
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data);
/**
* Mark the given stream finished from the sending side.
*/
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx);
/**
* Read the next item from the queue.
*
* @param stream_idx the index of the stream that was processed or -1 will be
* written here
* @param data the data item will be written here on success using the
* callback provided to tq_alloc()
* @return
* - 0 a data item was successfully read; *stream_idx contains a non-negative
* stream index
* - AVERROR_EOF When *stream_idx is non-negative, this signals that the sending
* side has marked the given stream as finished. This will happen at most once
* for each stream. When *stream_idx is -1, all streams are done.
*/
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data);
/**
* Mark the given stream finished from the receiving side.
*/
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx);
#endif // FFTOOLS_THREAD_QUEUE_H
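The producer side follows the tq_send()/tq_send_finish() contract documented above. A minimal sketch; the function name and the is_last flag are illustrative only.
/* Illustrative sketch only: producer side of a ThreadQueue. */
#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_thread_queue.h"
static int packet_producer_sketch(ThreadQueue *tq, unsigned int stream_idx,
                                  AVPacket *pkt, int is_last)
{
    /* on success the contents of pkt are moved into the queue */
    int ret = tq_send(tq, stream_idx, pkt);
    if (is_last)
        tq_send_finish(tq, stream_idx);  /* no more data; the consumer will see EOF for this stream */
    return ret;                          /* AVERROR_EOF here means the consumer already stopped reading */
}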

@ -38,7 +38,7 @@ PROJECT_NAME = "FFmpegKit Linux API"
# could be handy for archiving the generated documentation or if some version # could be handy for archiving the generated documentation or if some version
# control system is used. # control system is used.
PROJECT_NUMBER = 5.1 PROJECT_NUMBER = 6.0
# Using the PROJECT_BRIEF tag one can provide an optional one line description # Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a # for a project that appears at the top of each page and should give viewer a

@ -1,6 +1,6 @@
# ffmpeg-kit 5.1 configure.ac # ffmpeg-kit 6.0 configure.ac
AC_INIT([ffmpeg-kit], [5.1], [https://github.com/arthenica/ffmpeg-kit/issues/new]) AC_INIT([ffmpeg-kit], [6.0], [https://github.com/arthenica/ffmpeg-kit/issues/new])
AC_CONFIG_MACRO_DIR([m4]) AC_CONFIG_MACRO_DIR([m4])
AC_CONFIG_SRCDIR([src/FFmpegKit.cpp]) AC_CONFIG_SRCDIR([src/FFmpegKit.cpp])
@ -28,7 +28,8 @@ CFLAGS="$cflags_bckup"
FFMPEG_LIBS="-lavcodec -lavfilter -lavformat -lavutil -lswscale -lswresample" FFMPEG_LIBS="-lavcodec -lavfilter -lavformat -lavutil -lswscale -lswresample"
AC_SUBST(FFMPEG_LIBS) AC_SUBST(FFMPEG_LIBS)
VERSION_INFO="9:1:5" # CURRENT INTERFACE:REVISION:AGE
VERSION_INFO="11:0:6"
AC_SUBST(VERSION_INFO) AC_SUBST(VERSION_INFO)
LT_INIT LT_INIT

@ -17,14 +17,13 @@
* along with FFmpegKit. If not, see <http://www.gnu.org/licenses/>. * along with FFmpegKit. If not, see <http://www.gnu.org/licenses/>.
*/ */
extern "C" {
#include "fftools_ffmpeg.h"
}
#include "ArchDetect.h" #include "ArchDetect.h"
#include "FFmpegKit.h" #include "FFmpegKit.h"
#include "FFmpegKitConfig.h" #include "FFmpegKitConfig.h"
#include "Packages.h" #include "Packages.h"
extern void cancel_operation(long id);
extern void* ffmpegKitInitialize(); extern void* ffmpegKitInitialize();
const void* _ffmpegKitInitializeri{ffmpegKitInitialize()}; const void* _ffmpegKitInitializeri{ffmpegKitInitialize()};

@ -23,7 +23,7 @@
extern "C" { extern "C" {
#include "libavutil/ffversion.h" #include "libavutil/ffversion.h"
#include "libavutil/bprint.h" #include "libavutil/bprint.h"
#include "fftools_ffmpeg.h" #include "fftools_cmdutils.h"
} }
#include "ArchDetect.h" #include "ArchDetect.h"
#include "FFmpegKit.h" #include "FFmpegKit.h"
@ -44,6 +44,10 @@ extern "C" {
#include <fstream> #include <fstream>
#include <algorithm> #include <algorithm>
extern void set_report_callback(void (*callback)(int, float, float, int64_t, double, double, double));
extern void cancel_operation(long id);
/** /**
* Generates ids for named ffmpeg kit pipes. * Generates ids for named ffmpeg kit pipes.
*/ */
@ -193,7 +197,7 @@ class CallbackData {
const float videoFps, const float videoFps,
const float videoQuality, const float videoQuality,
const int64_t size, const int64_t size,
const int time, const double time,
const double bitrate, const double bitrate,
const double speed) : const double speed) :
_type{StatisticsType}, _type{StatisticsType},
@ -239,7 +243,7 @@ class CallbackData {
return _statisticsSize; return _statisticsSize;
} }
int getStatisticsTime() { double getStatisticsTime() {
return _statisticsTime; return _statisticsTime;
} }
@ -262,7 +266,7 @@ class CallbackData {
float _statisticsFps; // statistics fps float _statisticsFps; // statistics fps
float _statisticsQuality; // statistics quality float _statisticsQuality; // statistics quality
int64_t _statisticsSize; // statistics size int64_t _statisticsSize; // statistics size
int _statisticsTime; // statistics time double _statisticsTime; // statistics time
double _statisticsBitrate; // statistics bitrate double _statisticsBitrate; // statistics bitrate
double _statisticsSpeed; // statistics speed double _statisticsSpeed; // statistics speed
}; };
@ -516,7 +520,7 @@ void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, v
* @param bitrate output bit rate in kbits/s * @param bitrate output bit rate in kbits/s
* @param speed processing speed = processed duration / operation duration * @param speed processing speed = processed duration / operation duration
*/ */
void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, int time, double bitrate, double speed) { void ffmpegkit_statistics_callback_function(int frameNumber, float fps, float quality, int64_t size, double time, double bitrate, double speed) {
statisticsCallbackDataAdd(frameNumber, fps, quality, size, time, bitrate, speed); statisticsCallbackDataAdd(frameNumber, fps, quality, size, time, bitrate, speed);
} }
@ -604,7 +608,7 @@ static void process_log(long sessionId, int levelValueInt, AVBPrint* logMessage)
} }
} }
void process_statistics(long sessionId, int videoFrameNumber, float videoFps, float videoQuality, long size, int time, double bitrate, double speed) { void process_statistics(long sessionId, int videoFrameNumber, float videoFps, float videoQuality, long size, double time, double bitrate, double speed) {
std::shared_ptr<ffmpegkit::Statistics> statistics = std::make_shared<ffmpegkit::Statistics>(sessionId, videoFrameNumber, videoFps, videoQuality, size, time, bitrate, speed); std::shared_ptr<ffmpegkit::Statistics> statistics = std::make_shared<ffmpegkit::Statistics>(sessionId, videoFrameNumber, videoFps, videoQuality, size, time, bitrate, speed);
auto session = ffmpegkit::FFmpegKitConfig::getSession(sessionId); auto session = ffmpegkit::FFmpegKitConfig::getSession(sessionId);

@ -42,7 +42,7 @@ namespace ffmpegkit {
public: public:
/** Global library version */ /** Global library version */
static constexpr const char* FFmpegKitVersion = "5.1"; static constexpr const char* FFmpegKitVersion = "6.0";
/** /**
* Prefix of named pipes created by ffmpeg-kit. * Prefix of named pipes created by ffmpeg-kit.

@ -17,7 +17,6 @@
* along with FFmpegKit. If not, see <http://www.gnu.org/licenses/>. * along with FFmpegKit. If not, see <http://www.gnu.org/licenses/>.
*/ */
#include "fftools_ffmpeg.h"
#include "FFmpegKit.h" #include "FFmpegKit.h"
#include "FFmpegKitConfig.h" #include "FFmpegKitConfig.h"
#include "FFprobeKit.h" #include "FFprobeKit.h"

@ -22,12 +22,17 @@ libffmpegkit_la_SOURCES = \
ffmpegkit_exception.cpp \ ffmpegkit_exception.cpp \
fftools_cmdutils.c \ fftools_cmdutils.c \
fftools_ffmpeg.c \ fftools_ffmpeg.c \
fftools_ffmpeg_demux.c \
fftools_ffmpeg_filter.c \ fftools_ffmpeg_filter.c \
fftools_ffmpeg_hw.c \ fftools_ffmpeg_hw.c \
fftools_ffmpeg_mux.c \ fftools_ffmpeg_mux.c \
fftools_ffmpeg_mux_init.c \
fftools_ffmpeg_opt.c \ fftools_ffmpeg_opt.c \
fftools_ffprobe.c \ fftools_ffprobe.c \
fftools_opt_common.c fftools_objpool.c \
fftools_opt_common.c \
fftools_sync_queue.c \
fftools_thread_queue.c
include_HEADERS = \ include_HEADERS = \
AbstractSession.h \ AbstractSession.h \
@ -59,8 +64,12 @@ include_HEADERS = \
ffmpegkit_exception.h \ ffmpegkit_exception.h \
fftools_cmdutils.h \ fftools_cmdutils.h \
fftools_ffmpeg.h \ fftools_ffmpeg.h \
fftools_ffmpeg_mux.h \
fftools_fopen_utf8.h \ fftools_fopen_utf8.h \
fftools_opt_common.h fftools_objpool.h \
fftools_opt_common.h \
fftools_sync_queue.h \
fftools_thread_queue.h
libffmpegkit_la_CFLAGS = $(CFLAGS) libffmpegkit_la_CFLAGS = $(CFLAGS)
libffmpegkit_la_OBJCFLAGS = $(CFLAGS) libffmpegkit_la_OBJCFLAGS = $(CFLAGS)

@ -147,12 +147,17 @@ am_libffmpegkit_la_OBJECTS = libffmpegkit_la-AbstractSession.lo \
libffmpegkit_la-ffmpegkit_exception.lo \ libffmpegkit_la-ffmpegkit_exception.lo \
libffmpegkit_la-fftools_cmdutils.lo \ libffmpegkit_la-fftools_cmdutils.lo \
libffmpegkit_la-fftools_ffmpeg.lo \ libffmpegkit_la-fftools_ffmpeg.lo \
libffmpegkit_la-fftools_ffmpeg_demux.lo \
libffmpegkit_la-fftools_ffmpeg_filter.lo \ libffmpegkit_la-fftools_ffmpeg_filter.lo \
libffmpegkit_la-fftools_ffmpeg_hw.lo \ libffmpegkit_la-fftools_ffmpeg_hw.lo \
libffmpegkit_la-fftools_ffmpeg_mux.lo \ libffmpegkit_la-fftools_ffmpeg_mux.lo \
libffmpegkit_la-fftools_ffmpeg_mux_init.lo \
libffmpegkit_la-fftools_ffmpeg_opt.lo \ libffmpegkit_la-fftools_ffmpeg_opt.lo \
libffmpegkit_la-fftools_ffprobe.lo \ libffmpegkit_la-fftools_ffprobe.lo \
libffmpegkit_la-fftools_opt_common.lo libffmpegkit_la-fftools_objpool.lo \
libffmpegkit_la-fftools_opt_common.lo \
libffmpegkit_la-fftools_sync_queue.lo \
libffmpegkit_la-fftools_thread_queue.lo
libffmpegkit_la_OBJECTS = $(am_libffmpegkit_la_OBJECTS) libffmpegkit_la_OBJECTS = $(am_libffmpegkit_la_OBJECTS)
AM_V_lt = $(am__v_lt_@AM_V@) AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
@ -196,12 +201,17 @@ am__depfiles_remade = ./$(DEPDIR)/libffmpegkit_la-AbstractSession.Plo \
./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo \ ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo \ ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo ./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo \
./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
am__mv = mv -f am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
@ -415,12 +425,17 @@ libffmpegkit_la_SOURCES = \
ffmpegkit_exception.cpp \ ffmpegkit_exception.cpp \
fftools_cmdutils.c \ fftools_cmdutils.c \
fftools_ffmpeg.c \ fftools_ffmpeg.c \
fftools_ffmpeg_demux.c \
fftools_ffmpeg_filter.c \ fftools_ffmpeg_filter.c \
fftools_ffmpeg_hw.c \ fftools_ffmpeg_hw.c \
fftools_ffmpeg_mux.c \ fftools_ffmpeg_mux.c \
fftools_ffmpeg_mux_init.c \
fftools_ffmpeg_opt.c \ fftools_ffmpeg_opt.c \
fftools_ffprobe.c \ fftools_ffprobe.c \
fftools_opt_common.c fftools_objpool.c \
fftools_opt_common.c \
fftools_sync_queue.c \
fftools_thread_queue.c
include_HEADERS = \ include_HEADERS = \
AbstractSession.h \ AbstractSession.h \
@ -452,8 +467,12 @@ include_HEADERS = \
ffmpegkit_exception.h \ ffmpegkit_exception.h \
fftools_cmdutils.h \ fftools_cmdutils.h \
fftools_ffmpeg.h \ fftools_ffmpeg.h \
fftools_ffmpeg_mux.h \
fftools_fopen_utf8.h \ fftools_fopen_utf8.h \
fftools_opt_common.h fftools_objpool.h \
fftools_opt_common.h \
fftools_sync_queue.h \
fftools_thread_queue.h
libffmpegkit_la_CFLAGS = $(CFLAGS) libffmpegkit_la_CFLAGS = $(CFLAGS)
libffmpegkit_la_OBJCFLAGS = $(CFLAGS) libffmpegkit_la_OBJCFLAGS = $(CFLAGS)
@ -556,12 +575,17 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo@am__quote@ # am--include-marker @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo@am__quote@ # am--include-marker
$(am__depfiles_remade): $(am__depfiles_remade):
@$(MKDIR_P) $(@D) @$(MKDIR_P) $(@D)
@ -607,6 +631,13 @@ libffmpegkit_la-fftools_ffmpeg.lo: fftools_ffmpeg.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg.lo `test -f 'fftools_ffmpeg.c' || echo '$(srcdir)/'`fftools_ffmpeg.c @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg.lo `test -f 'fftools_ffmpeg.c' || echo '$(srcdir)/'`fftools_ffmpeg.c
libffmpegkit_la-fftools_ffmpeg_demux.lo: fftools_ffmpeg_demux.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_demux.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_demux.lo `test -f 'fftools_ffmpeg_demux.c' || echo '$(srcdir)/'`fftools_ffmpeg_demux.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_ffmpeg_demux.c' object='libffmpegkit_la-fftools_ffmpeg_demux.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_demux.lo `test -f 'fftools_ffmpeg_demux.c' || echo '$(srcdir)/'`fftools_ffmpeg_demux.c
libffmpegkit_la-fftools_ffmpeg_filter.lo: fftools_ffmpeg_filter.c libffmpegkit_la-fftools_ffmpeg_filter.lo: fftools_ffmpeg_filter.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_filter.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_filter.lo `test -f 'fftools_ffmpeg_filter.c' || echo '$(srcdir)/'`fftools_ffmpeg_filter.c @am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_filter.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_filter.lo `test -f 'fftools_ffmpeg_filter.c' || echo '$(srcdir)/'`fftools_ffmpeg_filter.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
@ -628,6 +659,13 @@ libffmpegkit_la-fftools_ffmpeg_mux.lo: fftools_ffmpeg_mux.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_mux.lo `test -f 'fftools_ffmpeg_mux.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux.c @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_mux.lo `test -f 'fftools_ffmpeg_mux.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux.c
libffmpegkit_la-fftools_ffmpeg_mux_init.lo: fftools_ffmpeg_mux_init.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_mux_init.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_mux_init.lo `test -f 'fftools_ffmpeg_mux_init.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux_init.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_ffmpeg_mux_init.c' object='libffmpegkit_la-fftools_ffmpeg_mux_init.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffmpeg_mux_init.lo `test -f 'fftools_ffmpeg_mux_init.c' || echo '$(srcdir)/'`fftools_ffmpeg_mux_init.c
libffmpegkit_la-fftools_ffmpeg_opt.lo: fftools_ffmpeg_opt.c libffmpegkit_la-fftools_ffmpeg_opt.lo: fftools_ffmpeg_opt.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_opt.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_opt.lo `test -f 'fftools_ffmpeg_opt.c' || echo '$(srcdir)/'`fftools_ffmpeg_opt.c @am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_ffmpeg_opt.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo -c -o libffmpegkit_la-fftools_ffmpeg_opt.lo `test -f 'fftools_ffmpeg_opt.c' || echo '$(srcdir)/'`fftools_ffmpeg_opt.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Tpo $(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
@ -642,6 +680,13 @@ libffmpegkit_la-fftools_ffprobe.lo: fftools_ffprobe.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffprobe.lo `test -f 'fftools_ffprobe.c' || echo '$(srcdir)/'`fftools_ffprobe.c @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_ffprobe.lo `test -f 'fftools_ffprobe.c' || echo '$(srcdir)/'`fftools_ffprobe.c
libffmpegkit_la-fftools_objpool.lo: fftools_objpool.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_objpool.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_objpool.Tpo -c -o libffmpegkit_la-fftools_objpool.lo `test -f 'fftools_objpool.c' || echo '$(srcdir)/'`fftools_objpool.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_objpool.Tpo $(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_objpool.c' object='libffmpegkit_la-fftools_objpool.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_objpool.lo `test -f 'fftools_objpool.c' || echo '$(srcdir)/'`fftools_objpool.c
libffmpegkit_la-fftools_opt_common.lo: fftools_opt_common.c libffmpegkit_la-fftools_opt_common.lo: fftools_opt_common.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_opt_common.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c @am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_opt_common.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Tpo $(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
@ -649,6 +694,20 @@ libffmpegkit_la-fftools_opt_common.lo: fftools_opt_common.c
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_opt_common.lo `test -f 'fftools_opt_common.c' || echo '$(srcdir)/'`fftools_opt_common.c
libffmpegkit_la-fftools_sync_queue.lo: fftools_sync_queue.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_sync_queue.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Tpo -c -o libffmpegkit_la-fftools_sync_queue.lo `test -f 'fftools_sync_queue.c' || echo '$(srcdir)/'`fftools_sync_queue.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Tpo $(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_sync_queue.c' object='libffmpegkit_la-fftools_sync_queue.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_sync_queue.lo `test -f 'fftools_sync_queue.c' || echo '$(srcdir)/'`fftools_sync_queue.c
libffmpegkit_la-fftools_thread_queue.lo: fftools_thread_queue.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -MT libffmpegkit_la-fftools_thread_queue.lo -MD -MP -MF $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Tpo -c -o libffmpegkit_la-fftools_thread_queue.lo `test -f 'fftools_thread_queue.c' || echo '$(srcdir)/'`fftools_thread_queue.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Tpo $(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='fftools_thread_queue.c' object='libffmpegkit_la-fftools_thread_queue.lo' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(libffmpegkit_la_CFLAGS) $(CFLAGS) -c -o libffmpegkit_la-fftools_thread_queue.lo `test -f 'fftools_thread_queue.c' || echo '$(srcdir)/'`fftools_thread_queue.c
.cpp.o: .cpp.o:
@am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\ @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
@am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
@ -966,12 +1025,17 @@ distclean: distclean-am
-rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
-rm -f Makefile -rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \ distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags distclean-tags
@ -1036,12 +1100,17 @@ maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-ffmpegkit_exception.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_cmdutils.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_demux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_filter.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_hw.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_mux_init.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffmpeg_opt.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_ffprobe.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_objpool.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo -rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_opt_common.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_sync_queue.Plo
-rm -f ./$(DEPDIR)/libffmpegkit_la-fftools_thread_queue.Plo
-rm -f Makefile -rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic maintainer-clean-am: distclean-am maintainer-clean-generic

@ -19,7 +19,7 @@
#include "Statistics.h" #include "Statistics.h"
ffmpegkit::Statistics::Statistics(const long sessionId, const int videoFrameNumber, const float videoFps, const float videoQuality, const int64_t size, const int time, const double bitrate, const double speed) : ffmpegkit::Statistics::Statistics(const long sessionId, const int videoFrameNumber, const float videoFps, const float videoQuality, const int64_t size, const double time, const double bitrate, const double speed) :
_sessionId{sessionId}, _videoFrameNumber{videoFrameNumber}, _videoFps{videoFps}, _videoQuality{videoQuality}, _size{size}, _time{time}, _bitrate{bitrate}, _speed{speed} { _sessionId{sessionId}, _videoFrameNumber{videoFrameNumber}, _videoFps{videoFps}, _videoQuality{videoQuality}, _size{size}, _time{time}, _bitrate{bitrate}, _speed{speed} {
} }
@ -43,7 +43,7 @@ int64_t ffmpegkit::Statistics::getSize() {
return _size; return _size;
} }
int ffmpegkit::Statistics::getTime() { double ffmpegkit::Statistics::getTime() {
return _time; return _time;
} }

@ -30,13 +30,13 @@ namespace ffmpegkit {
class Statistics { class Statistics {
public: public:
Statistics(const long sessionId, const int videoFrameNumber, const float videoFps, const float videoQuality, const int64_t size, const int time, const double bitrate, const double speed); Statistics(const long sessionId, const int videoFrameNumber, const float videoFps, const float videoQuality, const int64_t size, const double time, const double bitrate, const double speed);
long getSessionId(); long getSessionId();
int getVideoFrameNumber(); int getVideoFrameNumber();
float getVideoFps(); float getVideoFps();
float getVideoQuality(); float getVideoQuality();
int64_t getSize(); int64_t getSize();
int getTime(); double getTime();
double getBitrate(); double getBitrate();
double getSpeed(); double getSpeed();
@ -46,7 +46,7 @@ namespace ffmpegkit {
float _videoFps; float _videoFps;
float _videoQuality; float _videoQuality;
int64_t _size; int64_t _size;
int _time; double _time;
double _bitrate; double _bitrate;
double _speed; double _speed;
}; };
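
The time value carried by Statistics changes from int to double in this release, and the same change propagates to the C-level set_report_callback() declaration in fftools_ffmpeg.h further down in this diff. A hypothetical native callback matching the new signature is sketched below; the parameter names are assumptions inferred from the Statistics constructor order, not taken from the patch.

#include <stdint.h>

/* matches the updated declaration in fftools_ffmpeg.h:
 *   void set_report_callback(void (*)(int, float, float, int64_t,
 *                                     double, double, double));
 * parameter names below are assumptions */
static void on_statistics(int video_frame_number, float video_fps,
                          float video_quality, int64_t size, double time,
                          double bitrate, double speed)
{
    /* forward the values to the host application; note that `time` is now
     * a double where previous releases used an int */
    (void)video_frame_number; (void)video_fps; (void)video_quality;
    (void)size; (void)time; (void)bitrate; (void)speed;
}

/* registration sketch: set_report_callback(on_statistics); */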

@ -1,7 +1,8 @@
/* /*
* Various utilities for command line tools * Various utilities for command line tools
* Copyright (c) 2000-2003 Fabrice Bellard * Copyright (c) 2000-2003 Fabrice Bellard
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -129,11 +136,18 @@ void register_exit(void (*cb)(int ret))
program_exit = cb; program_exit = cb;
} }
void report_and_exit(int ret)
{
av_log(NULL, AV_LOG_FATAL, "%s\n", av_err2str(ret));
exit_program(AVUNERROR(ret));
}
void exit_program(int ret) void exit_program(int ret)
{ {
if (program_exit) if (program_exit)
program_exit(ret); program_exit(ret);
// FFmpegKit
// exit disabled and replaced with longjmp, exit value stored in longjmp_value // exit disabled and replaced with longjmp, exit value stored in longjmp_value
// exit(ret); // exit(ret);
longjmp_value = ret; longjmp_value = ret;
@ -696,7 +710,7 @@ static void init_parse_context(OptionParseContext *octx,
octx->nb_groups = nb_groups; octx->nb_groups = nb_groups;
octx->groups = av_calloc(octx->nb_groups, sizeof(*octx->groups)); octx->groups = av_calloc(octx->nb_groups, sizeof(*octx->groups));
if (!octx->groups) if (!octx->groups)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
for (i = 0; i < octx->nb_groups; i++) for (i = 0; i < octx->nb_groups; i++)
octx->groups[i].group_def = &groups[i]; octx->groups[i].group_def = &groups[i];
@ -843,12 +857,7 @@ do { \
void print_error(const char *filename, int err) void print_error(const char *filename, int err)
{ {
char errbuf[128]; av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, av_err2str(err));
const char *errbuf_ptr = errbuf;
if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
errbuf_ptr = strerror(AVUNERROR(err));
av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
} }
int read_yesno(void) int read_yesno(void)
@ -971,7 +980,7 @@ AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
break; break;
} }
while ((t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX))) { while ((t = av_dict_iterate(opts, t))) {
const AVClass *priv_class; const AVClass *priv_class;
char *p = strchr(t->key, ':'); char *p = strchr(t->key, ':');
@ -1009,11 +1018,8 @@ AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
if (!s->nb_streams) if (!s->nb_streams)
return NULL; return NULL;
opts = av_calloc(s->nb_streams, sizeof(*opts)); opts = av_calloc(s->nb_streams, sizeof(*opts));
if (!opts) { if (!opts)
av_log(NULL, AV_LOG_ERROR, report_and_exit(AVERROR(ENOMEM));
"Could not alloc memory for stream options.\n");
exit_program(1);
}
for (i = 0; i < s->nb_streams; i++) for (i = 0; i < s->nb_streams; i++)
opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id, opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
s, s->streams[i], NULL); s, s->streams[i], NULL);
@ -1028,10 +1034,8 @@ void *grow_array(void *array, int elem_size, int *size, int new_size)
} }
if (*size < new_size) { if (*size < new_size) {
uint8_t *tmp = av_realloc_array(array, new_size, elem_size); uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
if (!tmp) { if (!tmp)
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n"); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size); memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
*size = new_size; *size = new_size;
return tmp; return tmp;
@ -1044,10 +1048,8 @@ void *allocate_array_elem(void *ptr, size_t elem_size, int *nb_elems)
void *new_elem; void *new_elem;
if (!(new_elem = av_mallocz(elem_size)) || if (!(new_elem = av_mallocz(elem_size)) ||
av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0) { av_dynarray_add_nofree(ptr, nb_elems, new_elem) < 0)
av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n"); report_and_exit(AVERROR(ENOMEM));
exit_program(1);
}
return new_elem; return new_elem;
} }
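
filter_codec_opts() above switches from the old av_dict_get(opts, "", prev, AV_DICT_IGNORE_SUFFIX) idiom to the FFmpeg 6.0 av_dict_iterate() helper. A minimal sketch of the new iteration idiom, assuming only libavutil; the helper name is illustrative.

#include "libavutil/dict.h"
#include "libavutil/log.h"

static void log_all_entries(const AVDictionary *opts)
{
    const AVDictionaryEntry *t = NULL;
    /* av_dict_iterate(dict, prev) returns the entry after prev,
     * or NULL once the dictionary is exhausted */
    while ((t = av_dict_iterate(opts, t)))
        av_log(NULL, AV_LOG_INFO, "%s=%s\n", t->key, t->value);
}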

@ -1,7 +1,8 @@
/* /*
* Various utilities for command line tools * Various utilities for command line tools
* copyright (c) 2003 Fabrice Bellard * copyright (c) 2003 Fabrice Bellard
* copyright (c) 2018 Taner Sener * copyright (c) 2018-2022 Taner Sener
* copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -25,6 +26,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -95,6 +102,17 @@ extern __thread int find_stream_info;
*/ */
void register_exit(void (*cb)(int ret)); void register_exit(void (*cb)(int ret));
/**
* Reports an error corresponding to the provided
* AVERROR code and calls exit_program() with the
* corresponding POSIX error code.
* @note ret must be an AVERROR-value of a POSIX error code
* (i.e. AVERROR(EFOO) and not AVERROR_FOO).
* library functions can return both, so call this only
* with AVERROR(EFOO) of your own.
*/
void report_and_exit(int ret) av_noreturn;
/** /**
* Wraps exit with a program-specific cleanup routine. * Wraps exit with a program-specific cleanup routine.
*/ */
@ -232,11 +250,6 @@ void show_help_children(const AVClass *clazz, int flags);
void show_help_default_ffmpeg(const char *opt, const char *arg); void show_help_default_ffmpeg(const char *opt, const char *arg);
void show_help_default_ffprobe(const char *opt, const char *arg); void show_help_default_ffprobe(const char *opt, const char *arg);
/**
* Generic -h handler common to all fftools.
*/
int show_help(void *optctx, const char *opt, const char *arg);
/** /**
* Parse the command line arguments. * Parse the command line arguments.
* *
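
The report_and_exit() helper declared above centralizes the "log the AVERROR, then abort" pattern and, per the note, must be given AVERROR(EFOO) values rather than AVERROR_FOO codes. A minimal, hypothetical caller mirroring the allocation-failure pattern this patch adopts throughout the fftools sources:

#include "fftools_cmdutils.h"
#include "libavutil/error.h"
#include "libavutil/mem.h"

static int *alloc_table(size_t n)
{
    int *table = av_calloc(n, sizeof(*table));
    if (!table)
        report_and_exit(AVERROR(ENOMEM)); /* POSIX-style AVERROR(EFOO) only */
    return table;
}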

File diff suppressed because it is too large


@ -1,6 +1,7 @@
/* /*
* This file is part of FFmpeg. * This file is part of FFmpeg.
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* FFmpeg is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
@ -22,6 +23,16 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated to
* ffmpeg_mux.h
* - "class" member field renamed as clazz
* - time field in set_report_callback updated as double
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -59,11 +70,13 @@
#include "config.h" #include "config.h"
#include <stdatomic.h>
#include <stdint.h> #include <stdint.h>
#include <stdio.h> #include <stdio.h>
#include <signal.h> #include <signal.h>
#include "fftools_cmdutils.h" #include "fftools_cmdutils.h"
#include "fftools_sync_queue.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
@ -85,6 +98,12 @@
#include "libswresample/swresample.h" #include "libswresample/swresample.h"
// deprecated features
#define FFMPEG_OPT_PSNR 1
#define FFMPEG_OPT_MAP_CHANNEL 1
#define FFMPEG_OPT_MAP_SYNC 1
#define FFMPEG_ROTATION_METADATA 1
enum VideoSyncMethod { enum VideoSyncMethod {
VSYNC_AUTO = -1, VSYNC_AUTO = -1,
VSYNC_PASSTHROUGH, VSYNC_PASSTHROUGH,
@ -113,15 +132,15 @@ typedef struct StreamMap {
int disabled; /* 1 is this mapping is disabled by a negative map */ int disabled; /* 1 is this mapping is disabled by a negative map */
int file_index; int file_index;
int stream_index; int stream_index;
int sync_file_index;
int sync_stream_index;
char *linklabel; /* name of an output link, for mapping lavfi outputs */ char *linklabel; /* name of an output link, for mapping lavfi outputs */
} StreamMap; } StreamMap;
#if FFMPEG_OPT_MAP_CHANNEL
typedef struct { typedef struct {
int file_idx, stream_idx, channel_idx; // input int file_idx, stream_idx, channel_idx; // input
int ofile_idx, ostream_idx; // output int ofile_idx, ostream_idx; // output
} AudioChannelMap; } AudioChannelMap;
#endif
typedef struct OptionsContext { typedef struct OptionsContext {
OptionGroup *g; OptionGroup *g;
@ -157,6 +176,7 @@ typedef struct OptionsContext {
int accurate_seek; int accurate_seek;
int thread_queue_size; int thread_queue_size;
int input_sync_ref; int input_sync_ref;
int find_stream_info;
SpecifierOpt *ts_scale; SpecifierOpt *ts_scale;
int nb_ts_scale; int nb_ts_scale;
@ -174,11 +194,10 @@ typedef struct OptionsContext {
/* output options */ /* output options */
StreamMap *stream_maps; StreamMap *stream_maps;
int nb_stream_maps; int nb_stream_maps;
#if FFMPEG_OPT_MAP_CHANNEL
AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */ AudioChannelMap *audio_channel_maps; /* one info entry per -map_channel */
int nb_audio_channel_maps; /* number of (valid) -map_channel settings */ int nb_audio_channel_maps; /* number of (valid) -map_channel settings */
int metadata_global_manual; #endif
int metadata_streams_manual;
int metadata_chapters_manual;
const char **attachments; const char **attachments;
int nb_attachments; int nb_attachments;
@ -186,9 +205,10 @@ typedef struct OptionsContext {
int64_t recording_time; int64_t recording_time;
int64_t stop_time; int64_t stop_time;
uint64_t limit_filesize; int64_t limit_filesize;
float mux_preload; float mux_preload;
float mux_max_delay; float mux_max_delay;
float shortest_buf_duration;
int shortest; int shortest;
int bitexact; int bitexact;
@ -221,6 +241,12 @@ typedef struct OptionsContext {
int nb_force_fps; int nb_force_fps;
SpecifierOpt *frame_aspect_ratios; SpecifierOpt *frame_aspect_ratios;
int nb_frame_aspect_ratios; int nb_frame_aspect_ratios;
SpecifierOpt *display_rotations;
int nb_display_rotations;
SpecifierOpt *display_hflips;
int nb_display_hflips;
SpecifierOpt *display_vflips;
int nb_display_vflips;
SpecifierOpt *rc_overrides; SpecifierOpt *rc_overrides;
int nb_rc_overrides; int nb_rc_overrides;
SpecifierOpt *intra_matrices; SpecifierOpt *intra_matrices;
@ -247,6 +273,8 @@ typedef struct OptionsContext {
int nb_reinit_filters; int nb_reinit_filters;
SpecifierOpt *fix_sub_duration; SpecifierOpt *fix_sub_duration;
int nb_fix_sub_duration; int nb_fix_sub_duration;
SpecifierOpt *fix_sub_duration_heartbeat;
int nb_fix_sub_duration_heartbeat;
SpecifierOpt *canvas_sizes; SpecifierOpt *canvas_sizes;
int nb_canvas_sizes; int nb_canvas_sizes;
SpecifierOpt *pass; SpecifierOpt *pass;
@ -275,6 +303,18 @@ typedef struct OptionsContext {
int nb_autoscale; int nb_autoscale;
SpecifierOpt *bits_per_raw_sample; SpecifierOpt *bits_per_raw_sample;
int nb_bits_per_raw_sample; int nb_bits_per_raw_sample;
SpecifierOpt *enc_stats_pre;
int nb_enc_stats_pre;
SpecifierOpt *enc_stats_post;
int nb_enc_stats_post;
SpecifierOpt *mux_stats;
int nb_mux_stats;
SpecifierOpt *enc_stats_pre_fmt;
int nb_enc_stats_pre_fmt;
SpecifierOpt *enc_stats_post_fmt;
int nb_enc_stats_post_fmt;
SpecifierOpt *mux_stats_fmt;
int nb_mux_stats_fmt;
} OptionsContext; } OptionsContext;
typedef struct InputFilter { typedef struct InputFilter {
@ -350,12 +390,22 @@ typedef struct InputStream {
#define DECODING_FOR_OST 1 #define DECODING_FOR_OST 1
#define DECODING_FOR_FILTER 2 #define DECODING_FOR_FILTER 2
int processing_needed; /* non zero if the packets must be processed */ int processing_needed; /* non zero if the packets must be processed */
// should attach FrameData as opaque_ref after decoding
int want_frame_data;
/**
* Codec parameters - to be used by the decoding/streamcopy code.
* st->codecpar should not be accessed, because it may be modified
* concurrently by the demuxing thread.
*/
AVCodecParameters *par;
AVCodecContext *dec_ctx; AVCodecContext *dec_ctx;
const AVCodec *dec; const AVCodec *dec;
AVFrame *decoded_frame; AVFrame *decoded_frame;
AVPacket *pkt; AVPacket *pkt;
AVRational framerate_guessed;
int64_t prev_pkt_pts; int64_t prev_pkt_pts;
int64_t start; /* time when read started */ int64_t start; /* time when read started */
/* predicted dts of the next packet read for this stream or (when there are /* predicted dts of the next packet read for this stream or (when there are
@ -368,6 +418,12 @@ typedef struct InputStream {
int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units) int64_t pts; ///< current pts of the decoded frame (in AV_TIME_BASE units)
int wrap_correction_done; int wrap_correction_done;
// the value of AVCodecParserContext.repeat_pict from the AVStream parser
// for the last packet returned from ifile_get_packet()
// -1 if unknown
// FIXME: this is a hack, the avstream parser should not be used
int last_pkt_repeat_pict;
int64_t filter_in_rescale_delta_last; int64_t filter_in_rescale_delta_last;
int64_t min_pts; /* pts with the smallest value in a current stream */ int64_t min_pts; /* pts with the smallest value in a current stream */
@ -417,12 +473,8 @@ typedef struct InputStream {
char *hwaccel_device; char *hwaccel_device;
enum AVPixelFormat hwaccel_output_format; enum AVPixelFormat hwaccel_output_format;
/* hwaccel context */
void *hwaccel_ctx;
void (*hwaccel_uninit)(AVCodecContext *s);
int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame); int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
enum AVPixelFormat hwaccel_pix_fmt; enum AVPixelFormat hwaccel_pix_fmt;
enum AVPixelFormat hwaccel_retrieved_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets read // combined size of all the packets read
@ -439,38 +491,46 @@ typedef struct InputStream {
int got_output; int got_output;
} InputStream; } InputStream;
typedef struct LastFrameDuration {
int stream_idx;
int64_t duration;
} LastFrameDuration;
typedef struct InputFile { typedef struct InputFile {
int index;
AVFormatContext *ctx; AVFormatContext *ctx;
int eof_reached; /* true if eof reached */ int eof_reached; /* true if eof reached */
int eagain; /* true if last read attempt returned EAGAIN */ int eagain; /* true if last read attempt returned EAGAIN */
int ist_index; /* index of first stream in input_streams */
int loop; /* set number of times input stream should be looped */
int64_t duration; /* actual duration of the longest stream in a file
at the moment when looping happens */
AVRational time_base; /* time base of the duration */
int64_t input_ts_offset; int64_t input_ts_offset;
int input_sync_ref; int input_sync_ref;
/**
* Effective format start time based on enabled streams.
*/
int64_t start_time_effective;
int64_t ts_offset; int64_t ts_offset;
/**
* Extra timestamp offset added by discontinuity handling.
*/
int64_t ts_offset_discont;
int64_t last_ts; int64_t last_ts;
int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */ int64_t start_time; /* user-specified start time in AV_TIME_BASE or AV_NOPTS_VALUE */
int64_t recording_time; int64_t recording_time;
int nb_streams; /* number of stream that ffmpeg is aware of; may be different
from ctx.nb_streams if new streams appear during av_read_frame() */ /* streams that ffmpeg is aware of;
int nb_streams_warn; /* number of streams that the user was warned of */ * there may be extra streams in ctx that are not mapped to an InputStream
* if new streams appear dynamically during demuxing */
InputStream **streams;
int nb_streams;
int rate_emu; int rate_emu;
float readrate; float readrate;
int accurate_seek; int accurate_seek;
AVPacket *pkt; /* when looping the input file, this queue is used by decoders to report
* the last frame duration back to the demuxer thread */
#if HAVE_THREADS AVThreadMessageQueue *audio_duration_queue;
AVThreadMessageQueue *in_thread_queue; int audio_duration_queue_size;
pthread_t thread; /* thread reading from this file */
int non_blocking; /* reading packets from the thread should not block */
int joined; /* the thread has been joined */
int thread_queue_size; /* maximum number of queued packets */
#endif
} InputFile; } InputFile;
enum forced_keyframes_const { enum forced_keyframes_const {
@ -485,6 +545,41 @@ enum forced_keyframes_const {
#define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0) #define ABORT_ON_FLAG_EMPTY_OUTPUT (1 << 0)
#define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1) #define ABORT_ON_FLAG_EMPTY_OUTPUT_STREAM (1 << 1)
enum EncStatsType {
ENC_STATS_LITERAL = 0,
ENC_STATS_FILE_IDX,
ENC_STATS_STREAM_IDX,
ENC_STATS_FRAME_NUM,
ENC_STATS_FRAME_NUM_IN,
ENC_STATS_TIMEBASE,
ENC_STATS_TIMEBASE_IN,
ENC_STATS_PTS,
ENC_STATS_PTS_TIME,
ENC_STATS_PTS_IN,
ENC_STATS_PTS_TIME_IN,
ENC_STATS_DTS,
ENC_STATS_DTS_TIME,
ENC_STATS_SAMPLE_NUM,
ENC_STATS_NB_SAMPLES,
ENC_STATS_PKT_SIZE,
ENC_STATS_BITRATE,
ENC_STATS_AVG_BITRATE,
};
typedef struct EncStatsComponent {
enum EncStatsType type;
uint8_t *str;
size_t str_len;
} EncStatsComponent;
typedef struct EncStats {
EncStatsComponent *components;
int nb_components;
AVIOContext *io;
} EncStats;
extern const char *const forced_keyframes_const_names[]; extern const char *const forced_keyframes_const_names[];
typedef enum { typedef enum {
@ -492,68 +587,92 @@ typedef enum {
MUXER_FINISHED = 2, MUXER_FINISHED = 2,
} OSTFinished ; } OSTFinished ;
enum {
KF_FORCE_SOURCE = 1,
KF_FORCE_SOURCE_NO_DROP = 2,
};
typedef struct KeyframeForceCtx {
int type;
int64_t ref_pts;
// timestamps of the forced keyframes, in AV_TIME_BASE_Q
int64_t *pts;
int nb_pts;
int index;
AVExpr *pexpr;
double expr_const_values[FKF_NB];
int dropped_keyframe;
} KeyframeForceCtx;
typedef struct OutputStream { typedef struct OutputStream {
const AVClass *clazz;
int file_index; /* file index */ int file_index; /* file index */
int index; /* stream index in the output file */ int index; /* stream index in the output file */
int source_index; /* InputStream index */
/* input stream that is the source for this output stream;
* may be NULL for streams with no well-defined source, e.g.
* attachments or outputs from complex filtergraphs */
InputStream *ist;
AVStream *st; /* stream in the output file */ AVStream *st; /* stream in the output file */
int encoding_needed; /* true if encoding needed for this stream */ /* number of frames emitted by the video-encoding sync code */
int64_t frame_number; int64_t vsync_frame_number;
/* input pts and corresponding output pts /* predicted pts of the next frame to be encoded
for A/V sync */ * audio/video encoding only */
struct InputStream *sync_ist; /* input stream to sync against */ int64_t next_pts;
int64_t sync_opts; /* output frame counter, could be changed to some true timestamp */ // FIXME look at frame_number /* dts of the last packet sent to the muxing queue, in AV_TIME_BASE_Q */
/* pts of the first frame encoded for this stream, used for limiting
* recording time */
int64_t first_pts;
/* dts of the last packet sent to the muxer */
int64_t last_mux_dts; int64_t last_mux_dts;
/* pts of the last frame received from the filters, in AV_TIME_BASE_Q */
int64_t last_filter_pts;
// timestamp from which the streamcopied streams should start,
// in AV_TIME_BASE_Q;
// everything before it should be discarded
int64_t ts_copy_start;
// the timebase of the packets sent to the muxer // the timebase of the packets sent to the muxer
AVRational mux_timebase; AVRational mux_timebase;
AVRational enc_timebase; AVRational enc_timebase;
AVBSFContext *bsf_ctx;
AVCodecContext *enc_ctx; AVCodecContext *enc_ctx;
AVCodecParameters *ref_par; /* associated input codec parameters with encoders options applied */
const AVCodec *enc;
int64_t max_frames;
AVFrame *filtered_frame; AVFrame *filtered_frame;
AVFrame *last_frame; AVFrame *last_frame;
AVFrame *sq_frame;
AVPacket *pkt; AVPacket *pkt;
int64_t last_dropped; int64_t last_dropped;
int64_t last_nb0_frames[3]; int64_t last_nb0_frames[3];
void *hwaccel_ctx;
/* video only */ /* video only */
AVRational frame_rate; AVRational frame_rate;
AVRational max_frame_rate; AVRational max_frame_rate;
enum VideoSyncMethod vsync_method; enum VideoSyncMethod vsync_method;
int is_cfr; int is_cfr;
const char *fps_mode;
int force_fps; int force_fps;
int top_field_first; int top_field_first;
#if FFMPEG_ROTATION_METADATA
int rotate_overridden; int rotate_overridden;
#endif
int autoscale; int autoscale;
int bitexact;
int bits_per_raw_sample; int bits_per_raw_sample;
#if FFMPEG_ROTATION_METADATA
double rotate_override_value; double rotate_override_value;
#endif
AVRational frame_aspect_ratio; AVRational frame_aspect_ratio;
/* forced key frames */ KeyframeForceCtx kf;
int64_t forced_kf_ref_pts;
int64_t *forced_kf_pts;
int forced_kf_count;
int forced_kf_index;
char *forced_keyframes;
AVExpr *forced_keyframes_pexpr;
double forced_keyframes_expr_const_values[FKF_NB];
int dropped_keyframe;
/* audio only */ /* audio only */
#if FFMPEG_OPT_MAP_CHANNEL
int *audio_channels_map; /* list of the channels id to pick from the source stream */ int *audio_channels_map; /* list of the channels id to pick from the source stream */
int audio_channels_mapped; /* number of channels in audio_channels_map */ int audio_channels_mapped; /* number of channels in audio_channels_map */
#endif
char *logfile_prefix; char *logfile_prefix;
FILE *logfile; FILE *logfile;
@ -569,7 +688,6 @@ typedef struct OutputStream {
char *apad; char *apad;
OSTFinished finished; /* no more packets should be written for this stream */ OSTFinished finished; /* no more packets should be written for this stream */
int unavailable; /* true if the steram is unavailable (possibly temporarily) */ int unavailable; /* true if the steram is unavailable (possibly temporarily) */
int stream_copy;
// init_output_stream() has been called for this stream // init_output_stream() has been called for this stream
// The encoder and the bitstream filters have been initialized and the stream // The encoder and the bitstream filters have been initialized and the stream
@ -582,15 +700,16 @@ typedef struct OutputStream {
int streamcopy_started; int streamcopy_started;
int copy_initial_nonkeyframes; int copy_initial_nonkeyframes;
int copy_prior_start; int copy_prior_start;
char *disposition;
int keep_pix_fmt; int keep_pix_fmt;
/* stats */ /* stats */
// combined size of all the packets written // combined size of all the packets sent to the muxer
uint64_t data_size; uint64_t data_size_mux;
// combined size of all the packets received from the encoder
uint64_t data_size_enc;
// number of packets send to the muxer // number of packets send to the muxer
uint64_t packets_written; atomic_uint_least64_t packets_written;
// number of frames/samples sent to the encoder // number of frames/samples sent to the encoder
uint64_t frames_encoded; uint64_t frames_encoded;
uint64_t samples_encoded; uint64_t samples_encoded;
@ -600,51 +719,48 @@ typedef struct OutputStream {
/* packet quality factor */ /* packet quality factor */
int quality; int quality;
int max_muxing_queue_size;
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* packet picture type */ /* packet picture type */
int pict_type; int pict_type;
/* frame encode sum of squared error values */ /* frame encode sum of squared error values */
int64_t error[4]; int64_t error[4];
int sq_idx_encode;
int sq_idx_mux;
EncStats enc_stats_pre;
EncStats enc_stats_post;
/*
* bool on whether this stream should be utilized for splitting
* subtitles utilizing fix_sub_duration at random access points.
*/
unsigned int fix_sub_duration_heartbeat;
} OutputStream; } OutputStream;
typedef struct OutputFile { typedef struct OutputFile {
const AVClass *clazz;
int index; int index;
const AVOutputFormat *format; const AVOutputFormat *format;
const char *url;
OutputStream **streams;
int nb_streams;
SyncQueue *sq_encode;
AVFormatContext *ctx;
AVDictionary *opts;
int ost_index; /* index of the first stream in output_streams */
int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units int64_t recording_time; ///< desired length of the resulting file in microseconds == AV_TIME_BASE units
int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units int64_t start_time; ///< start time in microseconds == AV_TIME_BASE units
uint64_t limit_filesize; /* filesize limit expressed in bytes */
int shortest; int shortest;
int bitexact;
int header_written;
} OutputFile; } OutputFile;
extern __thread InputStream **input_streams;
extern __thread int nb_input_streams;
extern __thread InputFile **input_files; extern __thread InputFile **input_files;
extern __thread int nb_input_files; extern __thread int nb_input_files;
extern __thread OutputStream **output_streams;
extern __thread int nb_output_streams;
extern __thread OutputFile **output_files; extern __thread OutputFile **output_files;
extern __thread int nb_output_files; extern __thread int nb_output_files;
@ -658,13 +774,10 @@ extern __thread float audio_drift_threshold;
extern __thread float dts_delta_threshold; extern __thread float dts_delta_threshold;
extern __thread float dts_error_threshold; extern __thread float dts_error_threshold;
extern __thread int audio_volume;
extern __thread int audio_sync_method;
extern __thread enum VideoSyncMethod video_sync_method; extern __thread enum VideoSyncMethod video_sync_method;
extern __thread float frame_drop_threshold; extern __thread float frame_drop_threshold;
extern __thread int do_benchmark; extern __thread int do_benchmark;
extern __thread int do_benchmark_all; extern __thread int do_benchmark_all;
extern __thread int do_deinterlace;
extern __thread int do_hex_dump; extern __thread int do_hex_dump;
extern __thread int do_pkt_dump; extern __thread int do_pkt_dump;
extern __thread int copy_ts; extern __thread int copy_ts;
@ -677,7 +790,6 @@ extern __thread int print_stats;
extern __thread int64_t stats_period; extern __thread int64_t stats_period;
extern __thread int qp_hist; extern __thread int qp_hist;
extern __thread int stdin_interaction; extern __thread int stdin_interaction;
extern __thread int frame_bits_per_raw_sample;
extern __thread AVIOContext *progress_avio; extern __thread AVIOContext *progress_avio;
extern __thread float max_error_rate; extern __thread float max_error_rate;
@ -688,15 +800,20 @@ extern __thread int auto_conversion_filters;
extern __thread const AVIOInterruptCB int_cb; extern __thread const AVIOInterruptCB int_cb;
#if CONFIG_QSV
extern __thread char *qsv_device;
#endif
extern __thread HWDevice *filter_hw_device; extern __thread HWDevice *filter_hw_device;
extern __thread int want_sdp;
extern __thread unsigned nb_output_dumped; extern __thread unsigned nb_output_dumped;
extern __thread int main_ffmpeg_return_code; extern __thread int main_ffmpeg_return_code;
extern __thread int ignore_unknown_streams;
extern __thread int copy_unknown_streams;
extern __thread int recast_media;
#if FFMPEG_OPT_PSNR
extern __thread int do_psnr;
#endif
void term_init(void); void term_init(void);
void term_exit(void); void term_exit(void);
@ -705,7 +822,12 @@ void show_usage(void);
void remove_avoptions(AVDictionary **a, AVDictionary *b); void remove_avoptions(AVDictionary **a, AVDictionary *b);
void assert_avoptions(AVDictionary *m); void assert_avoptions(AVDictionary *m);
int guess_input_channel_layout(InputStream *ist); void assert_file_overwrite(const char *filename);
char *file_read(const char *filename);
AVDictionary *strip_specifiers(const AVDictionary *dict);
const AVCodec *find_codec_or_die(void *logctx, const char *name,
enum AVMediaType type, int encoder);
int parse_and_set_vsync(const char *arg, int *vsync_var, int file_idx, int st_idx, int is_global);
int configure_filtergraph(FilterGraph *fg); int configure_filtergraph(FilterGraph *fg);
void check_filter_outputs(void); void check_filter_outputs(void);
@ -719,8 +841,9 @@ int ifilter_parameters_from_frame(InputFilter *ifilter, const AVFrame *frame);
int ffmpeg_parse_options(int argc, char **argv); int ffmpeg_parse_options(int argc, char **argv);
int videotoolbox_init(AVCodecContext *s); void enc_stats_write(OutputStream *ost, EncStats *es,
int qsv_init(AVCodecContext *s); const AVFrame *frame, const AVPacket *pkt,
uint64_t frame_num);
HWDevice *hw_device_get_by_name(const char *name); HWDevice *hw_device_get_by_name(const char *name);
int hw_device_init_from_string(const char *arg, HWDevice **dev); int hw_device_init_from_string(const char *arg, HWDevice **dev);
@ -732,15 +855,58 @@ int hw_device_setup_for_filter(FilterGraph *fg);
int hwaccel_decode_init(AVCodecContext *avctx); int hwaccel_decode_init(AVCodecContext *avctx);
/* open the muxer when all the streams are initialized */ /*
int of_check_init(OutputFile *of); * Initialize muxing state for the given stream, should be called
* after the codec/streamcopy setup has been done.
*
* Open the muxer once all the streams have been initialized.
*/
int of_stream_init(OutputFile *of, OutputStream *ost);
int of_write_trailer(OutputFile *of); int of_write_trailer(OutputFile *of);
int of_open(const OptionsContext *o, const char *filename);
void of_close(OutputFile **pof); void of_close(OutputFile **pof);
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, void of_enc_stats_close(void);
int unqueue);
/*
* Send a single packet to the output, applying any bitstream filters
* associated with the output stream. This may result in any number
* of packets actually being written, depending on what bitstream
* filters are applied. The supplied packet is consumed and will be
* blank (as if newly-allocated) when this function returns.
*
* If eof is set, instead indicate EOF to all bitstream filters and
* therefore flush any delayed packets to the output. A blank packet
* must be supplied in this case.
*/
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof);
int64_t of_filesize(OutputFile *of);
int ifile_open(const OptionsContext *o, const char *filename);
void ifile_close(InputFile **f);
/**
* Get next input packet from the demuxer.
*
* @param pkt the packet is written here when this function returns 0
* @return
* - 0 when a packet has been read successfully
* - 1 when stream end was reached, but the stream is looped;
* caller should flush decoders and read from this demuxer again
* - a negative error code on failure
*/
int ifile_get_packet(InputFile *f, AVPacket **pkt);
/* iterate over all input streams in all input files;
* pass NULL to start iteration */
InputStream *ist_iter(InputStream *prev);
extern const char * const opt_name_codec_names[];
extern const char * const opt_name_codec_tags[];
extern const char * const opt_name_frame_rates[];
extern const char * const opt_name_top_field_first[];
void set_report_callback(void (*callback)(int, float, float, int64_t, int, double, double)); void set_report_callback(void (*callback)(int, float, float, int64_t, double, double, double));
void cancel_operation(long id); void cancel_operation(long id);
#endif /* FFTOOLS_FFMPEG_H */ #endif /* FFTOOLS_FFMPEG_H */
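
A rough sketch of how the new demuxer/muxer entry points declared in this header fit together for a streamcopied packet, based only on the contracts documented above; pump_one_packet() and the surrounding control flow are illustrative assumptions, not part of the patch.

#include "fftools_ffmpeg.h"

static int pump_one_packet(InputFile *ifile, OutputFile *of, OutputStream *ost)
{
    AVPacket *pkt;
    int ret = ifile_get_packet(ifile, &pkt);

    if (ret == 1)       /* looped EOF: flush decoders, then read from this demuxer again */
        return 0;
    if (ret < 0)        /* demuxing error */
        return ret;

    /* of_output_packet() consumes pkt, applies any bitstream filters attached
     * to ost and hands the result(s) to the muxer; pkt comes back blank */
    of_output_packet(of, pkt, ost, 0);
    return 0;
}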

File diff suppressed because it is too large

@ -1,6 +1,7 @@
/* /*
* ffmpeg filter configuration * ffmpeg filter configuration
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -24,6 +25,12 @@
* We manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * We manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 08.2018 * 08.2018
@ -69,8 +76,9 @@ static const enum AVPixelFormat *get_compliance_normal_pix_fmts(const AVCodec *c
} }
} }
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx, enum AVPixelFormat
const AVCodec *codec, enum AVPixelFormat target) choose_pixel_fmt(const AVCodec *codec, enum AVPixelFormat target,
int strict_std_compliance)
{ {
if (codec && codec->pix_fmts) { if (codec && codec->pix_fmts) {
const enum AVPixelFormat *p = codec->pix_fmts; const enum AVPixelFormat *p = codec->pix_fmts;
@ -79,7 +87,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
int has_alpha = desc ? desc->nb_components % 2 == 0 : 0; int has_alpha = desc ? desc->nb_components % 2 == 0 : 0;
enum AVPixelFormat best= AV_PIX_FMT_NONE; enum AVPixelFormat best= AV_PIX_FMT_NONE;
if (enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(codec, p); p = get_compliance_normal_pix_fmts(codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -106,6 +114,7 @@ enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodecContext *enc_ctx,
static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint) static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
{ {
OutputStream *ost = ofilter->ost; OutputStream *ost = ofilter->ost;
AVCodecContext *enc = ost->enc_ctx;
const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0); const AVDictionaryEntry *strict_dict = av_dict_get(ost->encoder_opts, "strict", NULL, 0);
if (strict_dict) if (strict_dict)
// used by choose_pixel_fmt() and below // used by choose_pixel_fmt() and below
@ -119,13 +128,14 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt); return av_get_pix_fmt_name(ost->enc_ctx->pix_fmt);
} }
if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) { if (ost->enc_ctx->pix_fmt != AV_PIX_FMT_NONE) {
return av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc_ctx, ost->enc, ost->enc_ctx->pix_fmt)); return av_get_pix_fmt_name(choose_pixel_fmt(enc->codec, enc->pix_fmt,
} else if (ost->enc && ost->enc->pix_fmts) { ost->enc_ctx->strict_std_compliance));
} else if (enc->codec->pix_fmts) {
const enum AVPixelFormat *p; const enum AVPixelFormat *p;
p = ost->enc->pix_fmts; p = enc->codec->pix_fmts;
if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) { if (ost->enc_ctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
p = get_compliance_normal_pix_fmts(ost->enc, p); p = get_compliance_normal_pix_fmts(enc->codec, p);
} }
for (; *p != AV_PIX_FMT_NONE; p++) { for (; *p != AV_PIX_FMT_NONE; p++) {
@ -133,7 +143,7 @@ static const char *choose_pix_fmts(OutputFilter *ofilter, AVBPrint *bprint)
av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|'); av_bprintf(bprint, "%s%c", name, p[1] == AV_PIX_FMT_NONE ? '\0' : '|');
} }
if (!av_bprint_is_complete(bprint)) if (!av_bprint_is_complete(bprint))
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return bprint->str; return bprint->str;
} else } else
return NULL; return NULL;
@ -197,7 +207,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
InputFilter *ifilter; InputFilter *ifilter;
if (!fg) if (!fg)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
fg->index = nb_filtergraphs; fg->index = nb_filtergraphs;
ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs); ofilter = ALLOC_ARRAY_ELEM(fg->outputs, fg->nb_outputs);
@ -214,7 +224,7 @@ int init_simple_filtergraph(InputStream *ist, OutputStream *ost)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
@ -238,7 +248,7 @@ static char *describe_filter_link(FilterGraph *fg, AVFilterInOut *inout, int in)
res = av_asprintf("%s:%s", ctx->filter->name, res = av_asprintf("%s:%s", ctx->filter->name,
avfilter_pad_get_name(pads, inout->pad_idx)); avfilter_pad_get_name(pads, inout->pad_idx));
if (!res) if (!res)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
return res; return res;
} }
@ -285,7 +295,7 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
"matches no streams.\n", p, fg->graph_desc); "matches no streams.\n", p, fg->graph_desc);
exit_program(1); exit_program(1);
} }
ist = input_streams[input_files[file_idx]->ist_index + st->index]; ist = input_files[file_idx]->streams[st->index];
if (ist->user_set_discard == AVDISCARD_ALL) { if (ist->user_set_discard == AVDISCARD_ALL) {
av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s " av_log(NULL, AV_LOG_FATAL, "Stream specifier '%s' in filtergraph description %s "
"matches a disabled input stream.\n", p, fg->graph_desc); "matches a disabled input stream.\n", p, fg->graph_desc);
@ -293,14 +303,13 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
} }
} else { } else {
/* find the first unused stream of corresponding type */ /* find the first unused stream of corresponding type */
for (i = 0; i < nb_input_streams; i++) { for (ist = ist_iter(NULL); ist; ist = ist_iter(ist)) {
ist = input_streams[i];
if (ist->user_set_discard == AVDISCARD_ALL) if (ist->user_set_discard == AVDISCARD_ALL)
continue; continue;
if (ist->dec_ctx->codec_type == type && ist->discard) if (ist->dec_ctx->codec_type == type && ist->discard)
break; break;
} }
if (i == nb_input_streams) { if (!ist) {
av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for " av_log(NULL, AV_LOG_FATAL, "Cannot find a matching stream for "
"unlabeled input pad %d on filter %s\n", in->pad_idx, "unlabeled input pad %d on filter %s\n", in->pad_idx,
in->filter_ctx->name); in->filter_ctx->name);
@ -323,12 +332,162 @@ static void init_input_filter(FilterGraph *fg, AVFilterInOut *in)
ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW); ifilter->frame_queue = av_fifo_alloc2(8, sizeof(AVFrame*), AV_FIFO_FLAG_AUTO_GROW);
if (!ifilter->frame_queue) if (!ifilter->frame_queue)
exit_program(1); report_and_exit(AVERROR(ENOMEM));
GROW_ARRAY(ist->filters, ist->nb_filters); GROW_ARRAY(ist->filters, ist->nb_filters);
ist->filters[ist->nb_filters - 1] = ifilter; ist->filters[ist->nb_filters - 1] = ifilter;
} }
static int read_binary(const char *path, uint8_t **data, int *len)
{
AVIOContext *io = NULL;
int64_t fsize;
int ret;
*data = NULL;
*len = 0;
ret = avio_open2(&io, path, AVIO_FLAG_READ, &int_cb, NULL);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s': %s\n",
path, av_err2str(ret));
return ret;
}
fsize = avio_size(io);
if (fsize < 0 || fsize > INT_MAX) {
av_log(NULL, AV_LOG_ERROR, "Cannot obtain size of file %s\n", path);
ret = AVERROR(EIO);
goto fail;
}
*data = av_malloc(fsize);
if (!*data) {
ret = AVERROR(ENOMEM);
goto fail;
}
ret = avio_read(io, *data, fsize);
if (ret != fsize) {
av_log(NULL, AV_LOG_ERROR, "Error reading file %s\n", path);
ret = ret < 0 ? ret : AVERROR(EIO);
goto fail;
}
*len = fsize;
return 0;
fail:
avio_close(io);
av_freep(data);
*len = 0;
return ret;
}
static int filter_opt_apply(AVFilterContext *f, const char *key, const char *val)
{
const AVOption *o = NULL;
int ret;
ret = av_opt_set(f, key, val, AV_OPT_SEARCH_CHILDREN);
if (ret >= 0)
return 0;
if (ret == AVERROR_OPTION_NOT_FOUND && key[0] == '/')
o = av_opt_find(f, key + 1, NULL, 0, AV_OPT_SEARCH_CHILDREN);
if (!o)
goto err_apply;
// key is a valid option name prefixed with '/'
// interpret value as a path from which to load the actual option value
key++;
if (o->type == AV_OPT_TYPE_BINARY) {
uint8_t *data;
int len;
ret = read_binary(val, &data, &len);
if (ret < 0)
goto err_load;
ret = av_opt_set_bin(f, key, data, len, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
} else {
char *data = file_read(val);
if (!data) {
ret = AVERROR(EIO);
goto err_load;
}
ret = av_opt_set(f, key, data, AV_OPT_SEARCH_CHILDREN);
av_freep(&data);
}
if (ret < 0)
goto err_apply;
return 0;
err_apply:
av_log(NULL, AV_LOG_ERROR,
"Error applying option '%s' to filter '%s': %s\n",
key, f->filter->name, av_err2str(ret));
return ret;
err_load:
av_log(NULL, AV_LOG_ERROR,
"Error loading value for option '%s' from file '%s'\n",
key, val);
return ret;
}
static int graph_opts_apply(AVFilterGraphSegment *seg)
{
for (size_t i = 0; i < seg->nb_chains; i++) {
AVFilterChain *ch = seg->chains[i];
for (size_t j = 0; j < ch->nb_filters; j++) {
AVFilterParams *p = ch->filters[j];
const AVDictionaryEntry *e = NULL;
av_assert0(p->filter);
while ((e = av_dict_iterate(p->opts, e))) {
int ret = filter_opt_apply(p->filter, e->key, e->value);
if (ret < 0)
return ret;
}
av_dict_free(&p->opts);
}
}
return 0;
}
static int graph_parse(AVFilterGraph *graph, const char *desc,
AVFilterInOut **inputs, AVFilterInOut **outputs)
{
AVFilterGraphSegment *seg;
int ret;
ret = avfilter_graph_segment_parse(graph, desc, 0, &seg);
if (ret < 0)
return ret;
ret = avfilter_graph_segment_create_filters(seg, 0);
if (ret < 0)
goto fail;
ret = graph_opts_apply(seg);
if (ret < 0)
goto fail;
ret = avfilter_graph_segment_apply(seg, 0, inputs, outputs);
fail:
avfilter_graph_segment_free(&seg);
return ret;
}
int init_complex_filtergraph(FilterGraph *fg) int init_complex_filtergraph(FilterGraph *fg)
{ {
AVFilterInOut *inputs, *outputs, *cur; AVFilterInOut *inputs, *outputs, *cur;
@ -342,7 +501,7 @@ int init_complex_filtergraph(FilterGraph *fg)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
graph->nb_threads = 1; graph->nb_threads = 1;
ret = avfilter_graph_parse2(graph, fg->graph_desc, &inputs, &outputs); ret = graph_parse(graph, fg->graph_desc, &inputs, &outputs);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
@ -467,8 +626,7 @@ static int configure_output_video_filter(FilterGraph *fg, OutputFilter *ofilter,
snprintf(args, sizeof(args), "%d:%d", snprintf(args, sizeof(args), "%d:%d",
ofilter->width, ofilter->height); ofilter->width, ofilter->height);
while ((e = av_dict_get(ost->sws_dict, "", e, while ((e = av_dict_iterate(ost->sws_dict, e))) {
AV_DICT_IGNORE_SUFFIX))) {
av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value); av_strlcatf(args, sizeof(args), ":%s=%s", e->key, e->value);
} }
@ -575,6 +733,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
pad_idx = 0; \ pad_idx = 0; \
} while (0) } while (0)
av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED); av_bprint_init(&args, 0, AV_BPRINT_SIZE_UNLIMITED);
#if FFMPEG_OPT_MAP_CHANNEL
if (ost->audio_channels_mapped) { if (ost->audio_channels_mapped) {
AVChannelLayout mapped_layout = { 0 }; AVChannelLayout mapped_layout = { 0 };
int i; int i;
@ -587,6 +746,7 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
AUTO_INSERT_FILTER("-map_channel", "pan", args.str); AUTO_INSERT_FILTER("-map_channel", "pan", args.str);
av_bprint_clear(&args); av_bprint_clear(&args);
} }
#endif
if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC) if (codec->ch_layout.order == AV_CHANNEL_ORDER_UNSPEC)
av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels); av_channel_layout_default(&codec->ch_layout, codec->ch_layout.nb_channels);
@ -620,11 +780,11 @@ static int configure_output_audio_filter(FilterGraph *fg, OutputFilter *ofilter,
if (ost->apad && of->shortest) { if (ost->apad && of->shortest) {
int i; int i;
for (i=0; i<of->ctx->nb_streams; i++) for (i = 0; i < of->nb_streams; i++)
if (of->ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) if (of->streams[i]->st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
break; break;
if (i<of->ctx->nb_streams) { if (i < of->nb_streams) {
AUTO_INSERT_FILTER("-apad", "apad", ost->apad); AUTO_INSERT_FILTER("-apad", "apad", ost->apad);
} }
} }
@ -751,7 +911,7 @@ static int configure_input_video_filter(FilterGraph *fg, InputFilter *ifilter,
} }
if (!fr.num) if (!fr.num)
fr = av_guess_frame_rate(input_files[ist->file_index]->ctx, ist->st, NULL); fr = ist->framerate_guessed;
if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) { if (ist->dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE) {
ret = sub2video_prepare(ist, ifilter); ret = sub2video_prepare(ist, ifilter);
@ -904,40 +1064,6 @@ static int configure_input_audio_filter(FilterGraph *fg, InputFilter *ifilter,
last_filter = filt_ctx; \ last_filter = filt_ctx; \
} while (0) } while (0)
if (audio_sync_method > 0) {
char args[256] = {0};
av_strlcatf(args, sizeof(args), "async=%d", audio_sync_method);
if (audio_drift_threshold != 0.1)
av_strlcatf(args, sizeof(args), ":min_hard_comp=%f", audio_drift_threshold);
if (!fg->reconfiguration)
av_strlcatf(args, sizeof(args), ":first_pts=0");
AUTO_INSERT_FILTER_INPUT("-async", "aresample", args);
}
// if (ost->audio_channels_mapped) {
// int i;
// AVBPrint pan_buf;
// av_bprint_init(&pan_buf, 256, 8192);
// av_bprintf(&pan_buf, "0x%"PRIx64,
// av_get_default_channel_layout(ost->audio_channels_mapped));
// for (i = 0; i < ost->audio_channels_mapped; i++)
// if (ost->audio_channels_map[i] != -1)
// av_bprintf(&pan_buf, ":c%d=c%d", i, ost->audio_channels_map[i]);
// AUTO_INSERT_FILTER_INPUT("-map_channel", "pan", pan_buf.str);
// av_bprint_finalize(&pan_buf, NULL);
// }
if (audio_volume != 256) {
char args[256];
av_log(NULL, AV_LOG_WARNING, "-vol has been deprecated. Use the volume "
"audio filter instead.\n");
snprintf(args, sizeof(args), "%f", audio_volume / 256.);
AUTO_INSERT_FILTER_INPUT("-vol", "volume", args);
}
snprintf(name, sizeof(name), "trim for input stream %d:%d", snprintf(name, sizeof(name), "trim for input stream %d:%d",
ist->file_index, ist->st->index); ist->file_index, ist->st->index);
if (copy_ts) { if (copy_ts) {
@ -1020,44 +1146,39 @@ int configure_filtergraph(FilterGraph *fg)
if (simple) { if (simple) {
OutputStream *ost = fg->outputs[0]->ost; OutputStream *ost = fg->outputs[0]->ost;
char args[512];
const AVDictionaryEntry *e = NULL;
if (filter_nbthreads) { if (filter_nbthreads) {
ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0); ret = av_opt_set(fg->graph, "threads", filter_nbthreads, 0);
if (ret < 0) if (ret < 0)
goto fail; goto fail;
} else { } else {
const AVDictionaryEntry *e = NULL;
e = av_dict_get(ost->encoder_opts, "threads", NULL, 0); e = av_dict_get(ost->encoder_opts, "threads", NULL, 0);
if (e) if (e)
av_opt_set(fg->graph, "threads", e->value, 0); av_opt_set(fg->graph, "threads", e->value, 0);
} }
args[0] = 0; if (av_dict_count(ost->sws_dict)) {
e = NULL; ret = av_dict_get_string(ost->sws_dict,
while ((e = av_dict_get(ost->sws_dict, "", e, &fg->graph->scale_sws_opts,
AV_DICT_IGNORE_SUFFIX))) { '=', ':');
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); if (ret < 0)
} goto fail;
if (strlen(args)) {
args[strlen(args)-1] = 0;
fg->graph->scale_sws_opts = av_strdup(args);
} }
args[0] = 0; if (av_dict_count(ost->swr_opts)) {
e = NULL; char *args;
while ((e = av_dict_get(ost->swr_opts, "", e, ret = av_dict_get_string(ost->swr_opts, &args, '=', ':');
AV_DICT_IGNORE_SUFFIX))) { if (ret < 0)
av_strlcatf(args, sizeof(args), "%s=%s:", e->key, e->value); goto fail;
}
if (strlen(args))
args[strlen(args)-1] = 0;
av_opt_set(fg->graph, "aresample_swr_opts", args, 0); av_opt_set(fg->graph, "aresample_swr_opts", args, 0);
av_free(args);
}
} else { } else {
fg->graph->nb_threads = filter_complex_nbthreads; fg->graph->nb_threads = filter_complex_nbthreads;
} }
if ((ret = avfilter_graph_parse2(fg->graph, graph_desc, &inputs, &outputs)) < 0) if ((ret = graph_parse(fg->graph, graph_desc, &inputs, &outputs)) < 0)
goto fail; goto fail;
ret = hw_device_setup_for_filter(fg); ret = hw_device_setup_for_filter(fg);
@ -1131,16 +1252,8 @@ int configure_filtergraph(FilterGraph *fg)
for (i = 0; i < fg->nb_outputs; i++) { for (i = 0; i < fg->nb_outputs; i++) {
OutputStream *ost = fg->outputs[i]->ost; OutputStream *ost = fg->outputs[i]->ost;
if (!ost->enc) { if (ost->enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO &&
/* identical to the same check in ffmpeg.c, needed because !(ost->enc_ctx->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
complex filter graphs are initialized earlier */
av_log(NULL, AV_LOG_ERROR, "Encoder (codec %s) not found for output stream #%d:%d\n",
avcodec_get_name(ost->st->codecpar->codec_id), ost->file_index, ost->index);
ret = AVERROR(EINVAL);
goto fail;
}
if (ost->enc->type == AVMEDIA_TYPE_AUDIO &&
!(ost->enc->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE))
av_buffersink_set_frame_size(ost->filter->filter, av_buffersink_set_frame_size(ost->filter->filter,
ost->enc_ctx->frame_size); ost->enc_ctx->frame_size);
} }

@ -1,5 +1,6 @@
/* /*
* Copyright (c) 2018 Taner Sener * Copyright (c) 2018-2019 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -23,6 +24,12 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 12.2019 * 12.2019
@ -357,7 +364,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (ist->hwaccel_id == HWACCEL_AUTO) { if (ist->hwaccel_id == HWACCEL_AUTO) {
ist->hwaccel_device_type = dev->type; ist->hwaccel_device_type = dev->type;
} else if (ist->hwaccel_device_type != dev->type) { } else if (ist->hwaccel_device_type != dev->type) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "Invalid hwaccel device " av_log(NULL, AV_LOG_ERROR, "Invalid hwaccel device "
"specified for decoder: device %s of type %s is not " "specified for decoder: device %s of type %s is not "
"usable with hwaccel %s.\n", dev->name, "usable with hwaccel %s.\n", dev->name,
av_hwdevice_get_type_name(dev->type), av_hwdevice_get_type_name(dev->type),
@ -408,7 +415,7 @@ int hw_device_setup_for_decode(InputStream *ist)
type = config->device_type; type = config->device_type;
dev = hw_device_get_by_type(type); dev = hw_device_get_by_type(type);
if (dev) { if (dev) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with existing device %s.\n", "hwaccel type %s with existing device %s.\n",
av_hwdevice_get_type_name(type), dev->name); av_hwdevice_get_type_name(type), dev->name);
} }
@ -426,12 +433,12 @@ int hw_device_setup_for_decode(InputStream *ist)
continue; continue;
} }
if (ist->hwaccel_device) { if (ist->hwaccel_device) {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new device created " "hwaccel type %s with new device created "
"from %s.\n", av_hwdevice_get_type_name(type), "from %s.\n", av_hwdevice_get_type_name(type),
ist->hwaccel_device); ist->hwaccel_device);
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Using auto " av_log(NULL, AV_LOG_INFO, "Using auto "
"hwaccel type %s with new default device.\n", "hwaccel type %s with new default device.\n",
av_hwdevice_get_type_name(type)); av_hwdevice_get_type_name(type));
} }
@ -439,7 +446,7 @@ int hw_device_setup_for_decode(InputStream *ist)
if (dev) { if (dev) {
ist->hwaccel_device_type = type; ist->hwaccel_device_type = type;
} else { } else {
av_log(ist->dec_ctx, AV_LOG_INFO, "Auto hwaccel " av_log(NULL, AV_LOG_INFO, "Auto hwaccel "
"disabled: no device found.\n"); "disabled: no device found.\n");
ist->hwaccel_id = HWACCEL_NONE; ist->hwaccel_id = HWACCEL_NONE;
return 0; return 0;
@ -447,7 +454,7 @@ int hw_device_setup_for_decode(InputStream *ist)
} }
if (!dev) { if (!dev) {
av_log(ist->dec_ctx, AV_LOG_ERROR, "No device available " av_log(NULL, AV_LOG_ERROR, "No device available "
"for decoder: device type %s needed for codec %s.\n", "for decoder: device type %s needed for codec %s.\n",
av_hwdevice_get_type_name(type), ist->dec->name); av_hwdevice_get_type_name(type), ist->dec->name);
return err; return err;
@ -479,7 +486,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
} }
for (i = 0;; i++) { for (i = 0;; i++) {
config = avcodec_get_hw_config(ost->enc, i); config = avcodec_get_hw_config(ost->enc_ctx->codec, i);
if (!config) if (!config)
break; break;
@ -490,7 +497,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using input "
"frames context (format %s) with %s encoder.\n", "frames context (format %s) with %s encoder.\n",
av_get_pix_fmt_name(ost->enc_ctx->pix_fmt), av_get_pix_fmt_name(ost->enc_ctx->pix_fmt),
ost->enc->name); ost->enc_ctx->codec->name);
ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref); ost->enc_ctx->hw_frames_ctx = av_buffer_ref(frames_ref);
if (!ost->enc_ctx->hw_frames_ctx) if (!ost->enc_ctx->hw_frames_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
@ -505,7 +512,7 @@ int hw_device_setup_for_encode(OutputStream *ost)
if (dev) { if (dev) {
av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s " av_log(ost->enc_ctx, AV_LOG_VERBOSE, "Using device %s "
"(type %s) with %s encoder.\n", dev->name, "(type %s) with %s encoder.\n", dev->name,
av_hwdevice_get_type_name(dev->type), ost->enc->name); av_hwdevice_get_type_name(dev->type), ost->enc_ctx->codec->name);
ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref); ost->enc_ctx->hw_device_ctx = av_buffer_ref(dev->device_ref);
if (!ost->enc_ctx->hw_device_ctx) if (!ost->enc_ctx->hw_device_ctx)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
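The encoder hunks above replace ost->enc with ost->enc_ctx->codec when walking the encoder's hardware configurations. A rough sketch of that libavcodec query loop in isolation (helper name is hypothetical; enc_ctx is assumed to hold an encoder with its codec set):

#include "libavcodec/avcodec.h"

/* Return the first device type the encoder supports through a hw device context,
 * or AV_HWDEVICE_TYPE_NONE when the encoder has no such configuration. */
static enum AVHWDeviceType find_encoder_device_type(const AVCodecContext *enc_ctx)
{
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(enc_ctx->codec, i);
        if (!config)
            return AV_HWDEVICE_TYPE_NONE;  /* ran out of configurations */
        if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
            return config->device_type;
    }
}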

@ -1,6 +1,7 @@
/* /*
* This file is part of FFmpeg. * This file is part of FFmpeg.
* Copyright (c) 2022 Taner Sener * Copyright (c) 2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* FFmpeg is free software; you can redistribute it and/or * FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public * modify it under the terms of the GNU Lesser General Public
@ -22,108 +23,101 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop the ffmpeg-kit library. * by us to develop the ffmpeg-kit library.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp marked as thread-local
* - ms_from_ost migrated from ffmpeg_mux.c and marked as non-static
*
* ffmpeg-kit changes by Taner Sener * ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
* -------------------------------------------------------- * --------------------------------------------------------
* - fftools_ prefix added to fftools headers * - fftools_ prefix added to fftools headers
* - using main_ffmpeg_return_code instead of main_return_code * - using main_ffmpeg_return_code instead of main_return_code
* - printf replaced with av_log statements
*/ */
#include <stdatomic.h>
#include <stdio.h> #include <stdio.h>
#include <string.h> #include <string.h>
#include "fftools_ffmpeg.h" #include "fftools_ffmpeg.h"
#include "fftools_ffmpeg_mux.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
#include "fftools_thread_queue.h"
#include "libavutil/fifo.h" #include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h" #include "libavutil/intreadwrite.h"
#include "libavutil/log.h" #include "libavutil/log.h"
#include "libavutil/mem.h" #include "libavutil/mem.h"
#include "libavutil/timestamp.h" #include "libavutil/timestamp.h"
#include "libavutil/thread.h"
#include "libavcodec/packet.h" #include "libavcodec/packet.h"
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavformat/avio.h" #include "libavformat/avio.h"
static void close_all_output_streams(OutputStream *ost, OSTFinished this_stream, OSTFinished others) __thread int want_sdp = 1;
MuxStream *ms_from_ost(OutputStream *ost)
{ {
int i; return (MuxStream*)ost;
for (i = 0; i < nb_output_streams; i++) {
OutputStream *ost2 = output_streams[i];
ost2->finished |= ost == ost2 ? this_stream : others;
}
} }
void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, static Muxer *mux_from_of(OutputFile *of)
int unqueue)
{ {
AVFormatContext *s = of->ctx; return (Muxer*)of;
AVStream *st = ost->st; }
int ret;
/* static int64_t filesize(AVIOContext *pb)
* Audio encoders may split the packets -- #frames in != #packets out. {
* But there is no reordering, so we can limit the number of output packets int64_t ret = -1;
* by simply dropping them here.
* Counting encoded video frames needs to be done separately because of if (pb) {
* reordering, see do_video_out(). ret = avio_size(pb);
* Do not count the packet when unqueued because it has been counted when queued. if (ret <= 0) // FIXME improve avio_size() so it works with non seekable output too
*/ ret = avio_tell(pb);
if (!(st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->encoding_needed) && !unqueue) {
if (ost->frame_number >= ost->max_frames) {
av_packet_unref(pkt);
return;
}
ost->frame_number++;
} }
if (!of->header_written) { return ret;
AVPacket *tmp_pkt; }
/* the muxer is not initialized yet, buffer the packet */
if (!av_fifo_can_write(ost->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ost->muxing_queue);
unsigned int are_we_over_size =
(ost->muxing_queue_data_size + pkt->size) > ost->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ost->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) { static int write_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
av_log(NULL, AV_LOG_ERROR, {
"Too many packets buffered for output stream %d:%d.\n", MuxStream *ms = ms_from_ost(ost);
ost->file_index, ost->st->index); AVFormatContext *s = mux->fc;
exit_program(1); AVStream *st = ost->st;
} int64_t fs;
ret = av_fifo_grow2(ost->muxing_queue, new_size - cur_size); uint64_t frame_num;
if (ret < 0) int ret;
exit_program(1);
} fs = filesize(s->pb);
ret = av_packet_make_refcounted(pkt); atomic_store(&mux->last_filesize, fs);
if (ret < 0) if (fs >= mux->limit_filesize) {
exit_program(1); ret = AVERROR_EOF;
tmp_pkt = av_packet_alloc(); goto fail;
if (!tmp_pkt)
exit_program(1);
av_packet_move_ref(tmp_pkt, pkt);
ost->muxing_queue_data_size += tmp_pkt->size;
av_fifo_write(ost->muxing_queue, &tmp_pkt, 1);
return;
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP) || if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && ost->vsync_method == VSYNC_DROP)
(st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && audio_sync_method < 0))
pkt->pts = pkt->dts = AV_NOPTS_VALUE; pkt->pts = pkt->dts = AV_NOPTS_VALUE;
if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
if (ost->frame_rate.num && ost->is_cfr) { if (ost->frame_rate.num && ost->is_cfr) {
if (pkt->duration > 0) if (pkt->duration > 0)
av_log(NULL, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n"); av_log(ost, AV_LOG_WARNING, "Overriding packet duration by frame rate, this should not happen\n");
pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate), pkt->duration = av_rescale_q(1, av_inv_q(ost->frame_rate),
ost->mux_timebase); pkt->time_base);
} }
} }
av_packet_rescale_ts(pkt, ost->mux_timebase, ost->st->time_base); av_packet_rescale_ts(pkt, pkt->time_base, ost->st->time_base);
pkt->time_base = ost->st->time_base;
if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) { if (!(s->oformat->flags & AVFMT_NOTIMESTAMPS)) {
if (pkt->dts != AV_NOPTS_VALUE && if (pkt->dts != AV_NOPTS_VALUE &&
@ -133,25 +127,26 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
pkt->dts, pkt->pts, pkt->dts, pkt->pts,
ost->file_index, ost->st->index); ost->file_index, ost->st->index);
pkt->pts = pkt->pts =
pkt->dts = pkt->pts + pkt->dts + ost->last_mux_dts + 1 pkt->dts = pkt->pts + pkt->dts + ms->last_mux_dts + 1
- FFMIN3(pkt->pts, pkt->dts, ost->last_mux_dts + 1) - FFMIN3(pkt->pts, pkt->dts, ms->last_mux_dts + 1)
- FFMAX3(pkt->pts, pkt->dts, ost->last_mux_dts + 1); - FFMAX3(pkt->pts, pkt->dts, ms->last_mux_dts + 1);
} }
if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) && if ((st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) &&
pkt->dts != AV_NOPTS_VALUE && pkt->dts != AV_NOPTS_VALUE &&
ost->last_mux_dts != AV_NOPTS_VALUE) { ms->last_mux_dts != AV_NOPTS_VALUE) {
int64_t max = ost->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT); int64_t max = ms->last_mux_dts + !(s->oformat->flags & AVFMT_TS_NONSTRICT);
if (pkt->dts < max) { if (pkt->dts < max) {
int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG; int loglevel = max - pkt->dts > 2 || st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO ? AV_LOG_WARNING : AV_LOG_DEBUG;
if (exit_on_error) if (exit_on_error)
loglevel = AV_LOG_ERROR; loglevel = AV_LOG_ERROR;
av_log(s, loglevel, "Non-monotonous DTS in output stream " av_log(s, loglevel, "Non-monotonous DTS in output stream "
"%d:%d; previous: %"PRId64", current: %"PRId64"; ", "%d:%d; previous: %"PRId64", current: %"PRId64"; ",
ost->file_index, ost->st->index, ost->last_mux_dts, pkt->dts); ost->file_index, ost->st->index, ms->last_mux_dts, pkt->dts);
if (exit_on_error) { if (exit_on_error) {
av_log(NULL, AV_LOG_FATAL, "aborting.\n"); ret = AVERROR(EINVAL);
exit_program(1); goto fail;
} }
av_log(s, loglevel, "changing to %"PRId64". This may result " av_log(s, loglevel, "changing to %"PRId64". This may result "
"in incorrect timestamps in the output file.\n", "in incorrect timestamps in the output file.\n",
max); max);
@ -161,17 +156,17 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
} }
} }
} }
ost->last_mux_dts = pkt->dts; ms->last_mux_dts = pkt->dts;
ost->data_size += pkt->size; ost->data_size_mux += pkt->size;
ost->packets_written++; frame_num = atomic_fetch_add(&ost->packets_written, 1);
pkt->stream_index = ost->index; pkt->stream_index = ost->index;
if (debug_ts) { if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "muxer <- type:%s " av_log(ost, AV_LOG_INFO, "muxer <- type:%s "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n", "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s duration:%s duration_time:%s size:%d\n",
av_get_media_type_string(ost->enc_ctx->codec_type), av_get_media_type_string(st->codecpar->codec_type),
av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base), av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &ost->st->time_base),
av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base), av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &ost->st->time_base),
av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base), av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, &ost->st->time_base),
@ -179,12 +174,307 @@ void of_write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost,
); );
} }
if (ms->stats.io)
enc_stats_write(ost, &ms->stats, NULL, pkt, frame_num);
ret = av_interleaved_write_frame(s, pkt); ret = av_interleaved_write_frame(s, pkt);
if (ret < 0) { if (ret < 0) {
print_error("av_interleaved_write_frame()", ret); print_error("av_interleaved_write_frame()", ret);
main_ffmpeg_return_code = 1; goto fail;
close_all_output_streams(ost, MUXER_FINISHED | ENCODER_FINISHED, ENCODER_FINISHED); }
return 0;
fail:
av_packet_unref(pkt);
return ret;
}
static int sync_queue_process(Muxer *mux, OutputStream *ost, AVPacket *pkt, int *stream_eof)
{
OutputFile *of = &mux->of;
if (ost->sq_idx_mux >= 0) {
int ret = sq_send(mux->sq_mux, ost->sq_idx_mux, SQPKT(pkt));
if (ret < 0) {
if (ret == AVERROR_EOF)
*stream_eof = 1;
return ret;
}
while (1) {
ret = sq_receive(mux->sq_mux, -1, SQPKT(mux->sq_pkt));
if (ret < 0)
return (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) ? 0 : ret;
ret = write_packet(mux, of->streams[ret],
mux->sq_pkt);
if (ret < 0)
return ret;
}
} else if (pkt)
return write_packet(mux, ost, pkt);
return 0;
}
static void thread_set_name(OutputFile *of)
{
char name[16];
snprintf(name, sizeof(name), "mux%d:%s", of->index, of->format->name);
ff_thread_setname(name);
}
static void *muxer_thread(void *arg)
{
Muxer *mux = arg;
OutputFile *of = &mux->of;
AVPacket *pkt = NULL;
int ret = 0;
pkt = av_packet_alloc();
if (!pkt) {
ret = AVERROR(ENOMEM);
goto finish;
}
thread_set_name(of);
while (1) {
OutputStream *ost;
int stream_idx, stream_eof = 0;
ret = tq_receive(mux->tq, &stream_idx, pkt);
if (stream_idx < 0) {
av_log(mux, AV_LOG_VERBOSE, "All streams finished\n");
ret = 0;
break;
}
ost = of->streams[stream_idx];
ret = sync_queue_process(mux, ost, ret < 0 ? NULL : pkt, &stream_eof);
av_packet_unref(pkt);
if (ret == AVERROR_EOF && stream_eof)
tq_receive_finish(mux->tq, stream_idx);
else if (ret < 0) {
av_log(mux, AV_LOG_ERROR, "Error muxing a packet\n");
break;
}
}
finish:
av_packet_free(&pkt);
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_receive_finish(mux->tq, i);
av_log(mux, AV_LOG_VERBOSE, "Terminating muxer thread\n");
return (void*)(intptr_t)ret;
}
static int thread_submit_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
int ret = 0;
if (!pkt || ost->finished & MUXER_FINISHED)
goto finish;
ret = tq_send(mux->tq, ost->index, pkt);
if (ret < 0)
goto finish;
return 0;
finish:
if (pkt)
av_packet_unref(pkt);
ost->finished |= MUXER_FINISHED;
tq_send_finish(mux->tq, ost->index);
return ret == AVERROR_EOF ? 0 : ret;
}
static int queue_packet(Muxer *mux, OutputStream *ost, AVPacket *pkt)
{
MuxStream *ms = ms_from_ost(ost);
AVPacket *tmp_pkt = NULL;
int ret;
if (!av_fifo_can_write(ms->muxing_queue)) {
size_t cur_size = av_fifo_can_read(ms->muxing_queue);
size_t pkt_size = pkt ? pkt->size : 0;
unsigned int are_we_over_size =
(ms->muxing_queue_data_size + pkt_size) > ms->muxing_queue_data_threshold;
size_t limit = are_we_over_size ? ms->max_muxing_queue_size : SIZE_MAX;
size_t new_size = FFMIN(2 * cur_size, limit);
if (new_size <= cur_size) {
av_log(ost, AV_LOG_ERROR,
"Too many packets buffered for output stream %d:%d.\n",
ost->file_index, ost->st->index);
return AVERROR(ENOSPC);
}
ret = av_fifo_grow2(ms->muxing_queue, new_size - cur_size);
if (ret < 0)
return ret;
}
if (pkt) {
ret = av_packet_make_refcounted(pkt);
if (ret < 0)
return ret;
tmp_pkt = av_packet_alloc();
if (!tmp_pkt)
return AVERROR(ENOMEM);
av_packet_move_ref(tmp_pkt, pkt);
ms->muxing_queue_data_size += tmp_pkt->size;
}
av_fifo_write(ms->muxing_queue, &tmp_pkt, 1);
return 0;
}
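queue_packet() above buffers packets with the AVFifo API that replaced the old AVFifoBuffer one. A standalone sketch of the same grow-and-store pattern for AVPacket pointers (the helper and its growth policy are illustrative):

#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"

/* Store one packet in a FIFO of AVPacket*, growing the FIFO when it is full. */
static int buffer_packet(AVFifo *queue, AVPacket *pkt)
{
    AVPacket *tmp;
    int ret;

    if (!av_fifo_can_write(queue)) {
        /* grow by the number of queued elements, i.e. roughly double the capacity */
        ret = av_fifo_grow2(queue, av_fifo_can_read(queue));
        if (ret < 0)
            return ret;
    }

    tmp = av_packet_alloc();
    if (!tmp)
        return AVERROR(ENOMEM);
    av_packet_move_ref(tmp, pkt);   /* the queue owns the payload from here on */
    return av_fifo_write(queue, &tmp, 1);
}

The queue itself would be created with something like av_fifo_alloc2(8, sizeof(AVPacket*), 0) and drained with av_fifo_read(), as thread_start() does further down.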
static int submit_packet(Muxer *mux, AVPacket *pkt, OutputStream *ost)
{
int ret;
if (mux->tq) {
return thread_submit_packet(mux, ost, pkt);
} else {
/* the muxer is not initialized yet, buffer the packet */
ret = queue_packet(mux, ost, pkt);
if (ret < 0) {
if (pkt)
av_packet_unref(pkt);
return ret;
}
} }
return 0;
}
void of_output_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int eof)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
const char *err_msg;
int ret = 0;
if (!eof && pkt->dts != AV_NOPTS_VALUE)
ost->last_mux_dts = av_rescale_q(pkt->dts, pkt->time_base, AV_TIME_BASE_Q);
/* apply the output bitstream filters */
if (ms->bsf_ctx) {
int bsf_eof = 0;
ret = av_bsf_send_packet(ms->bsf_ctx, eof ? NULL : pkt);
if (ret < 0) {
err_msg = "submitting a packet for bitstream filtering";
goto fail;
}
while (!bsf_eof) {
ret = av_bsf_receive_packet(ms->bsf_ctx, pkt);
if (ret == AVERROR(EAGAIN))
return;
else if (ret == AVERROR_EOF)
bsf_eof = 1;
else if (ret < 0) {
err_msg = "applying bitstream filters to a packet";
goto fail;
}
ret = submit_packet(mux, bsf_eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
} else {
ret = submit_packet(mux, eof ? NULL : pkt, ost);
if (ret < 0)
goto mux_fail;
}
return;
mux_fail:
err_msg = "submitting a packet to the muxer";
fail:
av_log(ost, AV_LOG_ERROR, "Error %s\n", err_msg);
if (exit_on_error)
exit_program(1);
}
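of_output_packet() above drives the optional bitstream filter with libavcodec's usual send/receive pair. A stripped-down sketch of that draining loop on its own (the sink callback is a placeholder for the muxer hand-off, not part of this diff):

#include "libavcodec/bsf.h"
#include "libavutil/error.h"

/* Push one packet (or NULL to flush) through a bitstream filter and pass
 * every produced packet to a caller-supplied sink. */
static int bsf_filter_packet(AVBSFContext *bsf, AVPacket *in_pkt, AVPacket *out_pkt,
                             int (*sink)(AVPacket *pkt, void *opaque), void *opaque)
{
    int ret = av_bsf_send_packet(bsf, in_pkt);   /* in_pkt == NULL flushes the filter */
    if (ret < 0)
        return ret;

    while ((ret = av_bsf_receive_packet(bsf, out_pkt)) >= 0) {
        ret = sink(out_pkt, opaque);
        av_packet_unref(out_pkt);
        if (ret < 0)
            return ret;
    }
    /* EAGAIN means "feed more input", EOF means "fully flushed"; both are normal here */
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}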
static int thread_stop(Muxer *mux)
{
void *ret;
if (!mux || !mux->tq)
return 0;
for (unsigned int i = 0; i < mux->fc->nb_streams; i++)
tq_send_finish(mux->tq, i);
pthread_join(mux->thread, &ret);
tq_free(&mux->tq);
return (int)(intptr_t)ret;
}
static void pkt_move(void *dst, void *src)
{
av_packet_move_ref(dst, src);
}
static int thread_start(Muxer *mux)
{
AVFormatContext *fc = mux->fc;
ObjPool *op;
int ret;
op = objpool_alloc_packets();
if (!op)
return AVERROR(ENOMEM);
mux->tq = tq_alloc(fc->nb_streams, mux->thread_queue_size, op, pkt_move);
if (!mux->tq) {
objpool_free(&op);
return AVERROR(ENOMEM);
}
ret = pthread_create(&mux->thread, NULL, muxer_thread, (void*)mux);
if (ret) {
tq_free(&mux->tq);
return AVERROR(ret);
}
/* flush the muxing queues */
for (int i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = mux->of.streams[i];
MuxStream *ms = ms_from_ost(ost);
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */
if (!av_fifo_can_read(ms->muxing_queue))
ost->mux_timebase = ost->st->time_base;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0) {
ret = thread_submit_packet(mux, ost, pkt);
if (pkt) {
ms->muxing_queue_data_size -= pkt->size;
av_packet_free(&pkt);
}
if (ret < 0)
return ret;
}
}
return 0;
} }
static int print_sdp(void) static int print_sdp(void)
@ -196,16 +486,16 @@ static int print_sdp(void)
AVFormatContext **avc; AVFormatContext **avc;
for (i = 0; i < nb_output_files; i++) { for (i = 0; i < nb_output_files; i++) {
if (!output_files[i]->header_written) if (!mux_from_of(output_files[i])->header_written)
return 0; return 0;
} }
avc = av_malloc_array(nb_output_files, sizeof(*avc)); avc = av_malloc_array(nb_output_files, sizeof(*avc));
if (!avc) if (!avc)
exit_program(1); return AVERROR(ENOMEM);
for (i = 0, j = 0; i < nb_output_files; i++) { for (i = 0, j = 0; i < nb_output_files; i++) {
if (!strcmp(output_files[i]->ctx->oformat->name, "rtp")) { if (!strcmp(output_files[i]->format->name, "rtp")) {
avc[j] = output_files[i]->ctx; avc[j] = mux_from_of(output_files[i])->fc;
j++; j++;
} }
} }
@ -221,7 +511,7 @@ static int print_sdp(void)
goto fail; goto fail;
if (!sdp_filename) { if (!sdp_filename) {
printf("SDP:\n%s\n", sdp); av_log(NULL, AV_LOG_ERROR, "SDP:\n%s\n", sdp);
fflush(stdout); fflush(stdout);
} else { } else {
ret = avio_open2(&sdp_pb, sdp_filename, AVIO_FLAG_WRITE, &int_cb, NULL); ret = avio_open2(&sdp_pb, sdp_filename, AVIO_FLAG_WRITE, &int_cb, NULL);
@ -235,34 +525,36 @@ static int print_sdp(void)
av_freep(&sdp_filename); av_freep(&sdp_filename);
} }
// SDP successfully written, allow muxer threads to start
ret = 1;
fail: fail:
av_freep(&avc); av_freep(&avc);
return ret; return ret;
} }
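print_sdp() above only gathers the RTP format contexts; the actual SDP text is produced by av_sdp_create() in the part of the hunk that is not shown here. A hedged sketch of that call (buffer size and logging are arbitrary choices, not taken from the diff):

#include "libavformat/avformat.h"
#include "libavutil/log.h"

/* Build and print one SDP covering a set of RTP muxer contexts. */
static int make_sdp(AVFormatContext **rtp_ctxs, int nb_rtp)
{
    char sdp[16384];
    int ret = av_sdp_create(rtp_ctxs, nb_rtp, sdp, sizeof(sdp));
    if (ret < 0)
        return ret;
    av_log(NULL, AV_LOG_INFO, "SDP:\n%s\n", sdp);
    return 0;
}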
/* open the muxer when all the streams are initialized */ int mux_check_init(Muxer *mux)
int of_check_init(OutputFile *of)
{ {
OutputFile *of = &mux->of;
AVFormatContext *fc = mux->fc;
int ret, i; int ret, i;
for (i = 0; i < of->ctx->nb_streams; i++) { for (i = 0; i < fc->nb_streams; i++) {
OutputStream *ost = output_streams[of->ost_index + i]; OutputStream *ost = of->streams[i];
if (!ost->initialized) if (!ost->initialized)
return 0; return 0;
} }
ret = avformat_write_header(of->ctx, &of->opts); ret = avformat_write_header(fc, &mux->opts);
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, av_log(mux, AV_LOG_ERROR, "Could not write header (incorrect codec "
"Could not write header for output file #%d " "parameters ?): %s\n", av_err2str(ret));
"(incorrect codec parameters ?): %s\n",
of->index, av_err2str(ret));
return ret; return ret;
} }
//assert_avoptions(of->opts); //assert_avoptions(of->opts);
of->header_written = 1; mux->header_written = 1;
av_dump_format(of->ctx, of->index, of->ctx->url, 1); av_dump_format(fc, of->index, fc->url, 1);
nb_output_dumped++; nb_output_dumped++;
if (sdp_filename || want_sdp) { if (sdp_filename || want_sdp) {
@ -270,62 +562,220 @@ int of_check_init(OutputFile *of)
if (ret < 0) { if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n"); av_log(NULL, AV_LOG_ERROR, "Error writing the SDP.\n");
return ret; return ret;
} else if (ret == 1) {
/* SDP is written only after all the muxers are ready, so now we
* start ALL the threads */
for (i = 0; i < nb_output_files; i++) {
ret = thread_start(mux_from_of(output_files[i]));
if (ret < 0)
return ret;
}
} }
} else {
ret = thread_start(mux_from_of(of));
if (ret < 0)
return ret;
} }
/* flush the muxing queues */ return 0;
for (i = 0; i < of->ctx->nb_streams; i++) { }
OutputStream *ost = output_streams[of->ost_index + i];
AVPacket *pkt;
/* try to improve muxing time_base (only possible if nothing has been written yet) */ static int bsf_init(MuxStream *ms)
if (!av_fifo_can_read(ost->muxing_queue)) {
ost->mux_timebase = ost->st->time_base; OutputStream *ost = &ms->ost;
AVBSFContext *ctx = ms->bsf_ctx;
int ret;
while (av_fifo_read(ost->muxing_queue, &pkt, 1) >= 0) { if (!ctx)
ost->muxing_queue_data_size -= pkt->size; return 0;
of_write_packet(of, pkt, ost, 1);
av_packet_free(&pkt); ret = avcodec_parameters_copy(ctx->par_in, ost->st->codecpar);
} if (ret < 0)
return ret;
ctx->time_base_in = ost->st->time_base;
ret = av_bsf_init(ctx);
if (ret < 0) {
av_log(ms, AV_LOG_ERROR, "Error initializing bitstream filter: %s\n",
ctx->filter->name);
return ret;
} }
ret = avcodec_parameters_copy(ost->st->codecpar, ctx->par_out);
if (ret < 0)
return ret;
ost->st->time_base = ctx->time_base_out;
return 0; return 0;
} }
int of_stream_init(OutputFile *of, OutputStream *ost)
{
Muxer *mux = mux_from_of(of);
MuxStream *ms = ms_from_ost(ost);
int ret;
if (ost->sq_idx_mux >= 0)
sq_set_tb(mux->sq_mux, ost->sq_idx_mux, ost->mux_timebase);
/* initialize bitstream filters for the output stream
* needs to be done here, because the codec id for streamcopy is not
* known until now */
ret = bsf_init(ms);
if (ret < 0)
return ret;
ost->initialized = 1;
return mux_check_init(mux);
}
 int of_write_trailer(OutputFile *of)
 {
+    Muxer *mux = mux_from_of(of);
+    AVFormatContext *fc = mux->fc;
     int ret;
-    if (!of->header_written) {
-        av_log(NULL, AV_LOG_ERROR,
-               "Nothing was written into output file %d (%s), because "
-               "at least one of its streams received no packets.\n",
-               of->index, of->ctx->url);
+    if (!mux->tq) {
+        av_log(mux, AV_LOG_ERROR,
+               "Nothing was written into output file, because "
+               "at least one of its streams received no packets.\n");
         return AVERROR(EINVAL);
     }
-    ret = av_write_trailer(of->ctx);
+    ret = thread_stop(mux);
+    if (ret < 0)
+        main_ffmpeg_return_code = ret;
+    ret = av_write_trailer(fc);
     if (ret < 0) {
-        av_log(NULL, AV_LOG_ERROR, "Error writing trailer of %s: %s\n", of->ctx->url, av_err2str(ret));
+        av_log(mux, AV_LOG_ERROR, "Error writing trailer: %s\n", av_err2str(ret));
         return ret;
     }
+    mux->last_filesize = filesize(fc->pb);
+    if (!(of->format->flags & AVFMT_NOFILE)) {
+        ret = avio_closep(&fc->pb);
+        if (ret < 0) {
+            av_log(mux, AV_LOG_ERROR, "Error closing file: %s\n", av_err2str(ret));
+            return ret;
+        }
+    }
     return 0;
 }
static void ost_free(OutputStream **post)
{
OutputStream *ost = *post;
MuxStream *ms;
if (!ost)
return;
ms = ms_from_ost(ost);
if (ost->logfile) {
if (fclose(ost->logfile))
av_log(ms, AV_LOG_ERROR,
"Error closing logfile, loss of information possible: %s\n",
av_err2str(AVERROR(errno)));
ost->logfile = NULL;
}
if (ms->muxing_queue) {
AVPacket *pkt;
while (av_fifo_read(ms->muxing_queue, &pkt, 1) >= 0)
av_packet_free(&pkt);
av_fifo_freep2(&ms->muxing_queue);
}
av_bsf_free(&ms->bsf_ctx);
av_frame_free(&ost->filtered_frame);
av_frame_free(&ost->sq_frame);
av_frame_free(&ost->last_frame);
av_packet_free(&ost->pkt);
av_dict_free(&ost->encoder_opts);
av_freep(&ost->kf.pts);
av_expr_free(ost->kf.pexpr);
av_freep(&ost->avfilter);
av_freep(&ost->logfile_prefix);
av_freep(&ost->apad);
#if FFMPEG_OPT_MAP_CHANNEL
av_freep(&ost->audio_channels_map);
ost->audio_channels_mapped = 0;
#endif
av_dict_free(&ost->sws_dict);
av_dict_free(&ost->swr_opts);
if (ost->enc_ctx)
av_freep(&ost->enc_ctx->stats_in);
avcodec_free_context(&ost->enc_ctx);
for (int i = 0; i < ost->enc_stats_pre.nb_components; i++)
av_freep(&ost->enc_stats_pre.components[i].str);
av_freep(&ost->enc_stats_pre.components);
for (int i = 0; i < ost->enc_stats_post.nb_components; i++)
av_freep(&ost->enc_stats_post.components[i].str);
av_freep(&ost->enc_stats_post.components);
for (int i = 0; i < ms->stats.nb_components; i++)
av_freep(&ms->stats.components[i].str);
av_freep(&ms->stats.components);
av_freep(post);
}
static void fc_close(AVFormatContext **pfc)
{
AVFormatContext *fc = *pfc;
if (!fc)
return;
if (!(fc->oformat->flags & AVFMT_NOFILE))
avio_closep(&fc->pb);
avformat_free_context(fc);
*pfc = NULL;
}
 void of_close(OutputFile **pof)
 {
     OutputFile *of = *pof;
-    AVFormatContext *s;
+    Muxer *mux;
     if (!of)
         return;
-    s = of->ctx;
-    if (s && s->oformat && !(s->oformat->flags & AVFMT_NOFILE))
-        avio_closep(&s->pb);
-    avformat_free_context(s);
-    av_dict_free(&of->opts);
+    mux = mux_from_of(of);
+    thread_stop(mux);
+    sq_free(&of->sq_encode);
+    sq_free(&mux->sq_mux);
+    for (int i = 0; i < of->nb_streams; i++)
+        ost_free(&of->streams[i]);
+    av_freep(&of->streams);
+    av_dict_free(&mux->opts);
+    av_packet_free(&mux->sq_pkt);
+    fc_close(&mux->fc);
     av_freep(pof);
 }
int64_t of_filesize(OutputFile *of)
{
Muxer *mux = mux_from_of(of);
return atomic_load(&mux->last_filesize);
}

@ -0,0 +1,165 @@
/*
* Muxer internal APIs - should not be included outside of ffmpeg_mux*
* Copyright (c) 2023 ARTHENICA LTD
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of ffmpeg_mux.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
* - want_sdp made thread-local
* - EncStatsFile declaration migrated from ffmpeg_mux_init.c
* - WARN_MULTIPLE_OPT_USAGE, MATCH_PER_STREAM_OPT, MATCH_PER_TYPE_OPT, SPECIFIER_OPT_FMT declarations migrated from
* ffmpeg.h
* - ms_from_ost migrated to ffmpeg_mux.c
*/
#ifndef FFTOOLS_FFMPEG_MUX_H
#define FFTOOLS_FFMPEG_MUX_H
#include <stdatomic.h>
#include <stdint.h>
#include "fftools_thread_queue.h"
#include "libavformat/avformat.h"
#include "libavcodec/packet.h"
#include "libavutil/dict.h"
#include "libavutil/fifo.h"
#include "libavutil/thread.h"
#define SPECIFIER_OPT_FMT_str "%s"
#define SPECIFIER_OPT_FMT_i "%i"
#define SPECIFIER_OPT_FMT_i64 "%"PRId64
#define SPECIFIER_OPT_FMT_ui64 "%"PRIu64
#define SPECIFIER_OPT_FMT_f "%f"
#define SPECIFIER_OPT_FMT_dbl "%lf"
#define WARN_MULTIPLE_OPT_USAGE(name, type, so, st)\
{\
char namestr[128] = "";\
const char *spec = so->specifier && so->specifier[0] ? so->specifier : "";\
for (int _i = 0; opt_name_##name[_i]; _i++)\
av_strlcatf(namestr, sizeof(namestr), "-%s%s", opt_name_##name[_i], opt_name_##name[_i+1] ? (opt_name_##name[_i+2] ? ", " : " or ") : "");\
av_log(NULL, AV_LOG_WARNING, "Multiple %s options specified for stream %d, only the last option '-%s%s%s "SPECIFIER_OPT_FMT_##type"' will be used.\n",\
namestr, st->index, opt_name_##name[0], spec[0] ? ":" : "", spec, so->u.type);\
}
#define MATCH_PER_STREAM_OPT(name, type, outvar, fmtctx, st)\
{\
int _ret, _matches = 0;\
SpecifierOpt *so;\
for (int _i = 0; _i < o->nb_ ## name; _i++) {\
char *spec = o->name[_i].specifier;\
if ((_ret = check_stream_specifier(fmtctx, st, spec)) > 0) {\
outvar = o->name[_i].u.type;\
so = &o->name[_i];\
_matches++;\
} else if (_ret < 0)\
exit_program(1);\
}\
if (_matches > 1)\
WARN_MULTIPLE_OPT_USAGE(name, type, so, st);\
}
#define MATCH_PER_TYPE_OPT(name, type, outvar, fmtctx, mediatype)\
{\
int i;\
for (i = 0; i < o->nb_ ## name; i++) {\
char *spec = o->name[i].specifier;\
if (!strcmp(spec, mediatype))\
outvar = o->name[i].u.type;\
}\
}
typedef struct MuxStream {
OutputStream ost;
// name used for logging
char log_name[32];
/* the packets are buffered here until the muxer is ready to be initialized */
AVFifo *muxing_queue;
AVBSFContext *bsf_ctx;
EncStats stats;
int64_t max_frames;
/*
* The size of the AVPackets' buffers in queue.
* Updated when a packet is either pushed or pulled from the queue.
*/
size_t muxing_queue_data_size;
int max_muxing_queue_size;
/* Threshold after which max_muxing_queue_size will be in effect */
size_t muxing_queue_data_threshold;
/* dts of the last packet sent to the muxer, in the stream timebase
* used for making up missing dts values */
int64_t last_mux_dts;
} MuxStream;
typedef struct Muxer {
OutputFile of;
// name used for logging
char log_name[32];
AVFormatContext *fc;
pthread_t thread;
ThreadQueue *tq;
AVDictionary *opts;
int thread_queue_size;
/* filesize limit expressed in bytes */
int64_t limit_filesize;
atomic_int_least64_t last_filesize;
int header_written;
SyncQueue *sq_mux;
AVPacket *sq_pkt;
} Muxer;
typedef struct EncStatsFile {
char *path;
AVIOContext *io;
} EncStatsFile;
/* whether we want to print an SDP, set in of_open() */
extern __thread int want_sdp;
int mux_check_init(Muxer *mux);
#endif /* FFTOOLS_FFMPEG_MUX_H */
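A note on the casts used throughout this file: ms_from_ost() and mux_from_of() are only valid because MuxStream and Muxer embed OutputStream and OutputFile as their first member, so a pointer to the member is also a pointer to the container. A tiny standalone illustration of that pattern (hypothetical types, not the ffmpeg-kit ones):

#include <stdio.h>

typedef struct Base { int id; } Base;

typedef struct Wrapper {
    Base base;   /* must be the first member for the cast below to be well-defined */
    int  extra;
} Wrapper;

/* Same idea as ms_from_ost(): recover the container from its first member. */
static Wrapper *wrapper_from_base(Base *b)
{
    return (Wrapper *)b;
}

int main(void)
{
    Wrapper w = { .base = { .id = 7 }, .extra = 42 };
    Base *b = &w.base;
    printf("%d %d\n", wrapper_from_base(b)->base.id, wrapper_from_base(b)->extra);
    return 0;
}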

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -1,6 +1,7 @@
/* /*
* Copyright (c) 2007-2010 Stefano Sabatini * Copyright (c) 2007-2010 Stefano Sabatini
* Copyright (c) 2020 Taner Sener * Copyright (c) 2020-2022 Taner Sener
* Copyright (c) 2023 ARTHENICA LTD
* *
* This file is part of FFmpeg. * This file is part of FFmpeg.
* *
@ -29,6 +30,13 @@
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop mobile-ffmpeg and later ffmpeg-kit libraries. * by us to develop mobile-ffmpeg and later ffmpeg-kit libraries.
* *
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*
* mobile-ffmpeg / ffmpeg-kit changes by Taner Sener * mobile-ffmpeg / ffmpeg-kit changes by Taner Sener
* *
* 09.2022 * 09.2022
@ -52,9 +60,11 @@
#include "libavutil/ffversion.h" #include "libavutil/ffversion.h"
#include <string.h> #include <string.h>
#include <math.h>
#include "libavformat/avformat.h" #include "libavformat/avformat.h"
#include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h"
#include "libavutil/ambient_viewing_environment.h"
#include "libavutil/avassert.h" #include "libavutil/avassert.h"
#include "libavutil/avstring.h" #include "libavutil/avstring.h"
#include "libavutil/bprint.h" #include "libavutil/bprint.h"
@ -163,6 +173,8 @@ typedef struct ReadInterval {
__thread ReadInterval *read_intervals; __thread ReadInterval *read_intervals;
__thread int read_intervals_nb = 0; __thread int read_intervals_nb = 0;
__thread int find_stream_info = 1;
/* section structure definition */ /* section structure definition */
#define SECTION_MAX_NB_CHILDREN 10 #define SECTION_MAX_NB_CHILDREN 10
@ -626,6 +638,7 @@ static inline void writer_put_str_printf(WriterContext *wctx, const char *str)
static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...) static inline void writer_printf_printf(WriterContext *wctx, const char *fmt, ...)
{ {
va_list ap; va_list ap;
va_start(ap, fmt); va_start(ap, fmt);
av_vlog(NULL, AV_LOG_STDERR, fmt, ap); av_vlog(NULL, AV_LOG_STDERR, fmt, ap);
va_end(ap); va_end(ap);
@ -671,7 +684,7 @@ static int writer_open(WriterContext **wctx, const Writer *writer, const char *a
goto fail; goto fail;
} }
-    while ((opt = av_dict_get(opts, "", opt, AV_DICT_IGNORE_SUFFIX))) {
+    while ((opt = av_dict_iterate(opts, opt))) {
if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) { if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) {
av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n", av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n",
opt->key, opt->value); opt->key, opt->value);
@ -1907,13 +1920,15 @@ static void writer_register_all(void)
writer_print_string(w, k, pbuf.str, 0); \ writer_print_string(w, k, pbuf.str, 0); \
} while (0) } while (0)
-#define print_list_fmt(k, f, n, ...) do {       \
+#define print_list_fmt(k, f, n, m, ...) do {    \
     av_bprint_clear(&pbuf);                     \
     for (int idx = 0; idx < n; idx++) {         \
-        if (idx > 0)                            \
-            av_bprint_chars(&pbuf, ' ', 1);     \
-        av_bprintf(&pbuf, f, __VA_ARGS__);      \
+        for (int idx2 = 0; idx2 < m; idx2++) {  \
+            if (idx > 0 || idx2 > 0)            \
+                av_bprint_chars(&pbuf, ' ', 1); \
+            av_bprintf(&pbuf, f, __VA_ARGS__);  \
+        }                                       \
     }                                           \
     writer_print_string(w, k, pbuf.str, 0);     \
 } while (0)
@ -1953,7 +1968,7 @@ static inline int show_tags(WriterContext *w, AVDictionary *tags, int section_id
return 0; return 0;
writer_print_section_header(w, section_id); writer_print_section_header(w, section_id);
-    while ((tag = av_dict_get(tags, "", tag, AV_DICT_IGNORE_SUFFIX))) {
+    while ((tag = av_dict_iterate(tags, tag))) {
if ((ret = print_str_validate(tag->key, tag->value)) < 0) if ((ret = print_str_validate(tag->key, tag->value)) < 0)
break; break;
} }
@ -2023,7 +2038,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
const AVDOVIReshapingCurve *curve = &mapping->curves[c]; const AVDOVIReshapingCurve *curve = &mapping->curves[c];
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_COMPONENT);
print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, curve->pivots[idx]); print_list_fmt("pivots", "%"PRIu16, curve->num_pivots, 1, curve->pivots[idx]);
writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST); writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_PIECE_LIST);
for (int i = 0; i < curve->num_pivots - 1; i++) { for (int i = 0; i < curve->num_pivots - 1; i++) {
@ -2035,7 +2050,7 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_str("mapping_idc_name", "polynomial"); print_str("mapping_idc_name", "polynomial");
print_int("poly_order", curve->poly_order[i]); print_int("poly_order", curve->poly_order[i]);
print_list_fmt("poly_coef", "%"PRIi64, print_list_fmt("poly_coef", "%"PRIi64,
curve->poly_order[i] + 1, curve->poly_order[i] + 1, 1,
curve->poly_coef[i][idx]); curve->poly_coef[i][idx]);
break; break;
case AV_DOVI_MAPPING_MMR: case AV_DOVI_MAPPING_MMR:
@ -2043,8 +2058,8 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("mmr_order", curve->mmr_order[i]); print_int("mmr_order", curve->mmr_order[i]);
print_int("mmr_constant", curve->mmr_constant[i]); print_int("mmr_constant", curve->mmr_constant[i]);
print_list_fmt("mmr_coef", "%"PRIi64, print_list_fmt("mmr_coef", "%"PRIi64,
curve->mmr_order[i] * 7, curve->mmr_order[i], 7,
curve->mmr_coef[i][0][idx]); curve->mmr_coef[i][idx][idx2]);
break; break;
default: default:
print_str("mapping_idc_name", "unknown"); print_str("mapping_idc_name", "unknown");
@ -2082,15 +2097,15 @@ static void print_dovi_metadata(WriterContext *w, const AVDOVIMetadata *dovi)
print_int("dm_metadata_id", color->dm_metadata_id); print_int("dm_metadata_id", color->dm_metadata_id);
print_int("scene_refresh_flag", color->scene_refresh_flag); print_int("scene_refresh_flag", color->scene_refresh_flag);
print_list_fmt("ycc_to_rgb_matrix", "%d/%d", print_list_fmt("ycc_to_rgb_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), FF_ARRAY_ELEMS(color->ycc_to_rgb_matrix), 1,
color->ycc_to_rgb_matrix[idx].num, color->ycc_to_rgb_matrix[idx].num,
color->ycc_to_rgb_matrix[idx].den); color->ycc_to_rgb_matrix[idx].den);
print_list_fmt("ycc_to_rgb_offset", "%d/%d", print_list_fmt("ycc_to_rgb_offset", "%d/%d",
FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), FF_ARRAY_ELEMS(color->ycc_to_rgb_offset), 1,
color->ycc_to_rgb_offset[idx].num, color->ycc_to_rgb_offset[idx].num,
color->ycc_to_rgb_offset[idx].den); color->ycc_to_rgb_offset[idx].den);
print_list_fmt("rgb_to_lms_matrix", "%d/%d", print_list_fmt("rgb_to_lms_matrix", "%d/%d",
FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), FF_ARRAY_ELEMS(color->rgb_to_lms_matrix), 1,
color->rgb_to_lms_matrix[idx].num, color->rgb_to_lms_matrix[idx].num,
color->rgb_to_lms_matrix[idx].den); color->rgb_to_lms_matrix[idx].den);
print_int("signal_eotf", color->signal_eotf); print_int("signal_eotf", color->signal_eotf);
@ -2276,6 +2291,17 @@ static void print_dynamic_hdr_vivid(WriterContext *w, const AVDynamicHDRVivid *m
} }
} }
static void print_ambient_viewing_environment(WriterContext *w,
const AVAmbientViewingEnvironment *env)
{
if (!env)
return;
print_q("ambient_illuminance", env->ambient_illuminance, '/');
print_q("ambient_light_x", env->ambient_light_x, '/');
print_q("ambient_light_y", env->ambient_light_y, '/');
}
static void print_pkt_side_data(WriterContext *w, static void print_pkt_side_data(WriterContext *w,
AVCodecParameters *par, AVCodecParameters *par,
const AVPacketSideData *side_data, const AVPacketSideData *side_data,
@ -2293,8 +2319,11 @@ static void print_pkt_side_data(WriterContext *w,
writer_print_section_header(w, id_data); writer_print_section_header(w, id_data);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_PKT_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
         writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
-        print_int("rotation", av_display_rotation_get((int32_t *)sd->data));
+        print_int("rotation", rotation);
} else if (sd->type == AV_PKT_DATA_STEREO3D) { } else if (sd->type == AV_PKT_DATA_STEREO3D) {
const AVStereo3D *stereo = (AVStereo3D *)sd->data; const AVStereo3D *stereo = (AVStereo3D *)sd->data;
print_str("type", av_stereo3d_type_name(stereo->type)); print_str("type", av_stereo3d_type_name(stereo->type));
@ -2506,8 +2535,12 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
print_val("size", pkt->size, unit_byte_str); print_val("size", pkt->size, unit_byte_str);
if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos); if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos);
else print_str_opt("pos", "N/A"); else print_str_opt("pos", "N/A");
print_fmt("flags", "%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_', print_fmt("flags", "%c%c%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_',
pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_'); pkt->flags & AV_PKT_FLAG_DISCARD ? 'D' : '_',
pkt->flags & AV_PKT_FLAG_CORRUPT ? 'C' : '_');
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
if (pkt->side_data_elems) { if (pkt->side_data_elems) {
size_t size; size_t size;
@ -2526,9 +2559,6 @@ static void show_packet(WriterContext *w, InputFile *ifile, AVPacket *pkt, int p
SECTION_ID_PACKET_SIDE_DATA); SECTION_ID_PACKET_SIDE_DATA);
} }
if (do_show_data)
writer_print_data(w, "data", pkt->data, pkt->size);
writer_print_data_hash(w, "data_hash", pkt->data, pkt->size);
writer_print_section_footer(w); writer_print_section_footer(w);
av_bprint_finalize(&pbuf, NULL); av_bprint_finalize(&pbuf, NULL);
@ -2581,8 +2611,14 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base); print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base);
print_ts ("best_effort_timestamp", frame->best_effort_timestamp); print_ts ("best_effort_timestamp", frame->best_effort_timestamp);
print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base); print_time("best_effort_timestamp_time", frame->best_effort_timestamp, &stream->time_base);
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_duration_ts ("pkt_duration", frame->pkt_duration); print_duration_ts ("pkt_duration", frame->pkt_duration);
print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base); print_duration_time("pkt_duration_time", frame->pkt_duration, &stream->time_base);
)
#endif
print_duration_ts ("duration", frame->duration);
print_duration_time("duration_time", frame->duration, &stream->time_base);
if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos); if (frame->pkt_pos != -1) print_fmt ("pkt_pos", "%"PRId64, frame->pkt_pos);
else print_str_opt("pkt_pos", "N/A"); else print_str_opt("pkt_pos", "N/A");
if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str); if (frame->pkt_size != -1) print_val ("pkt_size", frame->pkt_size, unit_byte_str);
@ -2604,8 +2640,12 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
print_str_opt("sample_aspect_ratio", "N/A"); print_str_opt("sample_aspect_ratio", "N/A");
} }
print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type)); print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type));
#if LIBAVUTIL_VERSION_MAJOR < 59
AV_NOWARN_DEPRECATED(
print_int("coded_picture_number", frame->coded_picture_number); print_int("coded_picture_number", frame->coded_picture_number);
print_int("display_picture_number", frame->display_picture_number); print_int("display_picture_number", frame->display_picture_number);
)
#endif
print_int("interlaced_frame", frame->interlaced_frame); print_int("interlaced_frame", frame->interlaced_frame);
print_int("top_field_first", frame->top_field_first); print_int("top_field_first", frame->top_field_first);
print_int("repeat_pict", frame->repeat_pict); print_int("repeat_pict", frame->repeat_pict);
@ -2644,8 +2684,11 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
name = av_frame_side_data_name(sd->type); name = av_frame_side_data_name(sd->type);
print_str("side_data_type", name ? name : "unknown"); print_str("side_data_type", name ? name : "unknown");
if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) { if (sd->type == AV_FRAME_DATA_DISPLAYMATRIX && sd->size >= 9*4) {
double rotation = av_display_rotation_get((int32_t *)sd->data);
if (isnan(rotation))
rotation = 0;
             writer_print_integers(w, "displaymatrix", sd->data, 9, " %11d", 3, 4, 1);
-            print_int("rotation", av_display_rotation_get((int32_t *)sd->data));
+            print_int("rotation", rotation);
} else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) { } else if (sd->type == AV_FRAME_DATA_AFD && sd->size > 0) {
print_int("active_format", *sd->data); print_int("active_format", *sd->data);
} else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) { } else if (sd->type == AV_FRAME_DATA_GOP_TIMECODE && sd->size >= 8) {
@ -2700,6 +2743,9 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
} else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) { } else if (sd->type == AV_FRAME_DATA_DYNAMIC_HDR_VIVID) {
AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data; AVDynamicHDRVivid *metadata = (AVDynamicHDRVivid *)sd->data;
print_dynamic_hdr_vivid(w, metadata); print_dynamic_hdr_vivid(w, metadata);
} else if (sd->type == AV_FRAME_DATA_AMBIENT_VIEWING_ENVIRONMENT) {
print_ambient_viewing_environment(
w, (const AVAmbientViewingEnvironment *)sd->data);
} }
writer_print_section_footer(w); writer_print_section_footer(w);
} }
@ -2714,7 +2760,7 @@ static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream,
static av_always_inline int process_frame(WriterContext *w, static av_always_inline int process_frame(WriterContext *w,
InputFile *ifile, InputFile *ifile,
AVFrame *frame, AVPacket *pkt, AVFrame *frame, const AVPacket *pkt,
int *packet_new) int *packet_new)
{ {
AVFormatContext *fmt_ctx = ifile->fmt_ctx; AVFormatContext *fmt_ctx = ifile->fmt_ctx;
@ -2858,9 +2904,10 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
if (selected_streams[pkt->stream_index]) { if (selected_streams[pkt->stream_index]) {
AVRational tb = ifile->streams[pkt->stream_index].st->time_base; AVRational tb = ifile->streams[pkt->stream_index].st->time_base;
int64_t pts = pkt->pts != AV_NOPTS_VALUE ? pkt->pts : pkt->dts;
if (pkt->pts != AV_NOPTS_VALUE) if (pts != AV_NOPTS_VALUE)
*cur_ts = av_rescale_q(pkt->pts, tb, AV_TIME_BASE_Q); *cur_ts = av_rescale_q(pts, tb, AV_TIME_BASE_Q);
if (!has_start && *cur_ts != AV_NOPTS_VALUE) { if (!has_start && *cur_ts != AV_NOPTS_VALUE) {
start = *cur_ts; start = *cur_ts;
@ -2894,7 +2941,7 @@ static int read_interval_packets(WriterContext *w, InputFile *ifile,
} }
av_packet_unref(pkt); av_packet_unref(pkt);
//Flush remaining frames that are cached in the decoder //Flush remaining frames that are cached in the decoder
for (i = 0; i < fmt_ctx->nb_streams; i++) { for (i = 0; i < ifile->nb_streams; i++) {
pkt->stream_index = i; pkt->stream_index = i;
if (do_read_frames) { if (do_read_frames) {
while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0); while (process_frame(w, ifile, frame, pkt, &(int){1}) > 0);
@ -3052,6 +3099,8 @@ static int show_stream(WriterContext *w, AVFormatContext *fmt_ctx, int stream_id
} }
print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id)); print_int("bits_per_sample", av_get_bits_per_sample(par->codec_id));
print_int("initial_padding", par->initial_padding);
break; break;
case AVMEDIA_TYPE_SUBTITLE: case AVMEDIA_TYPE_SUBTITLE:
@ -3278,15 +3327,9 @@ static int show_format(WriterContext *w, InputFile *ifile)
 static void show_error(WriterContext *w, int err)
 {
-    char errbuf[128];
-    const char *errbuf_ptr = errbuf;
-    if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
-        errbuf_ptr = strerror(AVUNERROR(err));
     writer_print_section_header(w, SECTION_ID_ERROR);
     print_int("code", err);
-    print_str("string", errbuf_ptr);
+    print_str("string", av_err2str(err));
     writer_print_section_footer(w);
 }
@ -3299,10 +3342,8 @@ static int open_input_file(InputFile *ifile, const char *filename,
int scan_all_pmts_set = 0; int scan_all_pmts_set = 0;
fmt_ctx = avformat_alloc_context(); fmt_ctx = avformat_alloc_context();
-    if (!fmt_ctx) {
-        print_error(filename, AVERROR(ENOMEM));
-        exit_program(1);
-    }
+    if (!fmt_ctx)
+        report_and_exit(AVERROR(ENOMEM));
if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) { if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE); av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
@ -3320,7 +3361,7 @@ static int open_input_file(InputFile *ifile, const char *filename,
ifile->fmt_ctx = fmt_ctx; ifile->fmt_ctx = fmt_ctx;
if (scan_all_pmts_set) if (scan_all_pmts_set)
av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE); av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE);
-    while ((t = av_dict_get(format_opts, "", t, AV_DICT_IGNORE_SUFFIX)))
+    while ((t = av_dict_iterate(format_opts, t)))
av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key); av_log(NULL, AV_LOG_WARNING, "Option %s skipped - not known to demuxer.\n", t->key);
if (find_stream_info) { if (find_stream_info) {
@ -3718,7 +3759,7 @@ static void opt_input_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
input_filename = arg; input_filename = arg;
} }
@ -3737,7 +3778,7 @@ static void opt_output_file(void *optctx, const char *arg)
exit_program(1); exit_program(1);
} }
if (!strcmp(arg, "-")) if (!strcmp(arg, "-"))
arg = "pipe:"; arg = "fd:";
output_filename = arg; output_filename = arg;
} }
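On the displaymatrix changes above: av_display_rotation_get() returns NaN for a degenerate (for example all-zero) matrix, which the new ffprobe code now clamps to 0 before printing. The guard in isolation, as a small sketch (helper name is illustrative):

#include <math.h>
#include <stdint.h>
#include "libavutil/display.h"

/* Rotation encoded in a 3x3 display matrix, with a degenerate matrix treated as 0. */
static double safe_display_rotation(const int32_t matrix[9])
{
    double rotation = av_display_rotation_get(matrix);
    return isnan(rotation) ? 0.0 : rotation;
}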

@ -0,0 +1,145 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of objpool.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/common.h"
#include "libavutil/error.h"
#include "libavutil/frame.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
struct ObjPool {
void *pool[32];
unsigned int pool_count;
ObjPoolCBAlloc alloc;
ObjPoolCBReset reset;
ObjPoolCBFree free;
};
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free)
{
ObjPool *op = av_mallocz(sizeof(*op));
if (!op)
return NULL;
op->alloc = cb_alloc;
op->reset = cb_reset;
op->free = cb_free;
return op;
}
void objpool_free(ObjPool **pop)
{
ObjPool *op = *pop;
if (!op)
return;
for (unsigned int i = 0; i < op->pool_count; i++)
op->free(&op->pool[i]);
av_freep(pop);
}
int objpool_get(ObjPool *op, void **obj)
{
if (op->pool_count) {
*obj = op->pool[--op->pool_count];
op->pool[op->pool_count] = NULL;
} else
*obj = op->alloc();
return *obj ? 0 : AVERROR(ENOMEM);
}
void objpool_release(ObjPool *op, void **obj)
{
if (!*obj)
return;
op->reset(*obj);
if (op->pool_count < FF_ARRAY_ELEMS(op->pool))
op->pool[op->pool_count++] = *obj;
else
op->free(obj);
*obj = NULL;
}
static void *alloc_packet(void)
{
return av_packet_alloc();
}
static void *alloc_frame(void)
{
return av_frame_alloc();
}
static void reset_packet(void *obj)
{
av_packet_unref(obj);
}
static void reset_frame(void *obj)
{
av_frame_unref(obj);
}
static void free_packet(void **obj)
{
AVPacket *pkt = *obj;
av_packet_free(&pkt);
*obj = NULL;
}
static void free_frame(void **obj)
{
AVFrame *frame = *obj;
av_frame_free(&frame);
*obj = NULL;
}
ObjPool *objpool_alloc_packets(void)
{
return objpool_alloc(alloc_packet, reset_packet, free_packet);
}
ObjPool *objpool_alloc_frames(void)
{
return objpool_alloc(alloc_frame, reset_frame, free_frame);
}
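For context, a short usage sketch of the pool above, recycling AVPackets across loop iterations (the demo function is illustrative; the real callers are the muxer/demuxer thread queues):

#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_objpool.h"

static int objpool_demo(void)
{
    ObjPool *op = objpool_alloc_packets();
    void *obj;

    if (!op)
        return AVERROR(ENOMEM);

    for (int i = 0; i < 4; i++) {
        int ret = objpool_get(op, &obj);    /* reuses a pooled packet when one is available */
        if (ret < 0) {
            objpool_free(&op);
            return ret;
        }
        AVPacket *pkt = obj;                /* ready to be filled and consumed */
        (void)pkt;
        objpool_release(op, &obj);          /* resets (unrefs) it and returns it to the pool */
    }

    objpool_free(&op);
    return 0;
}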

@ -0,0 +1,50 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of objpool.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_OBJPOOL_H
#define FFTOOLS_OBJPOOL_H
typedef struct ObjPool ObjPool;
typedef void* (*ObjPoolCBAlloc)(void);
typedef void (*ObjPoolCBReset)(void *);
typedef void (*ObjPoolCBFree)(void **);
void objpool_free(ObjPool **op);
ObjPool *objpool_alloc(ObjPoolCBAlloc cb_alloc, ObjPoolCBReset cb_reset,
ObjPoolCBFree cb_free);
ObjPool *objpool_alloc_packets(void);
ObjPool *objpool_alloc_frames(void);
int objpool_get(ObjPool *op, void **obj);
void objpool_release(ObjPool *op, void **obj);
#endif // FFTOOLS_OBJPOOL_H

@ -1,6 +1,7 @@
 /*
  * Option handlers shared between the tools.
  * Copyright (c) 2022 Taner Sener
+ * Copyright (c) 2023 ARTHENICA LTD
  *
  * This file is part of FFmpeg.
  *
@ -24,6 +25,12 @@
  * manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
  * by us to develop the ffmpeg-kit library.
  *
+ * ffmpeg-kit changes by ARTHENICA LTD
+ *
+ * 07.2023
+ * --------------------------------------------------------
+ * - time field in report_callback updated as double
+ *
  * ffmpeg-kit changes by Taner Sener
  *
  * 09.2022
@ -88,7 +95,7 @@ static __thread FILE *report_file = NULL;
 static __thread int report_file_level = AV_LOG_DEBUG;
 extern void ffmpegkit_log_callback_function(void *ptr, int level, const char* format, va_list vargs);
-extern void (*report_callback)(int, float, float, int64_t, int, double, double);
+extern void (*report_callback)(int, float, float, int64_t, double, double, double);
 extern __thread char *program_name;
 int show_license(void *optctx, const char *opt, const char *arg)

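The only functional change to fftools_opt_common.c is the report_callback prototype: the fifth argument, previously an int, becomes a double, matching the "time field in report_callback updated as double" note above. Below is a hedged sketch of a callback with the new signature; the parameter names are assumptions based on ffmpeg-kit's Statistics fields and are not spelled out in this diff:

/* Assumed meanings: frame number, fps, quality, size, time, bitrate, speed.
 * Only the types are confirmed by the signature change above. */
static void example_statistics_callback(int frame_number, float fps, float quality,
                                        int64_t size, double time, double bitrate,
                                        double speed)
{
    /* forward the values to the Statistics layer; 'time' now keeps its
     * fractional part instead of being truncated to an int */
}

Installing it amounts to assigning the function to the report_callback pointer declared above.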
@ -0,0 +1,462 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of sync_queue.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/mathematics.h"
#include "libavutil/mem.h"
#include "fftools_objpool.h"
#include "fftools_sync_queue.h"
typedef struct SyncQueueStream {
AVFifo *fifo;
AVRational tb;
/* stream head: largest timestamp seen */
int64_t head_ts;
int limiting;
/* no more frames will be sent for this stream */
int finished;
uint64_t frames_sent;
uint64_t frames_max;
} SyncQueueStream;
struct SyncQueue {
enum SyncQueueType type;
/* no more frames will be sent for any stream */
int finished;
/* sync head: the stream with the _smallest_ head timestamp
* this stream determines which frames can be output */
int head_stream;
/* the finished stream with the smallest finish timestamp or -1 */
int head_finished_stream;
// maximum buffering duration in microseconds
int64_t buf_size_us;
SyncQueueStream *streams;
unsigned int nb_streams;
// pool of preallocated frames to avoid constant allocations
ObjPool *pool;
};
static void frame_move(const SyncQueue *sq, SyncQueueFrame dst,
SyncQueueFrame src)
{
if (sq->type == SYNC_QUEUE_PACKETS)
av_packet_move_ref(dst.p, src.p);
else
av_frame_move_ref(dst.f, src.f);
}
static int64_t frame_ts(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ?
frame.p->pts + frame.p->duration :
frame.f->pts + frame.f->duration;
}
static int frame_null(const SyncQueue *sq, SyncQueueFrame frame)
{
return (sq->type == SYNC_QUEUE_PACKETS) ? (frame.p == NULL) : (frame.f == NULL);
}
static void finish_stream(SyncQueue *sq, unsigned int stream_idx)
{
SyncQueueStream *st = &sq->streams[stream_idx];
st->finished = 1;
if (st->limiting && st->head_ts != AV_NOPTS_VALUE) {
/* check if this stream is the new finished head */
if (sq->head_finished_stream < 0 ||
av_compare_ts(st->head_ts, st->tb,
sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb) < 0) {
sq->head_finished_stream = stream_idx;
}
/* mark as finished all streams that should no longer receive new frames,
* due to them being ahead of some finished stream */
st = &sq->streams[sq->head_finished_stream];
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
if (st != st1 && st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st->head_ts, st->tb, st1->head_ts, st1->tb) <= 0)
st1->finished = 1;
}
}
/* mark the whole queue as finished if all streams are finished */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
if (!sq->streams[i].finished)
return;
}
sq->finished = 1;
}
static void queue_head_update(SyncQueue *sq)
{
if (sq->head_stream < 0) {
/* wait for one timestamp in each stream before determining
* the queue head */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st = &sq->streams[i];
if (st->limiting && st->head_ts == AV_NOPTS_VALUE)
return;
}
// placeholder value, correct one will be found below
sq->head_stream = 0;
}
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st_head = &sq->streams[sq->head_stream];
SyncQueueStream *st_other = &sq->streams[i];
if (st_other->limiting && st_other->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(st_other->head_ts, st_other->tb,
st_head->head_ts, st_head->tb) < 0)
sq->head_stream = i;
}
}
/* update this stream's head timestamp */
static void stream_update_ts(SyncQueue *sq, unsigned int stream_idx, int64_t ts)
{
SyncQueueStream *st = &sq->streams[stream_idx];
if (ts == AV_NOPTS_VALUE ||
(st->head_ts != AV_NOPTS_VALUE && st->head_ts >= ts))
return;
st->head_ts = ts;
/* if this stream is now ahead of some finished stream, then
* this stream is also finished */
if (sq->head_finished_stream >= 0 &&
av_compare_ts(sq->streams[sq->head_finished_stream].head_ts,
sq->streams[sq->head_finished_stream].tb,
ts, st->tb) <= 0)
finish_stream(sq, stream_idx);
/* update the overall head timestamp if it could have changed */
if (st->limiting &&
(sq->head_stream < 0 || sq->head_stream == stream_idx))
queue_head_update(sq);
}
/* If the queue for the given stream (or all streams when stream_idx=-1)
* is overflowing, trigger a fake heartbeat on lagging streams.
*
* @return 1 if heartbeat triggered, 0 otherwise
*/
static int overflow_heartbeat(SyncQueue *sq, int stream_idx)
{
SyncQueueStream *st;
SyncQueueFrame frame;
int64_t tail_ts = AV_NOPTS_VALUE;
/* if no stream specified, pick the one that is most ahead */
if (stream_idx < 0) {
int64_t ts = AV_NOPTS_VALUE;
for (int i = 0; i < sq->nb_streams; i++) {
st = &sq->streams[i];
if (st->head_ts != AV_NOPTS_VALUE &&
(ts == AV_NOPTS_VALUE ||
av_compare_ts(ts, sq->streams[stream_idx].tb,
st->head_ts, st->tb) < 0)) {
ts = st->head_ts;
stream_idx = i;
}
}
/* no stream has a timestamp yet -> nothing to do */
if (stream_idx < 0)
return 0;
}
st = &sq->streams[stream_idx];
/* get the chosen stream's tail timestamp */
for (size_t i = 0; tail_ts == AV_NOPTS_VALUE &&
av_fifo_peek(st->fifo, &frame, 1, i) >= 0; i++)
tail_ts = frame_ts(sq, frame);
/* overflow triggers when the tail is over specified duration behind the head */
if (tail_ts == AV_NOPTS_VALUE || tail_ts >= st->head_ts ||
av_rescale_q(st->head_ts - tail_ts, st->tb, AV_TIME_BASE_Q) < sq->buf_size_us)
return 0;
/* signal a fake timestamp for all streams that prevent tail_ts from being output */
tail_ts++;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueStream *st1 = &sq->streams[i];
int64_t ts;
if (st == st1 || st1->finished ||
(st1->head_ts != AV_NOPTS_VALUE &&
av_compare_ts(tail_ts, st->tb, st1->head_ts, st1->tb) <= 0))
continue;
ts = av_rescale_q(tail_ts, st->tb, st1->tb);
if (st1->head_ts != AV_NOPTS_VALUE)
ts = FFMAX(st1->head_ts + 1, ts);
stream_update_ts(sq, i, ts);
}
return 1;
}
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame)
{
SyncQueueStream *st;
SyncQueueFrame dst;
int64_t ts;
int ret;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(st->tb.num > 0 && st->tb.den > 0);
if (frame_null(sq, frame)) {
finish_stream(sq, stream_idx);
return 0;
}
if (st->finished)
return AVERROR_EOF;
ret = objpool_get(sq->pool, (void**)&dst);
if (ret < 0)
return ret;
frame_move(sq, dst, frame);
ts = frame_ts(sq, dst);
ret = av_fifo_write(st->fifo, &dst, 1);
if (ret < 0) {
frame_move(sq, frame, dst);
objpool_release(sq->pool, (void**)&dst);
return ret;
}
stream_update_ts(sq, stream_idx, ts);
st->frames_sent++;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
return 0;
}
static int receive_for_stream(SyncQueue *sq, unsigned int stream_idx,
SyncQueueFrame frame)
{
SyncQueueStream *st_head = sq->head_stream >= 0 ?
&sq->streams[sq->head_stream] : NULL;
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
if (av_fifo_can_read(st->fifo)) {
SyncQueueFrame peek;
int64_t ts;
int cmp = 1;
av_fifo_peek(st->fifo, &peek, 1, 0);
ts = frame_ts(sq, peek);
/* check if this stream's tail timestamp does not overtake
* the overall queue head */
if (ts != AV_NOPTS_VALUE && st_head)
cmp = av_compare_ts(ts, st->tb, st_head->head_ts, st_head->tb);
/* We can release frames that do not end after the queue head.
* Frames with no timestamps are just passed through with no conditions.
*/
if (cmp <= 0 || ts == AV_NOPTS_VALUE) {
frame_move(sq, frame, peek);
objpool_release(sq->pool, (void**)&peek);
av_fifo_drain2(st->fifo, 1);
return 0;
}
}
return (sq->finished || (st->finished && !av_fifo_can_read(st->fifo))) ?
AVERROR_EOF : AVERROR(EAGAIN);
}
static int receive_internal(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int nb_eof = 0;
int ret;
/* read a frame for a specific stream */
if (stream_idx >= 0) {
ret = receive_for_stream(sq, stream_idx, frame);
return (ret < 0) ? ret : stream_idx;
}
/* read a frame for any stream with available output */
for (unsigned int i = 0; i < sq->nb_streams; i++) {
ret = receive_for_stream(sq, i, frame);
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
nb_eof += (ret == AVERROR_EOF);
continue;
}
return (ret < 0) ? ret : i;
}
return (nb_eof == sq->nb_streams) ? AVERROR_EOF : AVERROR(EAGAIN);
}
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame)
{
int ret = receive_internal(sq, stream_idx, frame);
/* try again if the queue overflowed and triggered a fake heartbeat
* for lagging streams */
if (ret == AVERROR(EAGAIN) && overflow_heartbeat(sq, stream_idx))
ret = receive_internal(sq, stream_idx, frame);
return ret;
}
int sq_add_stream(SyncQueue *sq, int limiting)
{
SyncQueueStream *tmp, *st;
tmp = av_realloc_array(sq->streams, sq->nb_streams + 1, sizeof(*sq->streams));
if (!tmp)
return AVERROR(ENOMEM);
sq->streams = tmp;
st = &sq->streams[sq->nb_streams];
memset(st, 0, sizeof(*st));
st->fifo = av_fifo_alloc2(1, sizeof(SyncQueueFrame), AV_FIFO_FLAG_AUTO_GROW);
if (!st->fifo)
return AVERROR(ENOMEM);
/* we set a valid default, so that a pathological stream that never
* receives even a real timebase (and no frames) won't stall all other
* streams forever; cf. overflow_heartbeat() */
st->tb = (AVRational){ 1, 1 };
st->head_ts = AV_NOPTS_VALUE;
st->frames_max = UINT64_MAX;
st->limiting = limiting;
return sq->nb_streams++;
}
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
av_assert0(!av_fifo_can_read(st->fifo));
if (st->head_ts != AV_NOPTS_VALUE)
st->head_ts = av_rescale_q(st->head_ts, st->tb, tb);
st->tb = tb;
}
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx, uint64_t frames)
{
SyncQueueStream *st;
av_assert0(stream_idx < sq->nb_streams);
st = &sq->streams[stream_idx];
st->frames_max = frames;
if (st->frames_sent >= st->frames_max)
finish_stream(sq, stream_idx);
}
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us)
{
SyncQueue *sq = av_mallocz(sizeof(*sq));
if (!sq)
return NULL;
sq->type = type;
sq->buf_size_us = buf_size_us;
sq->head_stream = -1;
sq->head_finished_stream = -1;
sq->pool = (type == SYNC_QUEUE_PACKETS) ? objpool_alloc_packets() :
objpool_alloc_frames();
if (!sq->pool) {
av_freep(&sq);
return NULL;
}
return sq;
}
void sq_free(SyncQueue **psq)
{
SyncQueue *sq = *psq;
if (!sq)
return;
for (unsigned int i = 0; i < sq->nb_streams; i++) {
SyncQueueFrame frame;
while (av_fifo_read(sq->streams[i].fifo, &frame, 1) >= 0)
objpool_release(sq->pool, (void**)&frame);
av_fifo_freep2(&sq->streams[i].fifo);
}
av_freep(&sq->streams);
objpool_free(&sq->pool);
av_freep(psq);
}

@ -0,0 +1,122 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of sync_queue.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_SYNC_QUEUE_H
#define FFTOOLS_SYNC_QUEUE_H
#include <stdint.h>
#include "libavcodec/packet.h"
#include "libavutil/frame.h"
enum SyncQueueType {
SYNC_QUEUE_PACKETS,
SYNC_QUEUE_FRAMES,
};
typedef union SyncQueueFrame {
AVFrame *f;
AVPacket *p;
} SyncQueueFrame;
#define SQFRAME(frame) ((SyncQueueFrame){ .f = (frame) })
#define SQPKT(pkt) ((SyncQueueFrame){ .p = (pkt) })
typedef struct SyncQueue SyncQueue;
/**
* Allocate a sync queue of the given type.
*
* @param buf_size_us maximum duration that will be buffered in microseconds
*/
SyncQueue *sq_alloc(enum SyncQueueType type, int64_t buf_size_us);
void sq_free(SyncQueue **sq);
/**
* Add a new stream to the sync queue.
*
* @param limiting whether the stream is limiting, i.e. no other stream can be
* longer than this one
* @return
* - a non-negative stream index on success
* - a negative error code on error
*/
int sq_add_stream(SyncQueue *sq, int limiting);
/**
* Set the timebase for the stream with index stream_idx. Should be called
* before sending any frames for this stream.
*/
void sq_set_tb(SyncQueue *sq, unsigned int stream_idx, AVRational tb);
/**
* Limit the number of output frames for stream with index stream_idx
* to max_frames.
*/
void sq_limit_frames(SyncQueue *sq, unsigned int stream_idx,
uint64_t max_frames);
/**
* Submit a frame for the stream with index stream_idx.
*
* On success, the sync queue takes ownership of the frame and will reset the
* contents of the supplied frame. On failure, the frame remains owned by the
* caller.
*
* Sending a frame with NULL contents marks the stream as finished.
*
* @return
* - 0 on success
* - AVERROR_EOF when no more frames should be submitted for this stream
 * - another negative error code on failure
*/
int sq_send(SyncQueue *sq, unsigned int stream_idx, SyncQueueFrame frame);
/**
* Read a frame from the queue.
*
* @param stream_idx index of the stream to read a frame for. May be -1, then
* try to read a frame from any stream that is ready for
* output.
* @param frame output frame will be written here on success. The frame is owned
* by the caller.
*
* @return
* - a non-negative index of the stream to which the returned frame belongs
* - AVERROR(EAGAIN) when more frames need to be submitted to the queue
* - AVERROR_EOF when no more frames will be available for this stream (for any
* stream if stream_idx is -1)
* - another negative error code on failure
*/
int sq_receive(SyncQueue *sq, int stream_idx, SyncQueueFrame frame);
#endif // FFTOOLS_SYNC_QUEUE_H

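sq_send()/sq_receive() above are the FFmpeg 6.0 synchronisation primitives used by the new fftools muxer to interleave streams. A minimal sketch of the intended call pattern for packets, assuming a single limiting stream with a 90 kHz timebase (all concrete values are illustrative, not taken from this commit):

#include "libavcodec/packet.h"
#include "libavutil/error.h"
#include "fftools_sync_queue.h"

/* Illustrative only: push one packet and try to pull whatever is ready. */
static int sync_queue_roundtrip(AVPacket *in, AVPacket *out)
{
    SyncQueue *sq = sq_alloc(SYNC_QUEUE_PACKETS, 10000000 /* ~10s of buffering */);
    int idx, ret;

    if (!sq)
        return AVERROR(ENOMEM);

    idx = sq_add_stream(sq, 1);                    /* 1 = limiting stream */
    if (idx < 0) {
        sq_free(&sq);
        return idx;
    }
    sq_set_tb(sq, idx, (AVRational){ 1, 90000 });  /* must be set before sending */

    ret = sq_send(sq, idx, SQPKT(in));             /* queue owns the packet on success */
    if (ret >= 0)
        ret = sq_receive(sq, -1, SQPKT(out));      /* -1: any stream that is ready */
    /* ret >= 0: stream index; AVERROR(EAGAIN): feed more data; AVERROR_EOF: finished */

    sq_free(&sq);
    return ret;
}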
@ -0,0 +1,259 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of thread_queue.c file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
* - fftools header names updated
*/
#include <stdint.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/error.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mem.h"
#include "libavutil/thread.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"
enum {
FINISHED_SEND = (1 << 0),
FINISHED_RECV = (1 << 1),
};
typedef struct FifoElem {
void *obj;
unsigned int stream_idx;
} FifoElem;
struct ThreadQueue {
int *finished;
unsigned int nb_streams;
AVFifo *fifo;
ObjPool *obj_pool;
void (*obj_move)(void *dst, void *src);
pthread_mutex_t lock;
pthread_cond_t cond;
};
void tq_free(ThreadQueue **ptq)
{
ThreadQueue *tq = *ptq;
if (!tq)
return;
if (tq->fifo) {
FifoElem elem;
while (av_fifo_read(tq->fifo, &elem, 1) >= 0)
objpool_release(tq->obj_pool, &elem.obj);
}
av_fifo_freep2(&tq->fifo);
objpool_free(&tq->obj_pool);
av_freep(&tq->finished);
pthread_cond_destroy(&tq->cond);
pthread_mutex_destroy(&tq->lock);
av_freep(ptq);
}
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src))
{
ThreadQueue *tq;
int ret;
tq = av_mallocz(sizeof(*tq));
if (!tq)
return NULL;
ret = pthread_cond_init(&tq->cond, NULL);
if (ret) {
av_freep(&tq);
return NULL;
}
ret = pthread_mutex_init(&tq->lock, NULL);
if (ret) {
pthread_cond_destroy(&tq->cond);
av_freep(&tq);
return NULL;
}
tq->finished = av_calloc(nb_streams, sizeof(*tq->finished));
if (!tq->finished)
goto fail;
tq->nb_streams = nb_streams;
tq->fifo = av_fifo_alloc2(queue_size, sizeof(FifoElem), 0);
if (!tq->fifo)
goto fail;
tq->obj_pool = obj_pool;
tq->obj_move = obj_move;
return tq;
fail:
tq_free(&tq);
return NULL;
}
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data)
{
int *finished;
int ret;
av_assert0(stream_idx < tq->nb_streams);
finished = &tq->finished[stream_idx];
pthread_mutex_lock(&tq->lock);
if (*finished & FINISHED_SEND) {
ret = AVERROR(EINVAL);
goto finish;
}
while (!(*finished & FINISHED_RECV) && !av_fifo_can_write(tq->fifo))
pthread_cond_wait(&tq->cond, &tq->lock);
if (*finished & FINISHED_RECV) {
ret = AVERROR_EOF;
*finished |= FINISHED_SEND;
} else {
FifoElem elem = { .stream_idx = stream_idx };
ret = objpool_get(tq->obj_pool, &elem.obj);
if (ret < 0)
goto finish;
tq->obj_move(elem.obj, data);
ret = av_fifo_write(tq->fifo, &elem, 1);
av_assert0(ret >= 0);
pthread_cond_broadcast(&tq->cond);
}
finish:
pthread_mutex_unlock(&tq->lock);
return ret;
}
static int receive_locked(ThreadQueue *tq, int *stream_idx,
void *data)
{
FifoElem elem;
unsigned int nb_finished = 0;
if (av_fifo_read(tq->fifo, &elem, 1) >= 0) {
tq->obj_move(data, elem.obj);
objpool_release(tq->obj_pool, &elem.obj);
*stream_idx = elem.stream_idx;
return 0;
}
for (unsigned int i = 0; i < tq->nb_streams; i++) {
if (!(tq->finished[i] & FINISHED_SEND))
continue;
/* return EOF to the consumer at most once for each stream */
if (!(tq->finished[i] & FINISHED_RECV)) {
tq->finished[i] |= FINISHED_RECV;
*stream_idx = i;
return AVERROR_EOF;
}
nb_finished++;
}
return nb_finished == tq->nb_streams ? AVERROR_EOF : AVERROR(EAGAIN);
}
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data)
{
int ret;
*stream_idx = -1;
pthread_mutex_lock(&tq->lock);
while (1) {
ret = receive_locked(tq, stream_idx, data);
if (ret == AVERROR(EAGAIN)) {
pthread_cond_wait(&tq->cond, &tq->lock);
continue;
}
break;
}
if (ret == 0)
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
return ret;
}
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as send-finished;
* next time the consumer thread tries to read this stream it will get
* an EOF and recv-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_SEND;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx)
{
av_assert0(stream_idx < tq->nb_streams);
pthread_mutex_lock(&tq->lock);
/* mark the stream as recv-finished;
* next time the producer thread tries to send for this stream, it will
* get an EOF and send-finished flag will be set */
tq->finished[stream_idx] |= FINISHED_RECV;
pthread_cond_broadcast(&tq->cond);
pthread_mutex_unlock(&tq->lock);
}

@ -0,0 +1,94 @@
/*
* This file is part of FFmpeg.
* Copyright (c) 2023 ARTHENICA LTD
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* This file is the modified version of thread_queue.h file living in ffmpeg source code under the fftools folder. We
* manually update it each time we depend on a new ffmpeg version. Below you can see the list of changes applied
* by us to develop ffmpeg-kit library.
*
* ffmpeg-kit changes by ARTHENICA LTD
*
* 07.2023
* --------------------------------------------------------
* - FFmpeg 6.0 changes migrated
*/
#ifndef FFTOOLS_THREAD_QUEUE_H
#define FFTOOLS_THREAD_QUEUE_H
#include <string.h>
#include "fftools_objpool.h"
typedef struct ThreadQueue ThreadQueue;
/**
* Allocate a queue for sending data between threads.
*
* @param nb_streams number of streams for which a distinct EOF state is
* maintained
* @param queue_size number of items that can be stored in the queue without
* blocking
* @param obj_pool object pool that will be used to allocate items stored in the
* queue; the pool becomes owned by the queue
 * @param obj_move callback that moves the contents between two data pointers
*/
ThreadQueue *tq_alloc(unsigned int nb_streams, size_t queue_size,
ObjPool *obj_pool, void (*obj_move)(void *dst, void *src));
void tq_free(ThreadQueue **tq);
/**
* Send an item for the given stream to the queue.
*
* @param data the item to send, its contents will be moved using the callback
* provided to tq_alloc(); on failure the item will be left
* untouched
* @return
* - 0 the item was successfully sent
* - AVERROR(ENOMEM) could not allocate an item for writing to the FIFO
* - AVERROR(EINVAL) the sending side has previously been marked as finished
* - AVERROR_EOF the receiving side has marked the given stream as finished
*/
int tq_send(ThreadQueue *tq, unsigned int stream_idx, void *data);
/**
* Mark the given stream finished from the sending side.
*/
void tq_send_finish(ThreadQueue *tq, unsigned int stream_idx);
/**
* Read the next item from the queue.
*
* @param stream_idx the index of the stream that was processed or -1 will be
* written here
* @param data the data item will be written here on success using the
* callback provided to tq_alloc()
* @return
* - 0 a data item was successfully read; *stream_idx contains a non-negative
* stream index
* - AVERROR_EOF When *stream_idx is non-negative, this signals that the sending
* side has marked the given stream as finished. This will happen at most once
* for each stream. When *stream_idx is -1, all streams are done.
*/
int tq_receive(ThreadQueue *tq, int *stream_idx, void *data);
/**
* Mark the given stream finished from the receiving side.
*/
void tq_receive_finish(ThreadQueue *tq, unsigned int stream_idx);
#endif // FFTOOLS_THREAD_QUEUE_H

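ThreadQueue is the blocking FIFO FFmpeg 6.0 uses to hand objects between the fftools demuxer/muxer threads, and it pairs with the object pool declared earlier. A small sketch for a single packet stream; the helper names and the queue size are assumptions, not part of this commit:

#include "libavcodec/packet.h"
#include "fftools_objpool.h"
#include "fftools_thread_queue.h"

/* Move callback handed to tq_alloc(): transfers packet contents without copying data. */
static void pkt_move(void *dst, void *src)
{
    av_packet_move_ref(dst, src);
}

/* Hypothetical constructor: one stream, room for 8 packets before tq_send() blocks. */
static ThreadQueue *packet_queue_alloc(void)
{
    ObjPool *pool = objpool_alloc_packets();
    ThreadQueue *tq;

    if (!pool)
        return NULL;
    tq = tq_alloc(1, 8, pool, pkt_move);  /* on success the queue takes ownership of the pool */
    if (!tq)
        objpool_free(&pool);              /* tq_alloc does not adopt the pool on failure */
    return tq;
}

/* Producer thread: tq_send(tq, 0, pkt) per packet, then tq_send_finish(tq, 0).
 * Consumer thread: loop on tq_receive(tq, &idx, pkt) until it returns AVERROR_EOF. */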
@ -1,7 +1,7 @@
 #!/bin/bash
 # INIT SUBMODULES
-${SED_INLINE} 's|git://git.savannah.gnu.org|https://github.com/tanersener|g' "${BASEDIR}"/src/"${LIB_NAME}"/.gitmodules || return 1
+${SED_INLINE} 's|git://git.savannah.gnu.org|https://github.com/arthenica|g' "${BASEDIR}"/src/"${LIB_NAME}"/.gitmodules || return 1
 ./gitsub.sh pull || return 1
 # ALWAYS CLEAN THE PREVIOUS BUILD

@ -33,8 +33,8 @@ get_library_source() {
     ;;
 ffmpeg)
     SOURCE_REPO_URL="https://github.com/arthenica/FFmpeg"
-    SOURCE_ID="65f96a965af415300f7533c75940eeae4e3a2287"
-    SOURCE_TYPE="COMMIT"
+    SOURCE_ID="n6.0"
+    SOURCE_TYPE="TAG"
     ;;
 fontconfig)
     SOURCE_REPO_URL="https://github.com/arthenica/fontconfig"

@ -2,14 +2,14 @@ apply plugin: 'com.android.library'
 android {
     namespace 'com.arthenica.ffmpegkit'
-    compileSdk 31
+    compileSdk 33
     ndkVersion "22.1.7171670"
     defaultConfig {
         minSdk 24
-        targetSdk 31
-        versionCode 240510
-        versionName "5.1"
+        targetSdk 33
+        versionCode 240600
+        versionName "6.0"
         project.archivesBaseName = "ffmpeg-kit"
         consumerProguardFiles "consumer-rules.pro"
     }

@ -2,14 +2,14 @@ apply plugin: 'com.android.library'
 android {
     namespace 'com.arthenica.ffmpegkit'
-    compileSdk 31
+    compileSdk 33
     ndkVersion "22.1.7171670"
     defaultConfig {
         minSdk 16
-        targetSdk 31
-        versionCode 160510
-        versionName "5.1.LTS"
+        targetSdk 33
+        versionCode 160600
+        versionName "6.0.LTS"
         project.archivesBaseName = "ffmpeg-kit"
         consumerProguardFiles "consumer-rules.pro"
     }

@ -19,7 +19,7 @@
#include "libavutil/file.h" #include "libavutil/file.h"
static int64_t fd_seek(URLContext *h, int64_t pos, int whence) static int64_t saf_seek(URLContext *h, int64_t pos, int whence)
{ {
FileContext *c = h->priv_data; FileContext *c = h->priv_data;
int64_t ret; int64_t ret;
@ -35,7 +35,7 @@ static int64_t fd_seek(URLContext *h, int64_t pos, int whence)
return ret < 0 ? AVERROR(errno) : ret; return ret < 0 ? AVERROR(errno) : ret;
} }
static int fd_open(URLContext *h, const char *filename, int flags) static int saf_open(URLContext *h, const char *filename, int flags)
{ {
FileContext *c = h->priv_data; FileContext *c = h->priv_data;
int saf_id; int saf_id;
@ -79,7 +79,7 @@ static int fd_open(URLContext *h, const char *filename, int flags)
return 0; return 0;
} }
static int fd_check(URLContext *h, int mask) static int saf_check(URLContext *h, int mask)
{ {
int ret = 0; int ret = 0;
const char *filename = h->filename; const char *filename = h->filename;
@ -112,7 +112,7 @@ static int fd_check(URLContext *h, int mask)
return ret; return ret;
} }
static int fd_delete(URLContext *h) static int saf_delete(URLContext *h)
{ {
#if HAVE_UNISTD_H #if HAVE_UNISTD_H
int ret; int ret;
@ -135,7 +135,7 @@ static int fd_delete(URLContext *h)
#endif /* HAVE_UNISTD_H */ #endif /* HAVE_UNISTD_H */
} }
static int fd_move(URLContext *h_src, URLContext *h_dst) static int saf_move(URLContext *h_src, URLContext *h_dst)
{ {
const char *filename_src = h_src->filename; const char *filename_src = h_src->filename;
const char *filename_dst = h_dst->filename; const char *filename_dst = h_dst->filename;
@ -148,7 +148,7 @@ static int fd_move(URLContext *h_src, URLContext *h_dst)
return 0; return 0;
} }
static int fd_close(URLContext *h) static int saf_close(URLContext *h)
{ {
FileContext *c = h->priv_data; FileContext *c = h->priv_data;
@ -169,15 +169,15 @@ static const AVClass saf_class = {
const URLProtocol ff_saf_protocol = { const URLProtocol ff_saf_protocol = {
.name = "saf", .name = "saf",
.url_open = fd_open, .url_open = saf_open,
.url_read = file_read, .url_read = file_read,
.url_write = file_write, .url_write = file_write,
.url_seek = fd_seek, .url_seek = saf_seek,
.url_close = fd_close, .url_close = saf_close,
.url_get_file_handle = file_get_handle, .url_get_file_handle = file_get_handle,
.url_check = fd_check, .url_check = saf_check,
.url_delete = fd_delete, .url_delete = saf_delete,
.url_move = fd_move, .url_move = saf_move,
.priv_data_size = sizeof(FileContext), .priv_data_size = sizeof(FileContext),
.priv_data_class = &saf_class, .priv_data_class = &saf_class,
.default_whitelist = "saf,crypto,data" .default_whitelist = "saf,crypto,data"
