fix android ndk r22 compatibility issues for android

This commit is contained in:
Taner Sener 2021-01-10 10:30:24 +00:00
parent 8a3372f81c
commit e27b8e6fc1
14 changed files with 121 additions and 129 deletions

View File

@ -51,7 +51,7 @@ PROJECT_BRIEF =
# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
# the logo to the output directory.
PROJECT_LOGO =
PROJECT_LOGO = ../../docs/assets/ffmpeg-kit-icon-v4.png
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
# into which the generated documentation will be written. If a relative path is

View File

@ -39,6 +39,6 @@ task javadoc(type: Javadoc) {
}
dependencies {
testImplementation "androidx.test.ext:junit:1.1.1"
testImplementation "androidx.test.ext:junit:1.1.2"
testImplementation "org.json:json:20190722"
}

View File

@ -20,6 +20,9 @@
*/
/*
* CHANGES 01.2021
* - NDK r22 incompatibility issues regarding INT64_MAX fixed
*
* CHANGES 01.2020
* - ffprobe support changes
* - (optindex < argc) validation in parse_options() method updated with (optindex >= argc) check
@ -358,7 +361,7 @@ static int write_option(void *optctx, const OptionDef *po, const char *opt,
} else if (po->flags & OPT_BOOL || po->flags & OPT_INT) {
*(int *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
} else if (po->flags & OPT_INT64) {
*(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, INT64_MAX);
*(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, (double)INT64_MAX);
} else if (po->flags & OPT_TIME) {
*(int64_t *)dst = parse_time_or_die(opt, arg, 1);
} else if (po->flags & OPT_FLOAT) {
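
Note on the (double)INT64_MAX casts in this hunk and the ones that follow: NDK r22 ships a newer Clang that diagnoses implicit conversion of INT64_MAX to double (-Wimplicit-const-int-float-conversion), because the constant is not exactly representable as a double, and that diagnostic can fail builds which treat warnings as errors. A minimal sketch of the pattern and the fix, using a hypothetical parse_range() helper in place of FFmpeg's parse_number_or_die():

    #include <stdint.h>
    #include <stdlib.h>
    #include <stdio.h>

    /* Hypothetical stand-in for FFmpeg's parse_number_or_die(): like the real
     * helper, it takes its range limits as doubles. */
    static double parse_range(const char *arg, double min, double max)
    {
        double v = atof(arg);
        if (v < min || v > max) {
            fprintf(stderr, "value %f is out of range [%f, %f]\n", v, min, max);
            exit(1);
        }
        return v;
    }

    int main(void)
    {
        /* INT64_MIN (-2^63) is exactly representable as a double, so it converts
         * silently. INT64_MAX (2^63 - 1) is not: the nearest double is 2^63, so
         * newer Clang diagnoses the implicit conversion below:
         *
         *     double v = parse_range("42", INT64_MIN, INT64_MAX);   // warned
         *
         * The explicit cast produces the same rounded value but marks the
         * conversion as intentional, which is what the diff above does: */
        double v = parse_range("42", INT64_MIN, (double)INT64_MAX);
        printf("parsed: %f\n", v);
        return 0;
    }
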

View File

@ -24,6 +24,9 @@
*/
/*
* CHANGES 01.2021
* - NDK r22 incompatibility issues regarding INT64_MAX fixed
*
* CHANGES 06.2020
* - ignoring signals implemented
* - cancel_operation() method signature updated with id
@ -1318,115 +1321,115 @@ static void do_video_out(OutputFile *of,
if (!check_recording_time(ost))
return;
if (enc->flags & (AV_CODEC_FLAG_INTERLACED_DCT | AV_CODEC_FLAG_INTERLACED_ME) &&
ost->top_field_first >= 0)
in_picture->top_field_first = !!ost->top_field_first;
if (enc->flags & (AV_CODEC_FLAG_INTERLACED_DCT | AV_CODEC_FLAG_INTERLACED_ME) &&
ost->top_field_first >= 0)
in_picture->top_field_first = !!ost->top_field_first;
if (in_picture->interlaced_frame) {
if (enc->codec->id == AV_CODEC_ID_MJPEG)
mux_par->field_order = in_picture->top_field_first ? AV_FIELD_TT:AV_FIELD_BB;
else
mux_par->field_order = in_picture->top_field_first ? AV_FIELD_TB:AV_FIELD_BT;
} else
mux_par->field_order = AV_FIELD_PROGRESSIVE;
if (in_picture->interlaced_frame) {
if (enc->codec->id == AV_CODEC_ID_MJPEG)
mux_par->field_order = in_picture->top_field_first ? AV_FIELD_TT:AV_FIELD_BB;
else
mux_par->field_order = in_picture->top_field_first ? AV_FIELD_TB:AV_FIELD_BT;
} else
mux_par->field_order = AV_FIELD_PROGRESSIVE;
in_picture->quality = enc->global_quality;
in_picture->pict_type = 0;
in_picture->quality = enc->global_quality;
in_picture->pict_type = 0;
if (ost->forced_kf_ref_pts == AV_NOPTS_VALUE &&
in_picture->pts != AV_NOPTS_VALUE)
ost->forced_kf_ref_pts = in_picture->pts;
if (ost->forced_kf_ref_pts == AV_NOPTS_VALUE &&
in_picture->pts != AV_NOPTS_VALUE)
ost->forced_kf_ref_pts = in_picture->pts;
pts_time = in_picture->pts != AV_NOPTS_VALUE ?
(in_picture->pts - ost->forced_kf_ref_pts) * av_q2d(enc->time_base) : NAN;
if (ost->forced_kf_index < ost->forced_kf_count &&
in_picture->pts >= ost->forced_kf_pts[ost->forced_kf_index]) {
ost->forced_kf_index++;
forced_keyframe = 1;
} else if (ost->forced_keyframes_pexpr) {
double res;
ost->forced_keyframes_expr_const_values[FKF_T] = pts_time;
res = av_expr_eval(ost->forced_keyframes_pexpr,
ost->forced_keyframes_expr_const_values, NULL);
ff_dlog(NULL, "force_key_frame: n:%f n_forced:%f prev_forced_n:%f t:%f prev_forced_t:%f -> res:%f\n",
ost->forced_keyframes_expr_const_values[FKF_N],
ost->forced_keyframes_expr_const_values[FKF_N_FORCED],
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N],
ost->forced_keyframes_expr_const_values[FKF_T],
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T],
res);
if (res) {
forced_keyframe = 1;
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N] =
ost->forced_keyframes_expr_const_values[FKF_N];
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T] =
ost->forced_keyframes_expr_const_values[FKF_T];
ost->forced_keyframes_expr_const_values[FKF_N_FORCED] += 1;
}
ost->forced_keyframes_expr_const_values[FKF_N] += 1;
} else if ( ost->forced_keyframes
&& !strncmp(ost->forced_keyframes, "source", 6)
&& in_picture->key_frame==1) {
pts_time = in_picture->pts != AV_NOPTS_VALUE ?
(in_picture->pts - ost->forced_kf_ref_pts) * av_q2d(enc->time_base) : NAN;
if (ost->forced_kf_index < ost->forced_kf_count &&
in_picture->pts >= ost->forced_kf_pts[ost->forced_kf_index]) {
ost->forced_kf_index++;
forced_keyframe = 1;
} else if (ost->forced_keyframes_pexpr) {
double res;
ost->forced_keyframes_expr_const_values[FKF_T] = pts_time;
res = av_expr_eval(ost->forced_keyframes_pexpr,
ost->forced_keyframes_expr_const_values, NULL);
ff_dlog(NULL, "force_key_frame: n:%f n_forced:%f prev_forced_n:%f t:%f prev_forced_t:%f -> res:%f\n",
ost->forced_keyframes_expr_const_values[FKF_N],
ost->forced_keyframes_expr_const_values[FKF_N_FORCED],
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N],
ost->forced_keyframes_expr_const_values[FKF_T],
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T],
res);
if (res) {
forced_keyframe = 1;
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_N] =
ost->forced_keyframes_expr_const_values[FKF_N];
ost->forced_keyframes_expr_const_values[FKF_PREV_FORCED_T] =
ost->forced_keyframes_expr_const_values[FKF_T];
ost->forced_keyframes_expr_const_values[FKF_N_FORCED] += 1;
}
if (forced_keyframe) {
in_picture->pict_type = AV_PICTURE_TYPE_I;
av_log(NULL, AV_LOG_DEBUG, "Forced keyframe at time %f\n", pts_time);
}
ost->forced_keyframes_expr_const_values[FKF_N] += 1;
} else if ( ost->forced_keyframes
&& !strncmp(ost->forced_keyframes, "source", 6)
&& in_picture->key_frame==1) {
forced_keyframe = 1;
}
update_benchmark(NULL);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder <- type:video "
"frame_pts:%s frame_pts_time:%s time_base:%d/%d\n",
av_ts2str(in_picture->pts), av_ts2timestr(in_picture->pts, &enc->time_base),
enc->time_base.num, enc->time_base.den);
}
if (forced_keyframe) {
in_picture->pict_type = AV_PICTURE_TYPE_I;
av_log(NULL, AV_LOG_DEBUG, "Forced keyframe at time %f\n", pts_time);
}
ost->frames_encoded++;
update_benchmark(NULL);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder <- type:video "
"frame_pts:%s frame_pts_time:%s time_base:%d/%d\n",
av_ts2str(in_picture->pts), av_ts2timestr(in_picture->pts, &enc->time_base),
enc->time_base.num, enc->time_base.den);
}
ret = avcodec_send_frame(enc, in_picture);
ost->frames_encoded++;
ret = avcodec_send_frame(enc, in_picture);
if (ret < 0)
goto error;
// Make sure Closed Captions will not be duplicated
av_frame_remove_side_data(in_picture, AV_FRAME_DATA_A53_CC);
while (1) {
ret = avcodec_receive_packet(enc, &pkt);
update_benchmark("encode_video %d.%d", ost->file_index, ost->index);
if (ret == AVERROR(EAGAIN))
break;
if (ret < 0)
goto error;
// Make sure Closed Captions will not be duplicated
av_frame_remove_side_data(in_picture, AV_FRAME_DATA_A53_CC);
while (1) {
ret = avcodec_receive_packet(enc, &pkt);
update_benchmark("encode_video %d.%d", ost->file_index, ost->index);
if (ret == AVERROR(EAGAIN))
break;
if (ret < 0)
goto error;
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &enc->time_base),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &enc->time_base));
}
if (pkt.pts == AV_NOPTS_VALUE && !(enc->codec->capabilities & AV_CODEC_CAP_DELAY))
pkt.pts = ost->sync_opts;
av_packet_rescale_ts(&pkt, enc->time_base, ost->mux_timebase);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ost->mux_timebase),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ost->mux_timebase));
}
frame_size = pkt.size;
output_packet(of, &pkt, ost, 0);
/* if two pass, output log */
if (ost->logfile && enc->stats_out) {
fprintf(ost->logfile, "%s", enc->stats_out);
}
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &enc->time_base),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &enc->time_base));
}
if (pkt.pts == AV_NOPTS_VALUE && !(enc->codec->capabilities & AV_CODEC_CAP_DELAY))
pkt.pts = ost->sync_opts;
av_packet_rescale_ts(&pkt, enc->time_base, ost->mux_timebase);
if (debug_ts) {
av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
"pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ost->mux_timebase),
av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ost->mux_timebase));
}
frame_size = pkt.size;
output_packet(of, &pkt, ost, 0);
/* if two pass, output log */
if (ost->logfile && enc->stats_out) {
fprintf(ost->logfile, "%s", enc->stats_out);
}
}
ost->sync_opts++;
/*
* For video, number of frames in == number of packets out.
@ -3779,7 +3782,7 @@ static int init_output_stream(OutputStream *ost, char *error, int error_len)
// parse user provided disposition, and update stream values
if (ost->disposition) {
static const AVOption opts[] = {
{ "disposition" , NULL, 0, AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT64_MIN, INT64_MAX, .unit = "flags" },
{ "disposition" , NULL, 0, AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT64_MIN, (double)INT64_MAX, .unit = "flags" },
{ "default" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_DEFAULT }, .unit = "flags" },
{ "dub" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_DUB }, .unit = "flags" },
{ "original" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AV_DISPOSITION_ORIGINAL }, .unit = "flags" },

View File

@ -19,6 +19,9 @@
*/
/*
* CHANGES 01.2021
* - NDK r22 incompatibility issues regarding INT64_MAX fixed
*
* CHANGES 01.2020
* - ffprobe support changes
*
@ -222,7 +225,7 @@ AVDictionary *strip_specifiers(AVDictionary *dict)
int opt_abort_on(void *optctx, const char *opt, const char *arg)
{
const AVOption opts[] = {
{ "abort_on" , NULL, 0, AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT64_MIN, INT64_MAX, .unit = "flags" },
{ "abort_on" , NULL, 0, AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT64_MIN, (double)INT64_MAX, .unit = "flags" },
{ "empty_output" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = ABORT_ON_FLAG_EMPTY_OUTPUT }, .unit = "flags" },
{ NULL },
};
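
The same cast shows up in the AVOption tables above because AVOption stores its range limits (min, max) as doubles, so an INT64_MAX initializer hits the same implicit int64-to-double conversion. A simplified sketch, with a hypothetical Option struct standing in for the real AVOption:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical, simplified stand-in for libavutil's AVOption: as in the
     * real struct, the range limits are doubles. */
    typedef struct Option {
        const char *name;
        double      min;
        double      max;
    } Option;

    static const Option opts[] = {
        /* Without the cast, initializing a double field with INT64_MAX triggers
         * the same -Wimplicit-const-int-float-conversion diagnostic on the
         * Clang bundled with NDK r22. */
        { "abort_on", INT64_MIN, (double)INT64_MAX },
        { NULL },
    };

    int main(void)
    {
        printf("%s range: [%.0f, %.0f]\n", opts[0].name, opts[0].min, opts[0].max);
        return 0;
    }
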

View File

@ -1,15 +1,10 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:4.0.1'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
classpath 'com.android.tools.build:gradle:4.1.1'
}
}

Binary file not shown.

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.2.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-6.8-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

2
android/gradlew vendored
View File

@ -82,6 +82,7 @@ esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
@ -129,6 +130,7 @@ fi
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath

22
android/gradlew.bat vendored Executable file → Normal file
View File

@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@ -54,7 +54,7 @@ goto fail
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
@ -64,28 +64,14 @@ echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell

View File

@ -1,4 +1,4 @@
include ':app'
include ':ffmpeg-kit'
project(':ffmpeg-kit').projectDir = new File('..')
rootProject.name='FFmpegKit'
rootProject.name = 'ffmpeg-kit-android'

View File

@ -39,6 +39,6 @@ task javadoc(type: Javadoc) {
}
dependencies {
testImplementation "androidx.test.ext:junit:1.1.1"
testImplementation "androidx.test.ext:junit:1.1.2"
testImplementation "org.json:json:20190722"
}

View File

@ -39,6 +39,6 @@ task javadoc(type: Javadoc) {
}
dependencies {
testImplementation "androidx.test.ext:junit:1.1.1"
testImplementation "androidx.test.ext:junit:1.1.2"
testImplementation "org.json:json:20190722"
}

View File

@ -166,7 +166,7 @@ artifacts {
}
dependencies {
testImplementation "androidx.test.ext:junit:1.1.1"
testImplementation "androidx.test.ext:junit:1.1.2"
testImplementation "org.json:json:20190722"
}