Add 'Keep every nth frame' frame dropping strategy.

#minor-release

PiperOrigin-RevId: 538804347
(cherry picked from commit 276f2f1fe65402dbfd9d9f273d1c4da8eb08865b)
tofunmi 2023-06-08 16:15:16 +00:00 committed by Tofunmi Adigun-Hameed
parent 78f23c0c9b
commit 39b98fe5ad
4 changed files with 256 additions and 7 deletions

@@ -0,0 +1,147 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertThat;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests for {@link FrameDropEffect}. */
@RunWith(AndroidJUnit4.class)
public class FrameDropTest {
private static final String ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
private static final String SCALE_WIDE_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/scale_wide.png";
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
private @MonotonicNonNull Queue<Long> actualPresentationTimesUs;
@EnsuresNonNull("actualPresentationTimesUs")
@Before
public void setUp() {
actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
}
@After
public void release() {
checkNotNull(videoFrameProcessorTestRunner).release();
}
// TODO: b/536973872 - Once there is support for testing multiple frames in the output, test
// whether the correct frames come out.
@RequiresNonNull("actualPresentationTimesUs")
@Test
public void frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs()
throws Exception {
String testId = "frameDrop_withDefaultStrategy_outputsFramesAtTheCorrectPresentationTimesUs";
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(
testId, FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30))
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build();
ImmutableList<Integer> timestampsMs = ImmutableList.of(0, 16, 32, 48, 58, 71, 86);
for (int timestampMs : timestampsMs) {
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ timestampMs * 1000L,
/* frameRate= */ 1);
}
videoFrameProcessorTestRunner.endFrameProcessing();
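// With a 30 fps target the target frame interval is 1_000_000 / 30 ≈ 33_333us, so the default
// strategy keeps the first frame (0), then 32_000 and 71_000, whose deltas from the previously
// kept frame are closest to that interval.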
assertThat(actualPresentationTimesUs).containsExactly(0L, 32_000L, 71_000L).inOrder();
}
@RequiresNonNull("actualPresentationTimesUs")
@Test
public void frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs()
throws Exception {
String testId = "frameDrop_withSimpleStrategy_outputsFramesAtTheCorrectPresentationTimesUs";
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(
testId,
FrameDropEffect.createSimpleFrameDropEffect(
/* expectedFrameRate= */ 6, /* targetFrameRate= */ 2))
.build();
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L,
/* frameRate= */ 4);
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(SCALE_WIDE_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ C.MICROS_PER_SECOND,
/* frameRate= */ 2);
videoFrameProcessorTestRunner.endFrameProcessing();
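// Six frames are queued in total (at 0, 250_000, 500_000, 750_000, 1_000_000 and 1_500_000us);
// with n = round(6 / 2) = 3, every 3rd received frame (the 3rd and the 6th) is kept.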
assertThat(actualPresentationTimesUs).containsExactly(500_000L, 1_500_000L).inOrder();
}
@RequiresNonNull("actualPresentationTimesUs")
@Test
public void frameDrop_withSimpleStrategy_outputsAllFrames() throws Exception {
String testId = "frameDrop_withSimpleStrategy_outputsCorrectNumberOfFrames";
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(
testId,
FrameDropEffect.createSimpleFrameDropEffect(
/* expectedFrameRate= */ 3, /* targetFrameRate= */ 3))
.build();
videoFrameProcessorTestRunner.queueInputBitmap(
readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* durationUs= */ C.MICROS_PER_SECOND,
/* offsetToAddUs= */ 0L,
/* frameRate= */ 3);
videoFrameProcessorTestRunner.endFrameProcessing();
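// With n = round(3 / 3) = 1, every frame is kept.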
assertThat(actualPresentationTimesUs).containsExactly(0L, 333_333L, 666_667L).inOrder();
}
@RequiresNonNull("actualPresentationTimesUs")
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId, FrameDropEffect frameDropEffect) {
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setEffects(frameDropEffect)
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add);
}
}

@@ -25,7 +25,6 @@ import androidx.media3.common.C;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
// TODO(b/227625363): Add tests for this file.
/**
* Drops frames by only queuing input frames that are chosen by the frame dropping strategy.
*
@@ -40,7 +39,7 @@ import androidx.media3.common.VideoFrameProcessingException;
* <p>Where T_lastQueued is the timestamp of the last queued frame and T_(x+1) is the timestamp of
* the next frame. The target frame interval is determined from {@code targetFps}.
*/
/* package */ final class FrameDroppingShaderProgram extends FrameCacheGlShaderProgram {
/* package */ final class DefaultFrameDroppingShaderProgram extends FrameCacheGlShaderProgram {
private final long targetFrameDeltaUs;
@Nullable private GlTextureInfo previousTexture;
@@ -56,7 +55,7 @@ import androidx.media3.common.VideoFrameProcessingException;
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @param targetFps The number of frames per second the output video should roughly have.
*/
public FrameDroppingShaderProgram(Context context, boolean useHdr, float targetFps)
public DefaultFrameDroppingShaderProgram(Context context, boolean useHdr, float targetFps)
throws VideoFrameProcessingException {
super(context, /* capacity= */ 1, useHdr);
this.targetFrameDeltaUs = (long) (C.MICROS_PER_SECOND / targetFps);

@@ -17,6 +17,7 @@
package androidx.media3.effect;
import android.content.Context;
import androidx.media3.common.C;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi;
@@ -24,20 +25,56 @@ import androidx.media3.common.util.UnstableApi;
@UnstableApi
public class FrameDropEffect implements GlEffect {
private final float inputFrameRate;
private final float targetFrameRate;
/**
* Creates an instance.
* Creates a {@link FrameDropEffect} with the default frame dropping strategy.
*
* <p>The strategy used is to queue the current frame, x, with timestamp T_x if and only if one of
* the following is true:
*
* <ul>
* <li>x is the first frame,
* <li>(T_x - T_lastQueued) is closer to the target frame interval than (T_(x+1) - T_lastQueued)
* </ul>
*
* <p>Where T_lastQueued is the timestamp of the last queued frame and T_(x+1) is the timestamp of
* the next frame. The target frame interval is determined from {@code targetFrameRate}.
*
* @param targetFrameRate The number of frames per second the output video should roughly have.
*/
public FrameDropEffect(float targetFrameRate) {
this.targetFrameRate = targetFrameRate;
public static FrameDropEffect createDefaultFrameDropEffect(float targetFrameRate) {
return new FrameDropEffect(/* inputFrameRate= */ C.RATE_UNSET, targetFrameRate);
}
/**
* Creates a {@link FrameDropEffect} that keeps every nth frame, where n is the {@code
* inputFrameRate} divided by the {@code targetFrameRate}.
*
* <p>For example, if the input stream came in at 60fps and the targeted frame rate was 20fps,
* every 3rd frame would be kept. If n is not an integer, then we round to the nearest one.
*
* @param expectedFrameRate The number of frames per second in the input stream.
* @param targetFrameRate The number of frames per second the output video should roughly have.
*/
public static FrameDropEffect createSimpleFrameDropEffect(
float expectedFrameRate, float targetFrameRate) {
return new FrameDropEffect(expectedFrameRate, targetFrameRate);
}
@Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws VideoFrameProcessingException {
return new FrameDroppingShaderProgram(context, useHdr, targetFrameRate);
if (inputFrameRate == C.RATE_UNSET) {
return new DefaultFrameDroppingShaderProgram(context, useHdr, targetFrameRate);
} else {
return new SimpleFrameDroppingShaderProgram(context, useHdr, inputFrameRate, targetFrameRate);
}
}
private FrameDropEffect(float inputFrameRate, float targetFrameRate) {
this.inputFrameRate = inputFrameRate;
this.targetFrameRate = targetFrameRate;
}
}
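
For orientation, here is a minimal usage sketch of the two factory methods added above. Only the createDefaultFrameDropEffect and createSimpleFrameDropEffect calls come from this change; the frame-rate values are illustrative, and wiring the effect in via setEffects mirrors the test runner in FrameDropTest rather than prescribing a particular pipeline.

// Default strategy: pick frames whose spacing best matches a 30 fps target.
FrameDropEffect defaultDrop = FrameDropEffect.createDefaultFrameDropEffect(/* targetFrameRate= */ 30);
// Simple strategy: 60 fps in, 20 fps out, so n = round(60 / 20) = 3 and every 3rd frame is kept.
FrameDropEffect everyThird =
    FrameDropEffect.createSimpleFrameDropEffect(/* expectedFrameRate= */ 60, /* targetFrameRate= */ 20);
// Either effect can then be passed to anything that accepts GlEffects, e.g.
// VideoFrameProcessorTestRunner.Builder#setEffects(everyThird) as in FrameDropTest.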

@@ -0,0 +1,66 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import static java.lang.Math.round;
import android.content.Context;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
/**
* Drops frames by only keeping every nth frame, where n is the {@code inputFrameRate} divided by
* the {@code targetFrameRate}.
*
* <p>For example, if the input stream came in at 60fps and the targeted frame rate was 20fps, every
* 3rd frame would be kept. If n is not an integer, then we round to the nearest one.
*/
/* package */ final class SimpleFrameDroppingShaderProgram extends FrameCacheGlShaderProgram {
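// Every nth received frame is kept, where n = round(inputFrameRate / targetFrameRate).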
private final int n;
private int framesReceived;
/**
* Creates a new instance.
*
* @param context The {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @param inputFrameRate The number of frames per second the input stream should have.
* @param targetFrameRate The number of frames per second the output video should roughly have.
*/
public SimpleFrameDroppingShaderProgram(
Context context, boolean useHdr, float inputFrameRate, float targetFrameRate)
throws VideoFrameProcessingException {
super(context, /* capacity= */ 1, useHdr);
n = round(inputFrameRate / targetFrameRate);
checkArgument(n >= 1, "The input frame rate should be greater than the target frame rate.");
}
@Override
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
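// Forward only every nth received frame; dropped frames are released back to the input listener
// immediately so the upstream producer can reuse the texture.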
framesReceived++;
if (framesReceived % n == 0) {
super.queueInputFrame(inputTexture, presentationTimeUs);
} else {
inputListener.onInputFrameProcessed(inputTexture);
inputListener.onReadyToAcceptInputFrame();
}
}
}
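
As a quick check of the rounding behaviour described in the class Javadoc (the frame rates below are illustrative, not from the commit):

// n = round(inputFrameRate / targetFrameRate)
// 60 fps -> 20 fps: round(3.0f) == 3, so every 3rd received frame is kept.
// 30 fps -> 12 fps: round(2.5f) == 3, since Math.round rounds half up.
// 24 fps -> 24 fps: round(1.0f) == 1, so every frame is kept.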