diff options
Diffstat (limited to 'libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine')
13 files changed, 1988 insertions, 0 deletions
diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/AudioChannel.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/AudioChannel.java new file mode 100644 index 000000000..ac3e43efb --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/AudioChannel.java @@ -0,0 +1,231 @@ +package net.ypresto.androidtranscoder.engine; + +import android.media.MediaCodec; +import android.media.MediaFormat; + +import net.ypresto.androidtranscoder.compat.MediaCodecBufferCompatWrapper; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.ShortBuffer; +import java.util.ArrayDeque; +import java.util.Queue; + +/** + * Channel of raw audio from decoder to encoder. + * Performs the necessary conversion between different input & output audio formats. + * + * We currently support upmixing from mono to stereo & downmixing from stereo to mono. + * Sample rate conversion is not supported yet. 
+ */ +class AudioChannel { + + private static class AudioBuffer { + int bufferIndex; + long presentationTimeUs; + ShortBuffer data; + } + + public static final int BUFFER_INDEX_END_OF_STREAM = -1; + + private static final int BYTES_PER_SHORT = 2; + private static final long MICROSECS_PER_SEC = 1000000; + + private final Queue<AudioBuffer> mEmptyBuffers = new ArrayDeque<>(); + private final Queue<AudioBuffer> mFilledBuffers = new ArrayDeque<>(); + + private final MediaCodec mDecoder; + private final MediaCodec mEncoder; + private final MediaFormat mEncodeFormat; + + private int mInputSampleRate; + private int mInputChannelCount; + private int mOutputChannelCount; + + private AudioRemixer mRemixer; + + private final MediaCodecBufferCompatWrapper mDecoderBuffers; + private final MediaCodecBufferCompatWrapper mEncoderBuffers; + + private final AudioBuffer mOverflowBuffer = new AudioBuffer(); + + private MediaFormat mActualDecodedFormat; + + + public AudioChannel(final MediaCodec decoder, + final MediaCodec encoder, final MediaFormat encodeFormat) { + mDecoder = decoder; + mEncoder = encoder; + mEncodeFormat = encodeFormat; + + mDecoderBuffers = new MediaCodecBufferCompatWrapper(mDecoder); + mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder); + } + + public void setActualDecodedFormat(final MediaFormat decodedFormat) { + mActualDecodedFormat = decodedFormat; + + mInputSampleRate = mActualDecodedFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); + if (mInputSampleRate != mEncodeFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)) { + throw new UnsupportedOperationException("Audio sample rate conversion not supported yet."); + } + + mInputChannelCount = mActualDecodedFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); + mOutputChannelCount = mEncodeFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); + + if (mInputChannelCount != 1 && mInputChannelCount != 2) { + throw new UnsupportedOperationException("Input channel count (" + mInputChannelCount + ") not supported."); + } 
+ + if (mOutputChannelCount != 1 && mOutputChannelCount != 2) { + throw new UnsupportedOperationException("Output channel count (" + mOutputChannelCount + ") not supported."); + } + + if (mInputChannelCount > mOutputChannelCount) { + mRemixer = AudioRemixer.DOWNMIX; + } else if (mInputChannelCount < mOutputChannelCount) { + mRemixer = AudioRemixer.UPMIX; + } else { + mRemixer = AudioRemixer.PASSTHROUGH; + } + + mOverflowBuffer.presentationTimeUs = 0; + } + + public void drainDecoderBufferAndQueue(final int bufferIndex, final long presentationTimeUs) { + if (mActualDecodedFormat == null) { + throw new RuntimeException("Buffer received before format!"); + } + + final ByteBuffer data = + bufferIndex == BUFFER_INDEX_END_OF_STREAM ? + null : mDecoderBuffers.getOutputBuffer(bufferIndex); + + AudioBuffer buffer = mEmptyBuffers.poll(); + if (buffer == null) { + buffer = new AudioBuffer(); + } + + buffer.bufferIndex = bufferIndex; + buffer.presentationTimeUs = presentationTimeUs; + buffer.data = data == null ? 
null : data.asShortBuffer(); + + if (mOverflowBuffer.data == null) { + mOverflowBuffer.data = ByteBuffer + .allocateDirect(data.capacity()) + .order(ByteOrder.nativeOrder()) + .asShortBuffer(); + mOverflowBuffer.data.clear().flip(); + } + + mFilledBuffers.add(buffer); + } + + public boolean feedEncoder(long timeoutUs) { + final boolean hasOverflow = mOverflowBuffer.data != null && mOverflowBuffer.data.hasRemaining(); + if (mFilledBuffers.isEmpty() && !hasOverflow) { + // No audio data - Bail out + return false; + } + + final int encoderInBuffIndex = mEncoder.dequeueInputBuffer(timeoutUs); + if (encoderInBuffIndex < 0) { + // Encoder is full - Bail out + return false; + } + + // Drain overflow first + final ShortBuffer outBuffer = mEncoderBuffers.getInputBuffer(encoderInBuffIndex).asShortBuffer(); + if (hasOverflow) { + final long presentationTimeUs = drainOverflow(outBuffer); + mEncoder.queueInputBuffer(encoderInBuffIndex, + 0, outBuffer.position() * BYTES_PER_SHORT, + presentationTimeUs, 0); + return true; + } + + final AudioBuffer inBuffer = mFilledBuffers.poll(); + if (inBuffer.bufferIndex == BUFFER_INDEX_END_OF_STREAM) { + mEncoder.queueInputBuffer(encoderInBuffIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + return false; + } + + final long presentationTimeUs = remixAndMaybeFillOverflow(inBuffer, outBuffer); + mEncoder.queueInputBuffer(encoderInBuffIndex, + 0, outBuffer.position() * BYTES_PER_SHORT, + presentationTimeUs, 0); + if (inBuffer != null) { + mDecoder.releaseOutputBuffer(inBuffer.bufferIndex, false); + mEmptyBuffers.add(inBuffer); + } + + return true; + } + + private static long sampleCountToDurationUs(final int sampleCount, + final int sampleRate, + final int channelCount) { + return (sampleCount / (sampleRate * MICROSECS_PER_SEC)) / channelCount; + } + + private long drainOverflow(final ShortBuffer outBuff) { + final ShortBuffer overflowBuff = mOverflowBuffer.data; + final int overflowLimit = overflowBuff.limit(); + final int overflowSize = 
overflowBuff.remaining(); + + final long beginPresentationTimeUs = mOverflowBuffer.presentationTimeUs + + sampleCountToDurationUs(overflowBuff.position(), mInputSampleRate, mOutputChannelCount); + + outBuff.clear(); + // Limit overflowBuff to outBuff's capacity + overflowBuff.limit(outBuff.capacity()); + // Load overflowBuff onto outBuff + outBuff.put(overflowBuff); + + if (overflowSize >= outBuff.capacity()) { + // Overflow fully consumed - Reset + overflowBuff.clear().limit(0); + } else { + // Only partially consumed - Keep position & restore previous limit + overflowBuff.limit(overflowLimit); + } + + return beginPresentationTimeUs; + } + + private long remixAndMaybeFillOverflow(final AudioBuffer input, + final ShortBuffer outBuff) { + final ShortBuffer inBuff = input.data; + final ShortBuffer overflowBuff = mOverflowBuffer.data; + + outBuff.clear(); + + // Reset position to 0, and set limit to capacity (Since MediaCodec doesn't do that for us) + inBuff.clear(); + + if (inBuff.remaining() > outBuff.remaining()) { + // Overflow + // Limit inBuff to outBuff's capacity + inBuff.limit(outBuff.capacity()); + mRemixer.remix(inBuff, outBuff); + + // Reset limit to its own capacity & Keep position + inBuff.limit(inBuff.capacity()); + + // Remix the rest onto overflowBuffer + // NOTE: We should only reach this point when overflow buffer is empty + final long consumedDurationUs = + sampleCountToDurationUs(inBuff.position(), mInputSampleRate, mInputChannelCount); + mRemixer.remix(inBuff, overflowBuff); + + // Seal off overflowBuff & mark limit + overflowBuff.flip(); + mOverflowBuffer.presentationTimeUs = input.presentationTimeUs + consumedDurationUs; + } else { + // No overflow + mRemixer.remix(inBuff, outBuff); + } + + return input.presentationTimeUs; + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/AudioRemixer.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/AudioRemixer.java new file mode 
/**
 * Remixes raw PCM-16 audio between channel layouts: stereo-to-mono (DOWNMIX),
 * mono-to-stereo (UPMIX) and identity (PASSTHROUGH).
 */
public interface AudioRemixer {
    /**
     * Reads samples from {@code inSBuff} and writes remixed samples to
     * {@code outSBuff}, advancing both buffers' positions. Processes as many
     * samples as fit in the smaller of the two buffers.
     */
    void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff);

    /** Stereo to mono, using Viktor Toth's mixing algorithm. */
    AudioRemixer DOWNMIX = new AudioRemixer() {
        private static final int SIGNED_SHORT_LIMIT = 32768;
        private static final int UNSIGNED_SHORT_MAX = 65535;

        @Override
        public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
            // Down-mix stereo to mono
            // Viktor Toth's algorithm -
            // See: http://www.vttoth.com/CMS/index.php/technical-notes/68
            //      http://stackoverflow.com/a/25102339
            final int inRemaining = inSBuff.remaining() / 2;
            final int outSpace = outSBuff.remaining();

            final int samplesToBeProcessed = Math.min(inRemaining, outSpace);
            for (int i = 0; i < samplesToBeProcessed; ++i) {
                // Convert to unsigned [0, 65535]
                final int a = inSBuff.get() + SIGNED_SHORT_LIMIT;
                final int b = inSBuff.get() + SIGNED_SHORT_LIMIT;
                long m;
                // Pick the equation
                if ((a < SIGNED_SHORT_LIMIT) || (b < SIGNED_SHORT_LIMIT)) {
                    // Viktor's first equation when both sources are "quiet"
                    // (i.e. less than middle of the dynamic range)
                    m = (long) a * b / SIGNED_SHORT_LIMIT;
                } else {
                    // Viktor's second equation when one or both sources are loud.
                    // FIX: a * b can reach ~4.29e9, which overflows int for loud
                    // stereo input and produced garbage samples - compute in long.
                    m = 2L * (a + b) - (long) a * b / SIGNED_SHORT_LIMIT - UNSIGNED_SHORT_MAX;
                }
                // FIX: clamp to the unsigned range instead of only spot-checking
                // m == 65536; m can reach 65537 at full scale, which the original
                // wrapped around to a large negative sample.
                if (m > UNSIGNED_SHORT_MAX) m = UNSIGNED_SHORT_MAX;
                // Convert output back to signed short
                outSBuff.put((short) (m - SIGNED_SHORT_LIMIT));
            }
        }
    };

    /** Mono to stereo: duplicates each input sample into both channels. */
    AudioRemixer UPMIX = new AudioRemixer() {
        @Override
        public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
            // Up-mix mono to stereo
            final int inRemaining = inSBuff.remaining();
            final int outSpace = outSBuff.remaining() / 2;

            final int samplesToBeProcessed = Math.min(inRemaining, outSpace);
            for (int i = 0; i < samplesToBeProcessed; ++i) {
                final short inSample = inSBuff.get();
                outSBuff.put(inSample);
                outSBuff.put(inSample);
            }
        }
    };

    /** Identity remix: bulk copy of the remaining input. */
    AudioRemixer PASSTHROUGH = new AudioRemixer() {
        @Override
        public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
            // Passthrough
            outSBuff.put(inSBuff);
        }
    };
}
DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1; + private static final int DRAIN_STATE_CONSUMED = 2; + + private final MediaExtractor mExtractor; + private final QueuedMuxer mMuxer; + private long mWrittenPresentationTimeUs; + + private final int mTrackIndex; + private final MediaFormat mInputFormat; + private final MediaFormat mOutputFormat; + + private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo(); + private MediaCodec mDecoder; + private MediaCodec mEncoder; + private MediaFormat mActualOutputFormat; + + private MediaCodecBufferCompatWrapper mDecoderBuffers; + private MediaCodecBufferCompatWrapper mEncoderBuffers; + + private boolean mIsExtractorEOS; + private boolean mIsDecoderEOS; + private boolean mIsEncoderEOS; + private boolean mDecoderStarted; + private boolean mEncoderStarted; + + private AudioChannel mAudioChannel; + + public AudioTrackTranscoder(MediaExtractor extractor, int trackIndex, + MediaFormat outputFormat, QueuedMuxer muxer) { + mExtractor = extractor; + mTrackIndex = trackIndex; + mOutputFormat = outputFormat; + mMuxer = muxer; + + mInputFormat = mExtractor.getTrackFormat(mTrackIndex); + } + + @Override + public void setup() { + mExtractor.selectTrack(mTrackIndex); + try { + mEncoder = MediaCodec.createEncoderByType(mOutputFormat.getString(MediaFormat.KEY_MIME)); + } catch (IOException e) { + throw new IllegalStateException(e); + } + mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + mEncoder.start(); + mEncoderStarted = true; + mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder); + + final MediaFormat inputFormat = mExtractor.getTrackFormat(mTrackIndex); + try { + mDecoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME)); + } catch (IOException e) { + throw new IllegalStateException(e); + } + mDecoder.configure(inputFormat, null, null, 0); + mDecoder.start(); + mDecoderStarted = true; + mDecoderBuffers = new MediaCodecBufferCompatWrapper(mDecoder); + + 
mAudioChannel = new AudioChannel(mDecoder, mEncoder, mOutputFormat); + } + + @Override + public MediaFormat getDeterminedFormat() { + return mInputFormat; + } + + @Override + public boolean stepPipeline() { + boolean busy = false; + + int status; + while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true; + do { + status = drainDecoder(0); + if (status != DRAIN_STATE_NONE) busy = true; + // NOTE: not repeating to keep from deadlock when encoder is full. + } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY); + + while (mAudioChannel.feedEncoder(0)) busy = true; + while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true; + + return busy; + } + + private int drainExtractor(long timeoutUs) { + if (mIsExtractorEOS) return DRAIN_STATE_NONE; + int trackIndex = mExtractor.getSampleTrackIndex(); + if (trackIndex >= 0 && trackIndex != mTrackIndex) { + return DRAIN_STATE_NONE; + } + + final int result = mDecoder.dequeueInputBuffer(timeoutUs); + if (result < 0) return DRAIN_STATE_NONE; + if (trackIndex < 0) { + mIsExtractorEOS = true; + mDecoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + return DRAIN_STATE_NONE; + } + + final int sampleSize = mExtractor.readSampleData(mDecoderBuffers.getInputBuffer(result), 0); + final boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0; + mDecoder.queueInputBuffer(result, 0, sampleSize, mExtractor.getSampleTime(), isKeyFrame ? 
MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0); + mExtractor.advance(); + return DRAIN_STATE_CONSUMED; + } + + private int drainDecoder(long timeoutUs) { + if (mIsDecoderEOS) return DRAIN_STATE_NONE; + + int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs); + switch (result) { + case MediaCodec.INFO_TRY_AGAIN_LATER: + return DRAIN_STATE_NONE; + case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: + mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat()); + case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: + return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY; + } + + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + mIsDecoderEOS = true; + mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0); + } else if (mBufferInfo.size > 0) { + mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs); + } + + return DRAIN_STATE_CONSUMED; + } + + private int drainEncoder(long timeoutUs) { + if (mIsEncoderEOS) return DRAIN_STATE_NONE; + + int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs); + switch (result) { + case MediaCodec.INFO_TRY_AGAIN_LATER: + return DRAIN_STATE_NONE; + case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: + if (mActualOutputFormat != null) { + throw new RuntimeException("Audio output format changed twice."); + } + mActualOutputFormat = mEncoder.getOutputFormat(); + mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat); + return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY; + case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: + mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder); + return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY; + } + + if (mActualOutputFormat == null) { + throw new RuntimeException("Could not determine actual output format."); + } + + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { + mIsEncoderEOS = true; + mBufferInfo.set(0, 0, 0, mBufferInfo.flags); + } + if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { + // SPS or PPS, 
which should be passed by MediaFormat. + mEncoder.releaseOutputBuffer(result, false); + return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY; + } + mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo); + mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs; + mEncoder.releaseOutputBuffer(result, false); + return DRAIN_STATE_CONSUMED; + } + + @Override + public long getWrittenPresentationTimeUs() { + return mWrittenPresentationTimeUs; + } + + @Override + public boolean isFinished() { + return mIsEncoderEOS; + } + + @Override + public void release() { + if (mDecoder != null) { + if (mDecoderStarted) mDecoder.stop(); + mDecoder.release(); + mDecoder = null; + } + if (mEncoder != null) { + if (mEncoderStarted) mEncoder.stop(); + mEncoder.release(); + mEncoder = null; + } + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InputSurface.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InputSurface.java new file mode 100644 index 000000000..9793c8911 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InputSurface.java @@ -0,0 +1,175 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/InputSurface.java +// blob: 157ed88d143229e4edb6889daf18fb73aa2fc5a5 +package net.ypresto.androidtranscoder.engine; +import android.opengl.EGL14; +import android.opengl.EGLConfig; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.opengl.EGLExt; +import android.opengl.EGLSurface; +import android.view.Surface; +/** + * Holds state associated with a Surface used for MediaCodec encoder input. + * <p> + * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that + * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent + * to the video encoder. + */ +class InputSurface { + private static final String TAG = "InputSurface"; + private static final int EGL_RECORDABLE_ANDROID = 0x3142; + private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; + private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; + private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; + private Surface mSurface; + /** + * Creates an InputSurface from a Surface. + */ + public InputSurface(Surface surface) { + if (surface == null) { + throw new NullPointerException(); + } + mSurface = surface; + eglSetup(); + } + /** + * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording. + */ + private void eglSetup() { + mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("unable to get EGL14 display"); + } + int[] version = new int[2]; + if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { + mEGLDisplay = null; + throw new RuntimeException("unable to initialize EGL14"); + } + // Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits + // to minimize artifacts from possible YUV conversion. 
+ int[] attribList = { + EGL14.EGL_RED_SIZE, 8, + EGL14.EGL_GREEN_SIZE, 8, + EGL14.EGL_BLUE_SIZE, 8, + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, + EGL_RECORDABLE_ANDROID, 1, + EGL14.EGL_NONE + }; + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, + numConfigs, 0)) { + throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config"); + } + // Configure context for OpenGL ES 2.0. + int[] attrib_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL14.EGL_NONE + }; + mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, + attrib_list, 0); + checkEglError("eglCreateContext"); + if (mEGLContext == null) { + throw new RuntimeException("null context"); + } + // Create a window surface, and attach it to the Surface we received. + int[] surfaceAttribs = { + EGL14.EGL_NONE + }; + mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface, + surfaceAttribs, 0); + checkEglError("eglCreateWindowSurface"); + if (mEGLSurface == null) { + throw new RuntimeException("surface was null"); + } + } + /** + * Discard all resources held by this class, notably the EGL context. Also releases the + * Surface that was passed to our constructor. + */ + public void release() { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface); + EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(mEGLDisplay); + } + mSurface.release(); + mEGLDisplay = EGL14.EGL_NO_DISPLAY; + mEGLContext = EGL14.EGL_NO_CONTEXT; + mEGLSurface = EGL14.EGL_NO_SURFACE; + mSurface = null; + } + /** + * Makes our EGL context and surface current. 
+ */ + public void makeCurrent() { + if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + public void makeUnCurrent() { + if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, + EGL14.EGL_NO_CONTEXT)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + /** + * Calls eglSwapBuffers. Use this to "publish" the current frame. + */ + public boolean swapBuffers() { + return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface); + } + /** + * Returns the Surface that the MediaCodec receives buffers from. + */ + public Surface getSurface() { + return mSurface; + } + /** + * Queries the surface's width. + */ + public int getWidth() { + int[] value = new int[1]; + EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_WIDTH, value, 0); + return value[0]; + } + /** + * Queries the surface's height. + */ + public int getHeight() { + int[] value = new int[1]; + EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_HEIGHT, value, 0); + return value[0]; + } + /** + * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. + */ + public void setPresentationTime(long nsecs) { + EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs); + } + /** + * Checks for EGL errors. 
+ */ + private void checkEglError(String msg) { + int error; + if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { + throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); + } + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java new file mode 100644 index 000000000..4c6bbd9a4 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2015 Yuya Tanaka + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Thrown when the MediaFormat requested (or produced) for the output stream
 * cannot be written to an MP4 container supported by Android.
 */
public class InvalidOutputFormatException extends RuntimeException {
    /**
     * @param detailMessage human-readable description of why the format was rejected
     */
    public InvalidOutputFormatException(String detailMessage) {
        super(detailMessage);
    }
}
+ */ +package net.ypresto.androidtranscoder.engine; + +import android.media.MediaFormat; + +import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants; +import net.ypresto.androidtranscoder.utils.AvcCsdUtils; +import net.ypresto.androidtranscoder.utils.AvcSpsUtils; + +import java.nio.ByteBuffer; + +class MediaFormatValidator { + // Refer: http://en.wikipedia.org/wiki/H.264/MPEG-4_AVC#Profiles + private static final byte PROFILE_IDC_BASELINE = 66; + + public static void validateVideoOutputFormat(MediaFormat format) { + String mime = format.getString(MediaFormat.KEY_MIME); + // Refer: http://developer.android.com/guide/appendix/media-formats.html#core + // Refer: http://en.wikipedia.org/wiki/MPEG-4_Part_14#Data_streams + if (!MediaFormatExtraConstants.MIMETYPE_VIDEO_AVC.equals(mime)) { + throw new InvalidOutputFormatException("Video codecs other than AVC is not supported, actual mime type: " + mime); + } + ByteBuffer spsBuffer = AvcCsdUtils.getSpsBuffer(format); + byte profileIdc = AvcSpsUtils.getProfileIdc(spsBuffer); + if (profileIdc != PROFILE_IDC_BASELINE) { + throw new InvalidOutputFormatException("Non-baseline AVC video profile is not supported by Android OS, actual profile_idc: " + profileIdc); + } + } + + public static void validateAudioOutputFormat(MediaFormat format) { + String mime = format.getString(MediaFormat.KEY_MIME); + if (!MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC.equals(mime)) { + throw new InvalidOutputFormatException("Audio codecs other than AAC is not supported, actual mime type: " + mime); + } + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java new file mode 100644 index 000000000..64a1edef4 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2014 Yuya 
Tanaka + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.ypresto.androidtranscoder.engine; + +import android.media.MediaExtractor; +import android.media.MediaFormat; +import android.media.MediaMetadataRetriever; +import android.media.MediaMuxer; +import android.util.Log; + +import net.ypresto.androidtranscoder.format.MediaFormatStrategy; +import net.ypresto.androidtranscoder.utils.MediaExtractorUtils; + +import java.io.FileDescriptor; +import java.io.IOException; + +/** + * Internal engine, do not use this directly. + */ +// TODO: treat encrypted data +public class MediaTranscoderEngine { + private static final String TAG = "MediaTranscoderEngine"; + private static final double PROGRESS_UNKNOWN = -1.0; + private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10; + private static final long PROGRESS_INTERVAL_STEPS = 10; + private FileDescriptor mInputFileDescriptor; + private TrackTranscoder mVideoTrackTranscoder; + private TrackTranscoder mAudioTrackTranscoder; + private MediaExtractor mExtractor; + private MediaMuxer mMuxer; + private volatile double mProgress; + private ProgressCallback mProgressCallback; + private long mDurationUs; + + /** + * Do not use this constructor unless you know what you are doing. 
+ */ + public MediaTranscoderEngine() { + } + + public void setDataSource(FileDescriptor fileDescriptor) { + mInputFileDescriptor = fileDescriptor; + } + + public ProgressCallback getProgressCallback() { + return mProgressCallback; + } + + public void setProgressCallback(ProgressCallback progressCallback) { + mProgressCallback = progressCallback; + } + + /** + * NOTE: This method is thread safe. + */ + public double getProgress() { + return mProgress; + } + + /** + * Run video transcoding. Blocks current thread. + * Audio data will not be transcoded; original stream will be wrote to output file. + * + * @param outputPath File path to output transcoded video file. + * @param formatStrategy Output format strategy. + * @throws IOException when input or output file could not be opened. + * @throws InvalidOutputFormatException when output format is not supported. + * @throws InterruptedException when cancel to transcode. + */ + public void transcodeVideo(String outputPath, MediaFormatStrategy formatStrategy) throws IOException, InterruptedException { + if (outputPath == null) { + throw new NullPointerException("Output path cannot be null."); + } + if (mInputFileDescriptor == null) { + throw new IllegalStateException("Data source is not set."); + } + try { + // NOTE: use single extractor to keep from running out audio track fast. 
+ mExtractor = new MediaExtractor(); + mExtractor.setDataSource(mInputFileDescriptor); + mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); + setupMetadata(); + setupTrackTranscoders(formatStrategy); + runPipelines(); + mMuxer.stop(); + } finally { + try { + if (mVideoTrackTranscoder != null) { + mVideoTrackTranscoder.release(); + mVideoTrackTranscoder = null; + } + if (mAudioTrackTranscoder != null) { + mAudioTrackTranscoder.release(); + mAudioTrackTranscoder = null; + } + if (mExtractor != null) { + mExtractor.release(); + mExtractor = null; + } + } catch (RuntimeException e) { + // Too fatal to make alive the app, because it may leak native resources. + //noinspection ThrowFromFinallyBlock + throw new Error("Could not shutdown extractor, codecs and muxer pipeline.", e); + } + try { + if (mMuxer != null) { + mMuxer.release(); + mMuxer = null; + } + } catch (RuntimeException e) { + Log.e(TAG, "Failed to release muxer.", e); + } + } + } + + private void setupMetadata() throws IOException { + MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever(); + mediaMetadataRetriever.setDataSource(mInputFileDescriptor); + + String rotationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); + try { + mMuxer.setOrientationHint(Integer.parseInt(rotationString)); + } catch (NumberFormatException e) { + // skip + } + + // TODO: parse ISO 6709 + // String locationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION); + // mMuxer.setLocation(Integer.getInteger(rotationString, 0)); + + try { + mDurationUs = Long.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000; + } catch (NumberFormatException e) { + mDurationUs = -1; + } + Log.d(TAG, "Duration (us): " + mDurationUs); + } + + private void setupTrackTranscoders(MediaFormatStrategy formatStrategy) { + MediaExtractorUtils.TrackResult trackResult 
= MediaExtractorUtils.getFirstVideoAndAudioTrack(mExtractor); + MediaFormat videoOutputFormat = formatStrategy.createVideoOutputFormat(trackResult.mVideoTrackFormat); + MediaFormat audioOutputFormat = formatStrategy.createAudioOutputFormat(trackResult.mAudioTrackFormat); + if (videoOutputFormat == null && audioOutputFormat == null) { + throw new InvalidOutputFormatException("MediaFormatStrategy returned pass-through for both video and audio. No transcoding is necessary."); + } + QueuedMuxer queuedMuxer = new QueuedMuxer(mMuxer, new QueuedMuxer.Listener() { + @Override + public void onDetermineOutputFormat() { + MediaFormatValidator.validateVideoOutputFormat(mVideoTrackTranscoder.getDeterminedFormat()); + MediaFormatValidator.validateAudioOutputFormat(mAudioTrackTranscoder.getDeterminedFormat()); + } + }); + + if (videoOutputFormat == null) { + mVideoTrackTranscoder = new PassThroughTrackTranscoder(mExtractor, trackResult.mVideoTrackIndex, queuedMuxer, QueuedMuxer.SampleType.VIDEO); + } else { + mVideoTrackTranscoder = new VideoTrackTranscoder(mExtractor, trackResult.mVideoTrackIndex, videoOutputFormat, queuedMuxer); + } + mVideoTrackTranscoder.setup(); + if (audioOutputFormat == null) { + mAudioTrackTranscoder = new PassThroughTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, queuedMuxer, QueuedMuxer.SampleType.AUDIO); + } else { + mAudioTrackTranscoder = new AudioTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, audioOutputFormat, queuedMuxer); + } + mAudioTrackTranscoder.setup(); + mExtractor.selectTrack(trackResult.mVideoTrackIndex); + mExtractor.selectTrack(trackResult.mAudioTrackIndex); + } + + private void runPipelines() { + long loopCount = 0; + if (mDurationUs <= 0) { + double progress = PROGRESS_UNKNOWN; + mProgress = progress; + if (mProgressCallback != null) mProgressCallback.onProgress(progress); // unknown + } + while (!(mVideoTrackTranscoder.isFinished() && mAudioTrackTranscoder.isFinished())) { + boolean stepped = 
mVideoTrackTranscoder.stepPipeline() + || mAudioTrackTranscoder.stepPipeline(); + loopCount++; + if (mDurationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) { + double videoProgress = mVideoTrackTranscoder.isFinished() ? 1.0 : Math.min(1.0, (double) mVideoTrackTranscoder.getWrittenPresentationTimeUs() / mDurationUs); + double audioProgress = mAudioTrackTranscoder.isFinished() ? 1.0 : Math.min(1.0, (double) mAudioTrackTranscoder.getWrittenPresentationTimeUs() / mDurationUs); + double progress = (videoProgress + audioProgress) / 2.0; + mProgress = progress; + if (mProgressCallback != null) mProgressCallback.onProgress(progress); + } + if (!stepped) { + try { + Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS); + } catch (InterruptedException e) { + // nothing to do + } + } + } + } + + public interface ProgressCallback { + /** + * Called to notify progress. Same thread which initiated transcode is used. + * + * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown. + */ + void onProgress(double progress); + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/OutputSurface.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/OutputSurface.java new file mode 100644 index 000000000..e52ba0217 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/OutputSurface.java @@ -0,0 +1,276 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/OutputSurface.java +// blob: fc8ad9cd390c5c311f015d3b7c1359e4d295bc52 +// modified: change TIMEOUT_MS from 500 to 10000 +package net.ypresto.androidtranscoder.engine; +import android.graphics.SurfaceTexture; +import android.opengl.EGL14; +import android.opengl.EGLConfig; +import android.opengl.EGLContext; +import android.opengl.EGLDisplay; +import android.opengl.EGLSurface; +import android.util.Log; +import android.view.Surface; +/** + * Holds state associated with a Surface used for MediaCodec decoder output. + * <p> + * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture, + * and then create a Surface for that SurfaceTexture. The Surface can be passed to + * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the + * texture with updateTexImage, then render the texture with GL to a pbuffer. + * <p> + * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer. + * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives + * we just draw it on whatever surface is current. + * <p> + * By default, the Surface will be using a BufferQueue in asynchronous mode, so we + * can potentially drop frames. 
+ */ +class OutputSurface implements SurfaceTexture.OnFrameAvailableListener { + private static final String TAG = "OutputSurface"; + private static final boolean VERBOSE = false; + private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; + private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; + private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; + private SurfaceTexture mSurfaceTexture; + private Surface mSurface; + private Object mFrameSyncObject = new Object(); // guards mFrameAvailable + private boolean mFrameAvailable; + private TextureRender mTextureRender; + /** + * Creates an OutputSurface backed by a pbuffer with the specifed dimensions. The new + * EGL context and surface will be made current. Creates a Surface that can be passed + * to MediaCodec.configure(). + */ + public OutputSurface(int width, int height) { + if (width <= 0 || height <= 0) { + throw new IllegalArgumentException(); + } + eglSetup(width, height); + makeCurrent(); + setup(); + } + /** + * Creates an OutputSurface using the current EGL context (rather than establishing a + * new one). Creates a Surface that can be passed to MediaCodec.configure(). + */ + public OutputSurface() { + setup(); + } + /** + * Creates instances of TextureRender and SurfaceTexture, and a Surface associated + * with the SurfaceTexture. + */ + private void setup() { + mTextureRender = new TextureRender(); + mTextureRender.surfaceCreated(); + // Even if we don't access the SurfaceTexture after the constructor returns, we + // still need to keep a reference to it. The Surface doesn't retain a reference + // at the Java level, so if we don't either then the object can get GCed, which + // causes the native finalizer to run. + if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId()); + mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId()); + // This doesn't work if OutputSurface is created on the thread that CTS started for + // these test cases. 
+ // + // The CTS-created thread has a Looper, and the SurfaceTexture constructor will + // create a Handler that uses it. The "frame available" message is delivered + // there, but since we're not a Looper-based thread we'll never see it. For + // this to do anything useful, OutputSurface must be created on a thread without + // a Looper, so that SurfaceTexture uses the main application Looper instead. + // + // Java language note: passing "this" out of a constructor is generally unwise, + // but we should be able to get away with it here. + mSurfaceTexture.setOnFrameAvailableListener(this); + mSurface = new Surface(mSurfaceTexture); + } + /** + * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer. + */ + private void eglSetup(int width, int height) { + mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); + if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { + throw new RuntimeException("unable to get EGL14 display"); + } + int[] version = new int[2]; + if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { + mEGLDisplay = null; + throw new RuntimeException("unable to initialize EGL14"); + } + // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits + // to be able to tell if the frame is reasonable. + int[] attribList = { + EGL14.EGL_RED_SIZE, 8, + EGL14.EGL_GREEN_SIZE, 8, + EGL14.EGL_BLUE_SIZE, 8, + EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, + EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT, + EGL14.EGL_NONE + }; + EGLConfig[] configs = new EGLConfig[1]; + int[] numConfigs = new int[1]; + if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, + numConfigs, 0)) { + throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config"); + } + // Configure context for OpenGL ES 2.0. 
+ int[] attrib_list = { + EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, + EGL14.EGL_NONE + }; + mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, + attrib_list, 0); + checkEglError("eglCreateContext"); + if (mEGLContext == null) { + throw new RuntimeException("null context"); + } + // Create a pbuffer surface. By using this for output, we can use glReadPixels + // to test values in the output. + int[] surfaceAttribs = { + EGL14.EGL_WIDTH, width, + EGL14.EGL_HEIGHT, height, + EGL14.EGL_NONE + }; + mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0); + checkEglError("eglCreatePbufferSurface"); + if (mEGLSurface == null) { + throw new RuntimeException("surface was null"); + } + } + /** + * Discard all resources held by this class, notably the EGL context. + */ + public void release() { + if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { + EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface); + EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); + EGL14.eglReleaseThread(); + EGL14.eglTerminate(mEGLDisplay); + } + mSurface.release(); + // this causes a bunch of warnings that appear harmless but might confuse someone: + // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned! + //mSurfaceTexture.release(); + mEGLDisplay = EGL14.EGL_NO_DISPLAY; + mEGLContext = EGL14.EGL_NO_CONTEXT; + mEGLSurface = EGL14.EGL_NO_SURFACE; + mTextureRender = null; + mSurface = null; + mSurfaceTexture = null; + } + /** + * Makes our EGL context and surface current. + */ + public void makeCurrent() { + if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) { + throw new RuntimeException("eglMakeCurrent failed"); + } + } + /** + * Returns the Surface that we draw onto. + */ + public Surface getSurface() { + return mSurface; + } + /** + * Replaces the fragment shader. 
+ */ + public void changeFragmentShader(String fragmentShader) { + mTextureRender.changeFragmentShader(fragmentShader); + } + /** + * Latches the next buffer into the texture. Must be called from the thread that created + * the OutputSurface object, after the onFrameAvailable callback has signaled that new + * data is available. + */ + public void awaitNewImage() { + final int TIMEOUT_MS = 10000; + synchronized (mFrameSyncObject) { + while (!mFrameAvailable) { + try { + // Wait for onFrameAvailable() to signal us. Use a timeout to avoid + // stalling the test if it doesn't arrive. + mFrameSyncObject.wait(TIMEOUT_MS); + if (!mFrameAvailable) { + // TODO: if "spurious wakeup", continue while loop + throw new RuntimeException("Surface frame wait timed out"); + } + } catch (InterruptedException ie) { + // shouldn't happen + throw new RuntimeException(ie); + } + } + mFrameAvailable = false; + } + // Latch the data. + mTextureRender.checkGlError("before updateTexImage"); + mSurfaceTexture.updateTexImage(); + } + /** + * Wait up to given timeout until new image become available. + * @param timeoutMs + * @return true if new image is available. false for no new image until timeout. + */ + public boolean checkForNewImage(int timeoutMs) { + synchronized (mFrameSyncObject) { + while (!mFrameAvailable) { + try { + // Wait for onFrameAvailable() to signal us. Use a timeout to avoid + // stalling the test if it doesn't arrive. + mFrameSyncObject.wait(timeoutMs); + if (!mFrameAvailable) { + return false; + } + } catch (InterruptedException ie) { + // shouldn't happen + throw new RuntimeException(ie); + } + } + mFrameAvailable = false; + } + // Latch the data. + mTextureRender.checkGlError("before updateTexImage"); + mSurfaceTexture.updateTexImage(); + return true; + } + /** + * Draws the data from SurfaceTexture onto the current EGL surface. 
+ */ + public void drawImage() { + mTextureRender.drawFrame(mSurfaceTexture); + } + @Override + public void onFrameAvailable(SurfaceTexture st) { + if (VERBOSE) Log.d(TAG, "new frame available"); + synchronized (mFrameSyncObject) { + if (mFrameAvailable) { + throw new RuntimeException("mFrameAvailable already set, frame could be dropped"); + } + mFrameAvailable = true; + mFrameSyncObject.notifyAll(); + } + } + /** + * Checks for EGL errors. + */ + private void checkEglError(String msg) { + int error; + if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { + throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); + } + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java new file mode 100644 index 000000000..7608dac86 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2014 Yuya Tanaka + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package net.ypresto.androidtranscoder.engine; + +import android.annotation.SuppressLint; +import android.media.MediaCodec; +import android.media.MediaExtractor; +import android.media.MediaFormat; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public class PassThroughTrackTranscoder implements TrackTranscoder { + private final MediaExtractor mExtractor; + private final int mTrackIndex; + private final QueuedMuxer mMuxer; + private final QueuedMuxer.SampleType mSampleType; + private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo(); + private int mBufferSize; + private ByteBuffer mBuffer; + private boolean mIsEOS; + private MediaFormat mActualOutputFormat; + private long mWrittenPresentationTimeUs; + + public PassThroughTrackTranscoder(MediaExtractor extractor, int trackIndex, + QueuedMuxer muxer, QueuedMuxer.SampleType sampleType) { + mExtractor = extractor; + mTrackIndex = trackIndex; + mMuxer = muxer; + mSampleType = sampleType; + + mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex); + mMuxer.setOutputFormat(mSampleType, mActualOutputFormat); + mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE); + mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder()); + } + + @Override + public void setup() { + } + + @Override + public MediaFormat getDeterminedFormat() { + return mActualOutputFormat; + } + + @SuppressLint("Assert") + @Override + public boolean stepPipeline() { + if (mIsEOS) return false; + int trackIndex = mExtractor.getSampleTrackIndex(); + if (trackIndex < 0) { + mBuffer.clear(); + mBufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); + mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo); + mIsEOS = true; + return true; + } + if (trackIndex != mTrackIndex) return false; + + mBuffer.clear(); + int sampleSize = mExtractor.readSampleData(mBuffer, 0); + assert sampleSize <= mBufferSize; + boolean isKeyFrame = (mExtractor.getSampleFlags() & 
MediaExtractor.SAMPLE_FLAG_SYNC) != 0; + int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0; + mBufferInfo.set(0, sampleSize, mExtractor.getSampleTime(), flags); + mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo); + mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs; + + mExtractor.advance(); + return true; + } + + @Override + public long getWrittenPresentationTimeUs() { + return mWrittenPresentationTimeUs; + } + + @Override + public boolean isFinished() { + return mIsEOS; + } + + @Override + public void release() { + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java new file mode 100644 index 000000000..df58e9923 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2015 Yuya Tanaka + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package net.ypresto.androidtranscoder.engine; + +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.media.MediaMuxer; +import android.util.Log; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.List; + +/** + * This class queues until all output track formats are determined. 
+ */ +public class QueuedMuxer { + private static final String TAG = "QueuedMuxer"; + private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not... + private final MediaMuxer mMuxer; + private final Listener mListener; + private MediaFormat mVideoFormat; + private MediaFormat mAudioFormat; + private int mVideoTrackIndex; + private int mAudioTrackIndex; + private ByteBuffer mByteBuffer; + private final List<SampleInfo> mSampleInfoList; + private boolean mStarted; + + public QueuedMuxer(MediaMuxer muxer, Listener listener) { + mMuxer = muxer; + mListener = listener; + mSampleInfoList = new ArrayList<>(); + } + + public void setOutputFormat(SampleType sampleType, MediaFormat format) { + switch (sampleType) { + case VIDEO: + mVideoFormat = format; + break; + case AUDIO: + mAudioFormat = format; + break; + default: + throw new AssertionError(); + } + onSetOutputFormat(); + } + + private void onSetOutputFormat() { + if (mVideoFormat == null || mAudioFormat == null) return; + mListener.onDetermineOutputFormat(); + + mVideoTrackIndex = mMuxer.addTrack(mVideoFormat); + Log.v(TAG, "Added track #" + mVideoTrackIndex + " with " + mVideoFormat.getString(MediaFormat.KEY_MIME) + " to muxer"); + mAudioTrackIndex = mMuxer.addTrack(mAudioFormat); + Log.v(TAG, "Added track #" + mAudioTrackIndex + " with " + mAudioFormat.getString(MediaFormat.KEY_MIME) + " to muxer"); + mMuxer.start(); + mStarted = true; + + if (mByteBuffer == null) { + mByteBuffer = ByteBuffer.allocate(0); + } + mByteBuffer.flip(); + Log.v(TAG, "Output format determined, writing " + mSampleInfoList.size() + + " samples / " + mByteBuffer.limit() + " bytes to muxer."); + MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); + int offset = 0; + for (SampleInfo sampleInfo : mSampleInfoList) { + sampleInfo.writeToBufferInfo(bufferInfo, offset); + mMuxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.mSampleType), mByteBuffer, bufferInfo); + offset += 
sampleInfo.mSize; + } + mSampleInfoList.clear(); + mByteBuffer = null; + } + + public void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) { + if (mStarted) { + mMuxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo); + return; + } + byteBuf.limit(bufferInfo.offset + bufferInfo.size); + byteBuf.position(bufferInfo.offset); + if (mByteBuffer == null) { + mByteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder()); + } + mByteBuffer.put(byteBuf); + mSampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo)); + } + + private int getTrackIndexForSampleType(SampleType sampleType) { + switch (sampleType) { + case VIDEO: + return mVideoTrackIndex; + case AUDIO: + return mAudioTrackIndex; + default: + throw new AssertionError(); + } + } + + public enum SampleType {VIDEO, AUDIO} + + private static class SampleInfo { + private final SampleType mSampleType; + private final int mSize; + private final long mPresentationTimeUs; + private final int mFlags; + + private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) { + mSampleType = sampleType; + mSize = size; + mPresentationTimeUs = bufferInfo.presentationTimeUs; + mFlags = bufferInfo.flags; + } + + private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) { + bufferInfo.set(offset, mSize, mPresentationTimeUs, mFlags); + } + } + + public interface Listener { + void onDetermineOutputFormat(); + } +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/TextureRender.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/TextureRender.java new file mode 100644 index 000000000..571427d30 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/TextureRender.java @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache 
License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/TextureRender.java
// blob: 4125dcfcfed6ed7fddba5b71d657dec0d433da6a
// modified: removed unused method bodies
// modified: use GL_LINEAR for GL_TEXTURE_MIN_FILTER to improve quality.
package net.ypresto.androidtranscoder.engine;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
 * Code for rendering a texture onto a surface using OpenGL ES 2.0.
 */
class TextureRender {
    private static final String TAG = "TextureRender";
    private static final int FLOAT_SIZE_BYTES = 4;
    // Each vertex is 5 floats: position (x, y, z) followed by texture coords (u, v).
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    // Full-screen quad, drawn as a 4-vertex triangle strip in drawFrame().
    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f, 1.0f, 0, 0.f, 1.f,
            1.0f, 1.0f, 0, 1.f, 1.f,
    };
    private FloatBuffer mTriangleVertices;
    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +      // highp here doesn't seem to matter
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";
    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];
    private int mProgram;
    // Real texture name is assigned in surfaceCreated(); the sentinel makes premature use obvious.
    private int mTextureID = -12345;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;
    public TextureRender() {
        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);
        Matrix.setIdentityM(mSTMatrix, 0);
    }
    /** Returns the external OES texture name generated in {@link #surfaceCreated()}. */
    public int getTextureId() {
        return mTextureID;
    }
    /**
     * Draws the latest frame latched into {@code st} as a full-screen quad on the current
     * EGL surface, applying the SurfaceTexture's transform matrix to the texture coords.
     */
    public void drawFrame(SurfaceTexture st) {
        checkGlError("onDrawFrame start");
        st.getTransformMatrix(mSTMatrix);
        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");
        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();
    }
    /**
     * Initializes GL state. Call this after the EGL surface has been created and made current.
     */
    public void surfaceCreated() {
        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }
        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");
        // Modified from AOSP: GL_LINEAR min filter (see file header) for better scaling quality.
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");
    }
    /**
     * Replaces the fragment shader.
     * Body removed when this file was imported from AOSP CTS (see file header).
     */
    public void changeFragmentShader(String fragmentShader) {
        throw new UnsupportedOperationException("Not implemented");
    }
    // Compiles a single shader; returns 0 (and logs the info log) on failure.
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }
    // Compiles and links the two shaders; returns 0 (and logs) on failure.
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }
    // Drains the GL error queue; logs and throws on the first error encountered.
    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
    /**
     * Saves the current frame to disk as a PNG image. Frame starts from (0,0).
     * <p>
     * Useful for debugging.
     * Body removed when this file was imported from AOSP CTS (see file header).
     */
    public static void saveFrame(String filename, int width, int height) {
        throw new UnsupportedOperationException("Not implemented.");
    }
}
+ */ + long getWrittenPresentationTimeUs(); + + boolean isFinished(); + + void release(); +} diff --git a/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java new file mode 100644 index 000000000..a5640dd99 --- /dev/null +++ b/libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java @@ -0,0 +1,231 @@ +/* + * Copyright (C) 2014 Yuya Tanaka + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
package net.ypresto.androidtranscoder.engine;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;

import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;

import java.io.IOException;
import java.nio.ByteBuffer;

// Refer: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java
/**
 * Transcodes the video track of a movie:
 * extractor -> decoder -> (OpenGL surface) -> encoder -> muxer.
 * Frames are passed between decoder and encoder through Surfaces, so no pixel
 * data is copied through ByteBuffers on the video path.
 */
public class VideoTrackTranscoder implements TrackTranscoder {
    private static final String TAG = "VideoTrackTranscoder";
    // Return codes of the drainXxx() helpers: no progress was possible,
    // progress was made and the helper should be polled again immediately,
    // or exactly one buffer/sample was consumed.
    private static final int DRAIN_STATE_NONE = 0;
    private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
    private static final int DRAIN_STATE_CONSUMED = 2;

    private final MediaExtractor mExtractor;
    private final int mTrackIndex;
    private final MediaFormat mOutputFormat;
    private final QueuedMuxer mMuxer;
    // Single BufferInfo instance reused for every dequeueOutputBuffer call.
    private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private MediaCodec mDecoder;
    private MediaCodec mEncoder;
    private ByteBuffer[] mDecoderInputBuffers;
    private ByteBuffer[] mEncoderOutputBuffers;
    // Output format as reported by the encoder; handed to the muxer once known.
    private MediaFormat mActualOutputFormat;
    private OutputSurface mDecoderOutputSurfaceWrapper;  // decoder renders onto this surface
    private InputSurface mEncoderInputSurfaceWrapper;    // frames drawn here feed the encoder
    private boolean mIsExtractorEOS;
    private boolean mIsDecoderEOS;
    private boolean mIsEncoderEOS;
    private boolean mDecoderStarted;
    private boolean mEncoderStarted;
    private long mWrittenPresentationTimeUs;

    public VideoTrackTranscoder(MediaExtractor extractor, int trackIndex,
                                MediaFormat outputFormat, QueuedMuxer muxer) {
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mOutputFormat = outputFormat;
        mMuxer = muxer;
    }

    /**
     * Creates and starts the encoder (fed through an input Surface) and the
     * decoder (rendering onto an output Surface), wiring them together.
     */
    @Override
    public void setup() {
        mExtractor.selectTrack(mTrackIndex);
        try {
            mEncoder = MediaCodec.createEncoderByType(mOutputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }

        mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // NOTE(review): makeCurrent() runs before OutputSurface() is constructed,
        // presumably so the decoder-side GL objects are created in the encoder
        // surface's EGL context — confirm before reordering these statements.
        mEncoderInputSurfaceWrapper = new InputSurface(mEncoder.createInputSurface());
        mEncoderInputSurfaceWrapper.makeCurrent();
        mEncoder.start();
        mEncoderStarted = true;
        mEncoderOutputBuffers = mEncoder.getOutputBuffers();

        MediaFormat inputFormat = mExtractor.getTrackFormat(mTrackIndex);
        if (inputFormat.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)) {
            // Decoded video is rotated automatically in Android 5.0 lollipop.
            // Turn off here because we don't want to encode rotated one.
            // refer: https://android.googlesource.com/platform/frameworks/av/+blame/lollipop-release/media/libstagefright/Utils.cpp
            inputFormat.setInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES, 0);
        }
        mDecoderOutputSurfaceWrapper = new OutputSurface();
        try {
            mDecoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        mDecoder.configure(inputFormat, mDecoderOutputSurfaceWrapper.getSurface(), null, 0);
        mDecoder.start();
        mDecoderStarted = true;
        mDecoderInputBuffers = mDecoder.getInputBuffers();
    }

    @Override
    public MediaFormat getDeterminedFormat() {
        return mActualOutputFormat;
    }

    /**
     * Pumps each pipeline stage once without blocking (all timeouts are 0):
     * encoder output first, then decoder output, then extractor input.
     *
     * @return true if any stage moved data.
     */
    @Override
    public boolean stepPipeline() {
        boolean busy = false;

        int status;
        while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
        do {
            status = drainDecoder(0);
            if (status != DRAIN_STATE_NONE) busy = true;
            // NOTE: not repeating to keep from deadlock when encoder is full.
        } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
        while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;

        return busy;
    }

    @Override
    public long getWrittenPresentationTimeUs() {
        return mWrittenPresentationTimeUs;
    }

    @Override
    public boolean isFinished() {
        // The track is done once the encoder has emitted its end-of-stream buffer.
        return mIsEncoderEOS;
    }

    // TODO: CloseGuard
    @Override
    public void release() {
        // Each resource is nulled after release so a second call is a no-op.
        if (mDecoderOutputSurfaceWrapper != null) {
            mDecoderOutputSurfaceWrapper.release();
            mDecoderOutputSurfaceWrapper = null;
        }
        if (mEncoderInputSurfaceWrapper != null) {
            mEncoderInputSurfaceWrapper.release();
            mEncoderInputSurfaceWrapper = null;
        }
        if (mDecoder != null) {
            if (mDecoderStarted) mDecoder.stop();
            mDecoder.release();
            mDecoder = null;
        }
        if (mEncoder != null) {
            if (mEncoderStarted) mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
    }

    /**
     * Feeds one encoded sample from the extractor into the decoder, or queues
     * an end-of-stream buffer when the extractor runs out of samples.
     */
    private int drainExtractor(long timeoutUs) {
        if (mIsExtractorEOS) return DRAIN_STATE_NONE;
        int trackIndex = mExtractor.getSampleTrackIndex();
        // A sample for another track is not ours to consume; wait for its transcoder.
        if (trackIndex >= 0 && trackIndex != mTrackIndex) {
            return DRAIN_STATE_NONE;
        }
        int result = mDecoder.dequeueInputBuffer(timeoutUs);
        if (result < 0) return DRAIN_STATE_NONE;
        if (trackIndex < 0) {
            // No more samples: push EOS into the decoder.
            mIsExtractorEOS = true;
            mDecoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            return DRAIN_STATE_NONE;
        }
        int sampleSize = mExtractor.readSampleData(mDecoderInputBuffers[result], 0);
        boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
        mDecoder.queueInputBuffer(result, 0, sampleSize, mExtractor.getSampleTime(), isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
        mExtractor.advance();
        return DRAIN_STATE_CONSUMED;
    }

    /**
     * Pulls one decoded frame, renders it through the GL surfaces into the
     * encoder, and forwards end-of-stream to the encoder when the decoder is done.
     */
    private int drainDecoder(long timeoutUs) {
        if (mIsDecoderEOS) return DRAIN_STATE_NONE;
        int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            // Propagate EOS to the surface-fed encoder; size 0 skips rendering below.
            mEncoder.signalEndOfInputStream();
            mIsDecoderEOS = true;
            mBufferInfo.size = 0;
        }
        boolean doRender = (mBufferInfo.size > 0);
        // NOTE: doRender will block if buffer (of encoder) is full.
        // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
        mDecoder.releaseOutputBuffer(result, doRender);
        if (doRender) {
            mDecoderOutputSurfaceWrapper.awaitNewImage();
            mDecoderOutputSurfaceWrapper.drawImage();
            // BufferInfo carries microseconds; the surface timestamp is set in
            // nanoseconds, hence the * 1000.
            mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
            mEncoderInputSurfaceWrapper.swapBuffers();
        }
        return DRAIN_STATE_CONSUMED;
    }

    /**
     * Pulls one encoded buffer from the encoder and writes it to the muxer.
     * The first format-changed event publishes the actual output format.
     */
    private int drainEncoder(long timeoutUs) {
        if (mIsEncoderEOS) return DRAIN_STATE_NONE;
        int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                if (mActualOutputFormat != null)
                    throw new RuntimeException("Video output format changed twice.");
                mActualOutputFormat = mEncoder.getOutputFormat();
                mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                mEncoderOutputBuffers = mEncoder.getOutputBuffers();
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if (mActualOutputFormat == null) {
            throw new RuntimeException("Could not determine actual output format.");
        }

        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            mIsEncoderEOS = true;
            // Zero out size/offset/time but keep the EOS flag for the write below.
            mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
        }
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // SPS or PPS, which should be passed by MediaFormat.
            mEncoder.releaseOutputBuffer(result, false);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
        mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_CONSUMED;
    }
}