forked from mirror/monocles_chat_clean
swap out transcoder library (Daniel Gultsch)
This commit is contained in:
parent 9b3fb1b7b9
commit 18b67fbd02
37 changed files with 1 addition and 3303 deletions
@@ -38,7 +38,6 @@ configurations {
dependencies {
    implementation 'org.jitsi:org.otr4j:0.23'
    implementation 'com.github.webrtc-sdk:android:93.4577.01'
    implementation project(':libs:android-transcoder')
    playstoreImplementation('com.google.firebase:firebase-messaging:22.0.0') {
        exclude group: 'com.google.firebase', module: 'firebase-core'
        exclude group: 'com.google.firebase', module: 'firebase-analytics'
@@ -91,7 +90,7 @@ dependencies {
    implementation 'com.github.AppIntro:AppIntro:6.1.0'
    implementation 'androidx.browser:browser:1.3.0'
    implementation 'com.otaliastudios:transcoder:0.9.1' // 0.10.4 seems to be buggy
    //implementation fileTree(include: ['libwebrtc-m92.aar'], dir: 'libs')
    implementation fileTree(include: ['libwebrtc-m92.aar'], dir: 'libs')
}

ext {
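The hunks above drop the vendored libs:android-transcoder project in favour of the com.otaliastudios:transcoder dependency. For orientation only, the following is a minimal sketch of how that library's builder API (0.9.x/0.10.x line) is typically driven; the paths and the empty listener callbacks are illustrative assumptions, not code taken from this commit.

import java.util.concurrent.Future;

import com.otaliastudios.transcoder.Transcoder;
import com.otaliastudios.transcoder.TranscoderListener;

// Minimal sketch of the replacement library's API; paths are placeholders, not monocles_chat code.
final class NewTranscoderExample {
    static Future<Void> compress(String inPath, String outPath) {
        return Transcoder.into(outPath)
                .addDataSource(inPath)
                .setListener(new TranscoderListener() {
                    @Override public void onTranscodeProgress(double progress) { /* update a progress bar */ }
                    @Override public void onTranscodeCompleted(int successCode) { /* send or store the file */ }
                    @Override public void onTranscodeCanceled() { }
                    @Override public void onTranscodeFailed(Throwable exception) { /* surface the error */ }
                })
                .transcode(); // returns a Future<Void> that can be cancelled
    }
}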
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>libs</name>
    <comment>Project libs created by Buildship.</comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.buildship.core.gradleprojectbuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.buildship.core.gradleprojectnature</nature>
    </natures>
    <filteredResources>
        <filter>
            <id>1632834367788</id>
            <name></name>
            <type>30</type>
            <matcher>
                <id>org.eclipse.core.resources.regexFilterMatcher</id>
                <arguments>node_modules|.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
            </matcher>
        </filter>
    </filteredResources>
</projectDescription>
libs/android-transcoder/.gitignore (vendored, 1 deletion)

@@ -1 +0,0 @@
/build
@@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>android-transcoder</name>
    <comment>Project android-transcoder created by Buildship.</comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.buildship.core.gradleprojectbuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.jdt.core.javanature</nature>
        <nature>org.eclipse.buildship.core.gradleprojectnature</nature>
    </natures>
    <filteredResources>
        <filter>
            <id>1632834367782</id>
            <name></name>
            <type>30</type>
            <matcher>
                <id>org.eclipse.core.resources.regexFilterMatcher</id>
                <arguments>node_modules|.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
            </matcher>
        </filter>
    </filteredResources>
</projectDescription>
@@ -1,17 +0,0 @@
## 0.3.0
- Fix cancel() sometimes not working. (Thanks @strayerM and @PinkFloyded)
- Geolocation support on API>=19. (Thanks @hkurokawa)

## 0.2.0
- Experimental audio transcoding support. (Thanks @aaron112)
- Fix transcode does not run on Huawei Ascend P7. (Thanks @spiritedRunning)
- Fix race condition caused by not closing output before callback. (Thanks @ryanwilliams83)

## 0.1.10
- `Future` support. (Thanks @MaiKambayashi)

## 0.1.X
- Stability updates. (Thanks @ozyozyo)

## 0.1.0
- First release.
@@ -1,32 +0,0 @@
buildscript {
    repositories {
        jcenter()
        //mavenCentral()
    }
}

apply plugin: 'com.android.library'

dependencies {
    implementation 'androidx.legacy:legacy-support-v13:1.0.0'
}

android {

    compileSdkVersion 29
    buildToolsVersion '30.0.3'

    defaultConfig {
        minSdkVersion 18
        targetSdkVersion 29
    }

    buildTypes {
        release {
            zipAlignEnabled true
            minifyEnabled true
            shrinkResources false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
}
libs/android-transcoder/proguard-rules.pro (vendored, 17 deletions)
@@ -1,17 +0,0 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/yuya.tanaka/devel/android-sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html

# Add any project specific keep options here:

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}
@@ -1,5 +0,0 @@
<manifest package="net.ypresto.androidtranscoder">

    <application />

</manifest>
@@ -1,248 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 Yuya Tanaka
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.ypresto.androidtranscoder;
|
||||
|
||||
import android.media.MediaFormat;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.util.Log;
|
||||
|
||||
import net.ypresto.androidtranscoder.engine.MediaTranscoderEngine;
|
||||
import net.ypresto.androidtranscoder.format.MediaFormatPresets;
|
||||
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;
|
||||
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.ThreadFactory;
|
||||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
public class MediaTranscoder {
|
||||
private static final String TAG = "MediaTranscoder";
|
||||
private static final int MAXIMUM_THREAD = 1; // TODO
|
||||
private static volatile MediaTranscoder sMediaTranscoder;
|
||||
private ThreadPoolExecutor mExecutor;
|
||||
|
||||
private MediaTranscoder() {
|
||||
mExecutor = new ThreadPoolExecutor(
|
||||
0, MAXIMUM_THREAD, 60, TimeUnit.SECONDS,
|
||||
new LinkedBlockingQueue<Runnable>(),
|
||||
new ThreadFactory() {
|
||||
@Override
|
||||
public Thread newThread(Runnable r) {
|
||||
return new Thread(r, "MediaTranscoder-Worker");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public static MediaTranscoder getInstance() {
|
||||
if (sMediaTranscoder == null) {
|
||||
synchronized (MediaTranscoder.class) {
|
||||
if (sMediaTranscoder == null) {
|
||||
sMediaTranscoder = new MediaTranscoder();
|
||||
}
|
||||
}
|
||||
}
|
||||
return sMediaTranscoder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transcodes video file asynchronously.
|
||||
* Audio track will be kept unchanged.
|
||||
*
|
||||
* @param inFileDescriptor FileDescriptor for input.
|
||||
* @param outPath File path for output.
|
||||
* @param listener Listener instance for callback.
|
||||
* @deprecated Use {@link #transcodeVideo(FileDescriptor, String, MediaFormatStrategy, MediaTranscoder.Listener)} which accepts output video format.
|
||||
*/
|
||||
@Deprecated
|
||||
public Future<Void> transcodeVideo(final FileDescriptor inFileDescriptor, final String outPath, final Listener listener) {
|
||||
return transcodeVideo(inFileDescriptor, outPath, new MediaFormatStrategy() {
|
||||
@Override
|
||||
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
|
||||
return MediaFormatPresets.getExportPreset960x540();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
|
||||
return null;
|
||||
}
|
||||
}, listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Transcodes video file asynchronously.
|
||||
* Audio track will be kept unchanged.
|
||||
*
|
||||
* @param inPath File path for input.
|
||||
* @param outPath File path for output.
|
||||
* @param outFormatStrategy Strategy for output video format.
|
||||
* @param listener Listener instance for callback.
|
||||
* @throws IOException if input file could not be read.
|
||||
*/
|
||||
public Future<Void> transcodeVideo(final String inPath, final String outPath, final MediaFormatStrategy outFormatStrategy, final Listener listener) throws IOException {
|
||||
FileInputStream fileInputStream = null;
|
||||
FileDescriptor inFileDescriptor;
|
||||
try {
|
||||
fileInputStream = new FileInputStream(inPath);
|
||||
inFileDescriptor = fileInputStream.getFD();
|
||||
} catch (IOException e) {
|
||||
if (fileInputStream != null) {
|
||||
try {
|
||||
fileInputStream.close();
|
||||
} catch (IOException eClose) {
|
||||
Log.e(TAG, "Can't close input stream: ", eClose);
|
||||
}
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
final FileInputStream finalFileInputStream = fileInputStream;
|
||||
return transcodeVideo(inFileDescriptor, outPath, outFormatStrategy, new Listener() {
|
||||
@Override
|
||||
public void onTranscodeProgress(double progress) {
|
||||
listener.onTranscodeProgress(progress);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTranscodeCompleted() {
|
||||
closeStream();
|
||||
listener.onTranscodeCompleted();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTranscodeCanceled() {
|
||||
closeStream();
|
||||
listener.onTranscodeCanceled();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTranscodeFailed(Exception exception) {
|
||||
closeStream();
|
||||
listener.onTranscodeFailed(exception);
|
||||
}
|
||||
|
||||
private void closeStream() {
|
||||
try {
|
||||
finalFileInputStream.close();
|
||||
} catch (IOException e) {
|
||||
Log.e(TAG, "Can't close input stream: ", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Transcodes video file asynchronously.
|
||||
* Audio track will be kept unchanged.
|
||||
*
|
||||
* @param inFileDescriptor FileDescriptor for input.
|
||||
* @param outPath File path for output.
|
||||
* @param outFormatStrategy Strategy for output video format.
|
||||
* @param listener Listener instance for callback.
|
||||
*/
|
||||
public Future<Void> transcodeVideo(final FileDescriptor inFileDescriptor, final String outPath, final MediaFormatStrategy outFormatStrategy, final Listener listener) {
|
||||
Looper looper = Looper.myLooper();
|
||||
if (looper == null) looper = Looper.getMainLooper();
|
||||
final Handler handler = new Handler(looper);
|
||||
final AtomicReference<Future<Void>> futureReference = new AtomicReference<>();
|
||||
final Future<Void> createdFuture = mExecutor.submit(new Callable<Void>() {
|
||||
@Override
|
||||
public Void call() throws Exception {
|
||||
Exception caughtException = null;
|
||||
try {
|
||||
MediaTranscoderEngine engine = new MediaTranscoderEngine();
|
||||
engine.setProgressCallback(new MediaTranscoderEngine.ProgressCallback() {
|
||||
@Override
|
||||
public void onProgress(final double progress) {
|
||||
handler.post(new Runnable() { // TODO: reuse instance
|
||||
@Override
|
||||
public void run() {
|
||||
listener.onTranscodeProgress(progress);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
engine.setDataSource(inFileDescriptor);
|
||||
engine.transcodeVideo(outPath, outFormatStrategy);
|
||||
} catch (IOException e) {
|
||||
Log.w(TAG, "Transcode failed: input file (fd: " + inFileDescriptor.toString() + ") not found"
|
||||
+ " or could not open output file ('" + outPath + "') .", e);
|
||||
caughtException = e;
|
||||
} catch (InterruptedException e) {
|
||||
Log.i(TAG, "Cancel transcode video file.", e);
|
||||
caughtException = e;
|
||||
} catch (RuntimeException e) {
|
||||
Log.e(TAG, "Fatal error while transcoding, this might be invalid format or bug in engine or Android.", e);
|
||||
caughtException = e;
|
||||
}
|
||||
|
||||
final Exception exception = caughtException;
|
||||
handler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (exception == null) {
|
||||
listener.onTranscodeCompleted();
|
||||
} else {
|
||||
Future<Void> future = futureReference.get();
|
||||
if (future != null && future.isCancelled()) {
|
||||
listener.onTranscodeCanceled();
|
||||
} else {
|
||||
listener.onTranscodeFailed(exception);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (exception != null) throw exception;
|
||||
return null;
|
||||
}
|
||||
});
|
||||
futureReference.set(createdFuture);
|
||||
return createdFuture;
|
||||
}
|
||||
|
||||
public interface Listener {
|
||||
/**
|
||||
* Called to notify progress.
|
||||
*
|
||||
* @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
|
||||
*/
|
||||
void onTranscodeProgress(double progress);
|
||||
|
||||
/**
|
||||
* Called when transcode completed.
|
||||
*/
|
||||
void onTranscodeCompleted();
|
||||
|
||||
/**
|
||||
* Called when transcode canceled.
|
||||
*/
|
||||
void onTranscodeCanceled();
|
||||
|
||||
/**
|
||||
* Called when transcode failed.
|
||||
*
|
||||
* @param exception Exception thrown from {@link MediaTranscoderEngine#transcodeVideo(String, MediaFormatStrategy)}.
|
||||
* Note that it IS NOT {@link java.lang.Throwable}. This means {@link java.lang.Error} won't be caught.
|
||||
*/
|
||||
void onTranscodeFailed(Exception exception);
|
||||
}
|
||||
}
|
|
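The deleted MediaTranscoder class above was the public entry point of the old library. Purely for orientation, a hypothetical call site would have looked like the sketch below; the paths and listener are placeholders, and the 960x540 preset simply mirrors what the deprecated overload in the deleted file itself uses.

import java.io.IOException;
import java.util.concurrent.Future;

import android.media.MediaFormat;

import net.ypresto.androidtranscoder.MediaTranscoder;
import net.ypresto.androidtranscoder.format.MediaFormatPresets;
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;

// Hypothetical caller of the removed API, for orientation only; not code from monocles_chat.
final class LegacyTranscoderExample {
    static Future<Void> compress(String inPath, String outPath, MediaTranscoder.Listener listener)
            throws IOException {
        MediaFormatStrategy strategy = new MediaFormatStrategy() {
            @Override
            public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
                return MediaFormatPresets.getExportPreset960x540(); // preset referenced by the deleted code above
            }

            @Override
            public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
                return null; // null keeps the audio track unchanged
            }
        };
        return MediaTranscoder.getInstance().transcodeVideo(inPath, outPath, strategy, listener);
    }
}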
@@ -1,42 +0,0 @@
|
|||
package net.ypresto.androidtranscoder.compat;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* A Wrapper to MediaCodec that facilitates the use of API-dependent get{Input/Output}Buffer methods,
|
||||
* in order to prevent: http://stackoverflow.com/q/30646885
|
||||
*/
|
||||
public class MediaCodecBufferCompatWrapper {
|
||||
|
||||
final MediaCodec mMediaCodec;
|
||||
final ByteBuffer[] mInputBuffers;
|
||||
final ByteBuffer[] mOutputBuffers;
|
||||
|
||||
public MediaCodecBufferCompatWrapper(MediaCodec mediaCodec) {
|
||||
mMediaCodec = mediaCodec;
|
||||
|
||||
if (Build.VERSION.SDK_INT < 21) {
|
||||
mInputBuffers = mediaCodec.getInputBuffers();
|
||||
mOutputBuffers = mediaCodec.getOutputBuffers();
|
||||
} else {
|
||||
mInputBuffers = mOutputBuffers = null;
|
||||
}
|
||||
}
|
||||
|
||||
public ByteBuffer getInputBuffer(final int index) {
|
||||
if (Build.VERSION.SDK_INT >= 21) {
|
||||
return mMediaCodec.getInputBuffer(index);
|
||||
}
|
||||
return mInputBuffers[index];
|
||||
}
|
||||
|
||||
public ByteBuffer getOutputBuffer(final int index) {
|
||||
if (Build.VERSION.SDK_INT >= 21) {
|
||||
return mMediaCodec.getOutputBuffer(index);
|
||||
}
|
||||
return mOutputBuffers[index];
|
||||
}
|
||||
}
|
|
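The deleted wrapper above only hides the pre-API-21 versus API-21+ buffer lookup. A hypothetical consumer (the decoder instance, timeout and sample bytes are placeholders) would pair it with the standard MediaCodec queueing calls roughly like this:

import java.nio.ByteBuffer;

import android.media.MediaCodec;

import net.ypresto.androidtranscoder.compat.MediaCodecBufferCompatWrapper;

// Illustrative pairing of the deleted compat wrapper with MediaCodec; values are placeholders.
final class BufferCompatExample {
    static void feedOneSample(MediaCodec decoder, byte[] sample, long presentationTimeUs) {
        MediaCodecBufferCompatWrapper buffers = new MediaCodecBufferCompatWrapper(decoder);
        int index = decoder.dequeueInputBuffer(10_000);     // 10 ms timeout, arbitrary for the sketch
        if (index < 0) return;                              // no input buffer available right now
        ByteBuffer buffer = buffers.getInputBuffer(index);  // picks the right lookup for the API level
        buffer.clear();
        buffer.put(sample);
        decoder.queueInputBuffer(index, 0, sample.length, presentationTimeUs, 0);
    }
}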
@@ -1,102 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 Yuya Tanaka
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.ypresto.androidtranscoder.compat;
|
||||
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
/**
|
||||
* This class emulates basic behavior of MediaCodecList in API level >= 21.
|
||||
* TODO: implement delegate to MediaCodecList in newer API.
|
||||
*/
|
||||
public class MediaCodecListCompat {
|
||||
public static final int REGULAR_CODECS = 0;
|
||||
public static final int ALL_CODECS = 1;
|
||||
|
||||
public MediaCodecListCompat(int kind) {
|
||||
if (kind != REGULAR_CODECS) {
|
||||
throw new UnsupportedOperationException("kind other than REGULAR_CODECS is not implemented.");
|
||||
}
|
||||
}
|
||||
|
||||
public final String findDecoderForFormat(MediaFormat format) {
|
||||
return findCoderForFormat(format, false);
|
||||
}
|
||||
|
||||
public final String findEncoderForFormat(MediaFormat format) {
|
||||
return findCoderForFormat(format, true);
|
||||
}
|
||||
|
||||
private String findCoderForFormat(MediaFormat format, boolean findEncoder) {
|
||||
String mimeType = format.getString(MediaFormat.KEY_MIME);
|
||||
Iterator<MediaCodecInfo> iterator = new MediaCodecInfoIterator();
|
||||
while (iterator.hasNext()) {
|
||||
MediaCodecInfo codecInfo = iterator.next();
|
||||
if (codecInfo.isEncoder() != findEncoder) continue;
|
||||
if (Arrays.asList(codecInfo.getSupportedTypes()).contains(mimeType)) {
|
||||
return codecInfo.getName();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public final MediaCodecInfo[] getCodecInfos() {
|
||||
int codecCount = getCodecCount();
|
||||
MediaCodecInfo[] codecInfos = new MediaCodecInfo[codecCount];
|
||||
Iterator<MediaCodecInfo> iterator = new MediaCodecInfoIterator();
|
||||
for (int i = 0; i < codecCount; i++) {
|
||||
codecInfos[i] = getCodecInfoAt(i);
|
||||
}
|
||||
return codecInfos;
|
||||
}
|
||||
|
||||
private static int getCodecCount() {
|
||||
return MediaCodecList.getCodecCount();
|
||||
}
|
||||
|
||||
private static MediaCodecInfo getCodecInfoAt(int index) {
|
||||
return MediaCodecList.getCodecInfoAt(index);
|
||||
}
|
||||
|
||||
private final class MediaCodecInfoIterator implements Iterator<MediaCodecInfo> {
|
||||
private int mCodecCount = getCodecCount();
|
||||
private int mIndex = -1;
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return mIndex + 1 < mCodecCount;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MediaCodecInfo next() {
|
||||
if (!hasNext()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
mIndex++;
|
||||
return getCodecInfoAt(mIndex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,231 +0,0 @@
|
|||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import net.ypresto.androidtranscoder.compat.MediaCodecBufferCompatWrapper;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.ShortBuffer;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.Queue;
|
||||
|
||||
/**
|
||||
* Channel of raw audio from decoder to encoder.
|
||||
* Performs the necessary conversion between different input & output audio formats.
|
||||
*
|
||||
* We currently support upmixing from mono to stereo & downmixing from stereo to mono.
|
||||
* Sample rate conversion is not supported yet.
|
||||
*/
|
||||
class AudioChannel {
|
||||
|
||||
private static class AudioBuffer {
|
||||
int bufferIndex;
|
||||
long presentationTimeUs;
|
||||
ShortBuffer data;
|
||||
}
|
||||
|
||||
public static final int BUFFER_INDEX_END_OF_STREAM = -1;
|
||||
|
||||
private static final int BYTES_PER_SHORT = 2;
|
||||
private static final long MICROSECS_PER_SEC = 1000000;
|
||||
|
||||
private final Queue<AudioBuffer> mEmptyBuffers = new ArrayDeque<>();
|
||||
private final Queue<AudioBuffer> mFilledBuffers = new ArrayDeque<>();
|
||||
|
||||
private final MediaCodec mDecoder;
|
||||
private final MediaCodec mEncoder;
|
||||
private final MediaFormat mEncodeFormat;
|
||||
|
||||
private int mInputSampleRate;
|
||||
private int mInputChannelCount;
|
||||
private int mOutputChannelCount;
|
||||
|
||||
private AudioRemixer mRemixer;
|
||||
|
||||
private final MediaCodecBufferCompatWrapper mDecoderBuffers;
|
||||
private final MediaCodecBufferCompatWrapper mEncoderBuffers;
|
||||
|
||||
private final AudioBuffer mOverflowBuffer = new AudioBuffer();
|
||||
|
||||
private MediaFormat mActualDecodedFormat;
|
||||
|
||||
|
||||
public AudioChannel(final MediaCodec decoder,
|
||||
final MediaCodec encoder, final MediaFormat encodeFormat) {
|
||||
mDecoder = decoder;
|
||||
mEncoder = encoder;
|
||||
mEncodeFormat = encodeFormat;
|
||||
|
||||
mDecoderBuffers = new MediaCodecBufferCompatWrapper(mDecoder);
|
||||
mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
|
||||
}
|
||||
|
||||
public void setActualDecodedFormat(final MediaFormat decodedFormat) {
|
||||
mActualDecodedFormat = decodedFormat;
|
||||
|
||||
mInputSampleRate = mActualDecodedFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
|
||||
if (mInputSampleRate != mEncodeFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)) {
|
||||
throw new UnsupportedOperationException("Audio sample rate conversion not supported yet.");
|
||||
}
|
||||
|
||||
mInputChannelCount = mActualDecodedFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
|
||||
mOutputChannelCount = mEncodeFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
|
||||
|
||||
if (mInputChannelCount != 1 && mInputChannelCount != 2) {
|
||||
throw new UnsupportedOperationException("Input channel count (" + mInputChannelCount + ") not supported.");
|
||||
}
|
||||
|
||||
if (mOutputChannelCount != 1 && mOutputChannelCount != 2) {
|
||||
throw new UnsupportedOperationException("Output channel count (" + mOutputChannelCount + ") not supported.");
|
||||
}
|
||||
|
||||
if (mInputChannelCount > mOutputChannelCount) {
|
||||
mRemixer = AudioRemixer.DOWNMIX;
|
||||
} else if (mInputChannelCount < mOutputChannelCount) {
|
||||
mRemixer = AudioRemixer.UPMIX;
|
||||
} else {
|
||||
mRemixer = AudioRemixer.PASSTHROUGH;
|
||||
}
|
||||
|
||||
mOverflowBuffer.presentationTimeUs = 0;
|
||||
}
|
||||
|
||||
public void drainDecoderBufferAndQueue(final int bufferIndex, final long presentationTimeUs) {
|
||||
if (mActualDecodedFormat == null) {
|
||||
throw new RuntimeException("Buffer received before format!");
|
||||
}
|
||||
|
||||
final ByteBuffer data =
|
||||
bufferIndex == BUFFER_INDEX_END_OF_STREAM ?
|
||||
null : mDecoderBuffers.getOutputBuffer(bufferIndex);
|
||||
|
||||
AudioBuffer buffer = mEmptyBuffers.poll();
|
||||
if (buffer == null) {
|
||||
buffer = new AudioBuffer();
|
||||
}
|
||||
|
||||
buffer.bufferIndex = bufferIndex;
|
||||
buffer.presentationTimeUs = presentationTimeUs;
|
||||
buffer.data = data == null ? null : data.asShortBuffer();
|
||||
|
||||
if (mOverflowBuffer.data == null) {
|
||||
mOverflowBuffer.data = ByteBuffer
|
||||
.allocateDirect(data.capacity())
|
||||
.order(ByteOrder.nativeOrder())
|
||||
.asShortBuffer();
|
||||
mOverflowBuffer.data.clear().flip();
|
||||
}
|
||||
|
||||
mFilledBuffers.add(buffer);
|
||||
}
|
||||
|
||||
public boolean feedEncoder(long timeoutUs) {
|
||||
final boolean hasOverflow = mOverflowBuffer.data != null && mOverflowBuffer.data.hasRemaining();
|
||||
if (mFilledBuffers.isEmpty() && !hasOverflow) {
|
||||
// No audio data - Bail out
|
||||
return false;
|
||||
}
|
||||
|
||||
final int encoderInBuffIndex = mEncoder.dequeueInputBuffer(timeoutUs);
|
||||
if (encoderInBuffIndex < 0) {
|
||||
// Encoder is full - Bail out
|
||||
return false;
|
||||
}
|
||||
|
||||
// Drain overflow first
|
||||
final ShortBuffer outBuffer = mEncoderBuffers.getInputBuffer(encoderInBuffIndex).asShortBuffer();
|
||||
if (hasOverflow) {
|
||||
final long presentationTimeUs = drainOverflow(outBuffer);
|
||||
mEncoder.queueInputBuffer(encoderInBuffIndex,
|
||||
0, outBuffer.position() * BYTES_PER_SHORT,
|
||||
presentationTimeUs, 0);
|
||||
return true;
|
||||
}
|
||||
|
||||
final AudioBuffer inBuffer = mFilledBuffers.poll();
|
||||
if (inBuffer.bufferIndex == BUFFER_INDEX_END_OF_STREAM) {
|
||||
mEncoder.queueInputBuffer(encoderInBuffIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
return false;
|
||||
}
|
||||
|
||||
final long presentationTimeUs = remixAndMaybeFillOverflow(inBuffer, outBuffer);
|
||||
mEncoder.queueInputBuffer(encoderInBuffIndex,
|
||||
0, outBuffer.position() * BYTES_PER_SHORT,
|
||||
presentationTimeUs, 0);
|
||||
if (inBuffer != null) {
|
||||
mDecoder.releaseOutputBuffer(inBuffer.bufferIndex, false);
|
||||
mEmptyBuffers.add(inBuffer);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static long sampleCountToDurationUs(final int sampleCount,
|
||||
final int sampleRate,
|
||||
final int channelCount) {
|
||||
return (sampleCount / (sampleRate * MICROSECS_PER_SEC)) / channelCount;
|
||||
}
|
||||
|
||||
private long drainOverflow(final ShortBuffer outBuff) {
|
||||
final ShortBuffer overflowBuff = mOverflowBuffer.data;
|
||||
final int overflowLimit = overflowBuff.limit();
|
||||
final int overflowSize = overflowBuff.remaining();
|
||||
|
||||
final long beginPresentationTimeUs = mOverflowBuffer.presentationTimeUs +
|
||||
sampleCountToDurationUs(overflowBuff.position(), mInputSampleRate, mOutputChannelCount);
|
||||
|
||||
outBuff.clear();
|
||||
// Limit overflowBuff to outBuff's capacity
|
||||
overflowBuff.limit(outBuff.capacity());
|
||||
// Load overflowBuff onto outBuff
|
||||
outBuff.put(overflowBuff);
|
||||
|
||||
if (overflowSize >= outBuff.capacity()) {
|
||||
// Overflow fully consumed - Reset
|
||||
overflowBuff.clear().limit(0);
|
||||
} else {
|
||||
// Only partially consumed - Keep position & restore previous limit
|
||||
overflowBuff.limit(overflowLimit);
|
||||
}
|
||||
|
||||
return beginPresentationTimeUs;
|
||||
}
|
||||
|
||||
private long remixAndMaybeFillOverflow(final AudioBuffer input,
|
||||
final ShortBuffer outBuff) {
|
||||
final ShortBuffer inBuff = input.data;
|
||||
final ShortBuffer overflowBuff = mOverflowBuffer.data;
|
||||
|
||||
outBuff.clear();
|
||||
|
||||
// Reset position to 0, and set limit to capacity (Since MediaCodec doesn't do that for us)
|
||||
inBuff.clear();
|
||||
|
||||
if (inBuff.remaining() > outBuff.remaining()) {
|
||||
// Overflow
|
||||
// Limit inBuff to outBuff's capacity
|
||||
inBuff.limit(outBuff.capacity());
|
||||
mRemixer.remix(inBuff, outBuff);
|
||||
|
||||
// Reset limit to its own capacity & Keep position
|
||||
inBuff.limit(inBuff.capacity());
|
||||
|
||||
// Remix the rest onto overflowBuffer
|
||||
// NOTE: We should only reach this point when overflow buffer is empty
|
||||
final long consumedDurationUs =
|
||||
sampleCountToDurationUs(inBuff.position(), mInputSampleRate, mInputChannelCount);
|
||||
mRemixer.remix(inBuff, overflowBuff);
|
||||
|
||||
// Seal off overflowBuff & mark limit
|
||||
overflowBuff.flip();
|
||||
mOverflowBuffer.presentationTimeUs = input.presentationTimeUs + consumedDurationUs;
|
||||
} else {
|
||||
// No overflow
|
||||
mRemixer.remix(inBuff, outBuff);
|
||||
}
|
||||
|
||||
return input.presentationTimeUs;
|
||||
}
|
||||
}
|
|
@@ -1,66 +0,0 @@
|
|||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
import java.nio.ShortBuffer;
|
||||
|
||||
public interface AudioRemixer {
|
||||
void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff);
|
||||
|
||||
AudioRemixer DOWNMIX = new AudioRemixer() {
|
||||
private static final int SIGNED_SHORT_LIMIT = 32768;
|
||||
private static final int UNSIGNED_SHORT_MAX = 65535;
|
||||
|
||||
@Override
|
||||
public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
|
||||
// Down-mix stereo to mono
|
||||
// Viktor Toth's algorithm -
|
||||
// See: http://www.vttoth.com/CMS/index.php/technical-notes/68
|
||||
// http://stackoverflow.com/a/25102339
|
||||
final int inRemaining = inSBuff.remaining() / 2;
|
||||
final int outSpace = outSBuff.remaining();
|
||||
|
||||
final int samplesToBeProcessed = Math.min(inRemaining, outSpace);
|
||||
for (int i = 0; i < samplesToBeProcessed; ++i) {
|
||||
// Convert to unsigned
|
||||
final int a = inSBuff.get() + SIGNED_SHORT_LIMIT;
|
||||
final int b = inSBuff.get() + SIGNED_SHORT_LIMIT;
|
||||
int m;
|
||||
// Pick the equation
|
||||
if ((a < SIGNED_SHORT_LIMIT) || (b < SIGNED_SHORT_LIMIT)) {
|
||||
// Viktor's first equation when both sources are "quiet"
|
||||
// (i.e. less than middle of the dynamic range)
|
||||
m = a * b / SIGNED_SHORT_LIMIT;
|
||||
} else {
|
||||
// Viktor's second equation when one or both sources are loud
|
||||
m = 2 * (a + b) - (a * b) / SIGNED_SHORT_LIMIT - UNSIGNED_SHORT_MAX;
|
||||
}
|
||||
// Convert output back to signed short
|
||||
if (m == UNSIGNED_SHORT_MAX + 1) m = UNSIGNED_SHORT_MAX;
|
||||
outSBuff.put((short) (m - SIGNED_SHORT_LIMIT));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
AudioRemixer UPMIX = new AudioRemixer() {
|
||||
@Override
|
||||
public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
|
||||
// Up-mix mono to stereo
|
||||
final int inRemaining = inSBuff.remaining();
|
||||
final int outSpace = outSBuff.remaining() / 2;
|
||||
|
||||
final int samplesToBeProcessed = Math.min(inRemaining, outSpace);
|
||||
for (int i = 0; i < samplesToBeProcessed; ++i) {
|
||||
final short inSample = inSBuff.get();
|
||||
outSBuff.put(inSample);
|
||||
outSBuff.put(inSample);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
AudioRemixer PASSTHROUGH = new AudioRemixer() {
|
||||
@Override
|
||||
public void remix(final ShortBuffer inSBuff, final ShortBuffer outSBuff) {
|
||||
// Passthrough
|
||||
outSBuff.put(inSBuff);
|
||||
}
|
||||
};
|
||||
}
|
|
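To make the stereo-to-mono downmix above concrete (sample values chosen purely for illustration): after shifting into the unsigned range, inputs of 10000 and 20000 are both below 32768, so the quiet branch applies and m = 10000 * 20000 / 32768 = 6103 with integer division, giving a signed output of 6103 - 32768 = -26665. For 40000 and 50000, at least one source is loud, so m = 2 * (40000 + 50000) - 40000 * 50000 / 32768 - 65535 = 53430, i.e. a signed output of 20662.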
@@ -1,209 +0,0 @@
|
|||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import net.ypresto.androidtranscoder.compat.MediaCodecBufferCompatWrapper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class AudioTrackTranscoder implements TrackTranscoder {
|
||||
|
||||
private static final QueuedMuxer.SampleType SAMPLE_TYPE = QueuedMuxer.SampleType.AUDIO;
|
||||
|
||||
private static final int DRAIN_STATE_NONE = 0;
|
||||
private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
|
||||
private static final int DRAIN_STATE_CONSUMED = 2;
|
||||
|
||||
private final MediaExtractor mExtractor;
|
||||
private final QueuedMuxer mMuxer;
|
||||
private long mWrittenPresentationTimeUs;
|
||||
|
||||
private final int mTrackIndex;
|
||||
private final MediaFormat mInputFormat;
|
||||
private final MediaFormat mOutputFormat;
|
||||
|
||||
private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
|
||||
private MediaCodec mDecoder;
|
||||
private MediaCodec mEncoder;
|
||||
private MediaFormat mActualOutputFormat;
|
||||
|
||||
private MediaCodecBufferCompatWrapper mDecoderBuffers;
|
||||
private MediaCodecBufferCompatWrapper mEncoderBuffers;
|
||||
|
||||
private boolean mIsExtractorEOS;
|
||||
private boolean mIsDecoderEOS;
|
||||
private boolean mIsEncoderEOS;
|
||||
private boolean mDecoderStarted;
|
||||
private boolean mEncoderStarted;
|
||||
|
||||
private AudioChannel mAudioChannel;
|
||||
|
||||
public AudioTrackTranscoder(MediaExtractor extractor, int trackIndex,
|
||||
MediaFormat outputFormat, QueuedMuxer muxer) {
|
||||
mExtractor = extractor;
|
||||
mTrackIndex = trackIndex;
|
||||
mOutputFormat = outputFormat;
|
||||
mMuxer = muxer;
|
||||
|
||||
mInputFormat = mExtractor.getTrackFormat(mTrackIndex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() {
|
||||
mExtractor.selectTrack(mTrackIndex);
|
||||
try {
|
||||
mEncoder = MediaCodec.createEncoderByType(mOutputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
mEncoder.start();
|
||||
mEncoderStarted = true;
|
||||
mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
|
||||
|
||||
final MediaFormat inputFormat = mExtractor.getTrackFormat(mTrackIndex);
|
||||
try {
|
||||
mDecoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
mDecoder.configure(inputFormat, null, null, 0);
|
||||
mDecoder.start();
|
||||
mDecoderStarted = true;
|
||||
mDecoderBuffers = new MediaCodecBufferCompatWrapper(mDecoder);
|
||||
|
||||
mAudioChannel = new AudioChannel(mDecoder, mEncoder, mOutputFormat);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MediaFormat getDeterminedFormat() {
|
||||
return mInputFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean stepPipeline() {
|
||||
boolean busy = false;
|
||||
|
||||
int status;
|
||||
while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
|
||||
do {
|
||||
status = drainDecoder(0);
|
||||
if (status != DRAIN_STATE_NONE) busy = true;
|
||||
// NOTE: not repeating to keep from deadlock when encoder is full.
|
||||
} while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
|
||||
|
||||
while (mAudioChannel.feedEncoder(0)) busy = true;
|
||||
while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;
|
||||
|
||||
return busy;
|
||||
}
|
||||
|
||||
private int drainExtractor(long timeoutUs) {
|
||||
if (mIsExtractorEOS) return DRAIN_STATE_NONE;
|
||||
int trackIndex = mExtractor.getSampleTrackIndex();
|
||||
if (trackIndex >= 0 && trackIndex != mTrackIndex) {
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
|
||||
final int result = mDecoder.dequeueInputBuffer(timeoutUs);
|
||||
if (result < 0) return DRAIN_STATE_NONE;
|
||||
if (trackIndex < 0) {
|
||||
mIsExtractorEOS = true;
|
||||
mDecoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
|
||||
return DRAIN_STATE_NONE;
|
||||
}
|
||||
|
||||
final int sampleSize = mExtractor.readSampleData(mDecoderBuffers.getInputBuffer(result), 0);
|
||||
final boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
|
||||
mDecoder.queueInputBuffer(result, 0, sampleSize, mExtractor.getSampleTime(), isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
|
||||
mExtractor.advance();
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainDecoder(long timeoutUs) {
|
||||
if (mIsDecoderEOS) return DRAIN_STATE_NONE;
|
||||
|
||||
int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
|
||||
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
mIsDecoderEOS = true;
|
||||
mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
|
||||
} else if (mBufferInfo.size > 0) {
|
||||
mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
|
||||
}
|
||||
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
private int drainEncoder(long timeoutUs) {
|
||||
if (mIsEncoderEOS) return DRAIN_STATE_NONE;
|
||||
|
||||
int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return DRAIN_STATE_NONE;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
if (mActualOutputFormat != null) {
|
||||
throw new RuntimeException("Audio output format changed twice.");
|
||||
}
|
||||
mActualOutputFormat = mEncoder.getOutputFormat();
|
||||
mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
|
||||
if (mActualOutputFormat == null) {
|
||||
throw new RuntimeException("Could not determine actual output format.");
|
||||
}
|
||||
|
||||
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
|
||||
mIsEncoderEOS = true;
|
||||
mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
|
||||
}
|
||||
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
|
||||
// SPS or PPS, which should be passed by MediaFormat.
|
||||
mEncoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
|
||||
}
|
||||
mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
|
||||
mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
|
||||
mEncoder.releaseOutputBuffer(result, false);
|
||||
return DRAIN_STATE_CONSUMED;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getWrittenPresentationTimeUs() {
|
||||
return mWrittenPresentationTimeUs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFinished() {
|
||||
return mIsEncoderEOS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (mDecoder != null) {
|
||||
if (mDecoderStarted) mDecoder.stop();
|
||||
mDecoder.release();
|
||||
mDecoder = null;
|
||||
}
|
||||
if (mEncoder != null) {
|
||||
if (mEncoderStarted) mEncoder.stop();
|
||||
mEncoder.release();
|
||||
mEncoder = null;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,175 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/InputSurface.java
|
||||
// blob: 157ed88d143229e4edb6889daf18fb73aa2fc5a5
|
||||
package net.ypresto.androidtranscoder.engine;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.view.Surface;
|
||||
/**
|
||||
* Holds state associated with a Surface used for MediaCodec encoder input.
|
||||
* <p>
|
||||
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
|
||||
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
|
||||
* to the video encoder.
|
||||
*/
|
||||
class InputSurface {
|
||||
private static final String TAG = "InputSurface";
|
||||
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
|
||||
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
|
||||
private Surface mSurface;
|
||||
/**
|
||||
* Creates an InputSurface from a Surface.
|
||||
*/
|
||||
public InputSurface(Surface surface) {
|
||||
if (surface == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
mSurface = surface;
|
||||
eglSetup();
|
||||
}
|
||||
/**
|
||||
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
|
||||
*/
|
||||
private void eglSetup() {
|
||||
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException("unable to get EGL14 display");
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
|
||||
mEGLDisplay = null;
|
||||
throw new RuntimeException("unable to initialize EGL14");
|
||||
}
|
||||
// Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
|
||||
// to minimize artifacts from possible YUV conversion.
|
||||
int[] attribList = {
|
||||
EGL14.EGL_RED_SIZE, 8,
|
||||
EGL14.EGL_GREEN_SIZE, 8,
|
||||
EGL14.EGL_BLUE_SIZE, 8,
|
||||
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
|
||||
EGL_RECORDABLE_ANDROID, 1,
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
|
||||
numConfigs, 0)) {
|
||||
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
|
||||
}
|
||||
// Configure context for OpenGL ES 2.0.
|
||||
int[] attrib_list = {
|
||||
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
|
||||
attrib_list, 0);
|
||||
checkEglError("eglCreateContext");
|
||||
if (mEGLContext == null) {
|
||||
throw new RuntimeException("null context");
|
||||
}
|
||||
// Create a window surface, and attach it to the Surface we received.
|
||||
int[] surfaceAttribs = {
|
||||
EGL14.EGL_NONE
|
||||
};
|
||||
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
|
||||
surfaceAttribs, 0);
|
||||
checkEglError("eglCreateWindowSurface");
|
||||
if (mEGLSurface == null) {
|
||||
throw new RuntimeException("surface was null");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Discard all resources held by this class, notably the EGL context. Also releases the
|
||||
* Surface that was passed to our constructor.
|
||||
*/
|
||||
public void release() {
|
||||
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
|
||||
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
|
||||
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(mEGLDisplay);
|
||||
}
|
||||
mSurface.release();
|
||||
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
mEGLContext = EGL14.EGL_NO_CONTEXT;
|
||||
mEGLSurface = EGL14.EGL_NO_SURFACE;
|
||||
mSurface = null;
|
||||
}
|
||||
/**
|
||||
* Makes our EGL context and surface current.
|
||||
*/
|
||||
public void makeCurrent() {
|
||||
if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
|
||||
throw new RuntimeException("eglMakeCurrent failed");
|
||||
}
|
||||
}
|
||||
public void makeUnCurrent() {
|
||||
if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
|
||||
EGL14.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException("eglMakeCurrent failed");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Calls eglSwapBuffers. Use this to "publish" the current frame.
|
||||
*/
|
||||
public boolean swapBuffers() {
|
||||
return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
|
||||
}
|
||||
/**
|
||||
* Returns the Surface that the MediaCodec receives buffers from.
|
||||
*/
|
||||
public Surface getSurface() {
|
||||
return mSurface;
|
||||
}
|
||||
/**
|
||||
* Queries the surface's width.
|
||||
*/
|
||||
public int getWidth() {
|
||||
int[] value = new int[1];
|
||||
EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_WIDTH, value, 0);
|
||||
return value[0];
|
||||
}
|
||||
/**
|
||||
* Queries the surface's height.
|
||||
*/
|
||||
public int getHeight() {
|
||||
int[] value = new int[1];
|
||||
EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_HEIGHT, value, 0);
|
||||
return value[0];
|
||||
}
|
||||
/**
|
||||
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
|
||||
*/
|
||||
public void setPresentationTime(long nsecs) {
|
||||
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
|
||||
}
|
||||
/**
|
||||
* Checks for EGL errors.
|
||||
*/
|
||||
private void checkEglError(String msg) {
|
||||
int error;
|
||||
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
|
||||
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
|
||||
}
|
||||
}
|
||||
}
|
|
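The deleted InputSurface class above wraps an encoder input Surface in an EGL window surface. The driving sequence sketched below is an assumption based on the class's public methods and the standard MediaCodec contract; the encoder format and the GL drawing step are placeholders, not code from this repository.

package net.ypresto.androidtranscoder.engine; // InputSurface is package-private, so this sketch lives in its package

import android.media.MediaCodec;
import android.media.MediaFormat;

// Assumed usage of the deleted InputSurface class; format and drawing are placeholders.
final class InputSurfaceExample {
    static void startEncoderWithSurfaceInput(MediaCodec encoder, MediaFormat outputFormat, long firstFramePtsNs) {
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        InputSurface inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();                         // bind the EGL context and window surface
        encoder.start();
        // ... draw one decoded frame with OpenGL ES here ...
        inputSurface.setPresentationTime(firstFramePtsNs);  // timestamp in nanoseconds
        inputSurface.swapBuffers();                         // publish the frame to the encoder
    }
}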
@@ -1,22 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2015 Yuya Tanaka
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
public class InvalidOutputFormatException extends RuntimeException {
|
||||
public InvalidOutputFormatException(String detailMessage) {
|
||||
super(detailMessage);
|
||||
}
|
||||
}
|
|
@@ -1,50 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2015 Yuya Tanaka
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
import android.media.MediaFormat;
|
||||
|
||||
import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;
|
||||
import net.ypresto.androidtranscoder.utils.AvcCsdUtils;
|
||||
import net.ypresto.androidtranscoder.utils.AvcSpsUtils;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
class MediaFormatValidator {
|
||||
// Refer: http://en.wikipedia.org/wiki/H.264/MPEG-4_AVC#Profiles
|
||||
private static final byte PROFILE_IDC_BASELINE = 66;
|
||||
|
||||
public static void validateVideoOutputFormat(MediaFormat format) {
|
||||
String mime = format.getString(MediaFormat.KEY_MIME);
|
||||
// Refer: http://developer.android.com/guide/appendix/media-formats.html#core
|
||||
// Refer: http://en.wikipedia.org/wiki/MPEG-4_Part_14#Data_streams
|
||||
if (!MediaFormatExtraConstants.MIMETYPE_VIDEO_AVC.equals(mime)) {
|
||||
throw new InvalidOutputFormatException("Video codecs other than AVC is not supported, actual mime type: " + mime);
|
||||
}
|
||||
ByteBuffer spsBuffer = AvcCsdUtils.getSpsBuffer(format);
|
||||
byte profileIdc = AvcSpsUtils.getProfileIdc(spsBuffer);
|
||||
if (profileIdc != PROFILE_IDC_BASELINE) {
|
||||
throw new InvalidOutputFormatException("Non-baseline AVC video profile is not supported by Android OS, actual profile_idc: " + profileIdc);
|
||||
}
|
||||
}
|
||||
|
||||
public static void validateAudioOutputFormat(MediaFormat format) {
|
||||
String mime = format.getString(MediaFormat.KEY_MIME);
|
||||
if (!MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC.equals(mime)) {
|
||||
throw new InvalidOutputFormatException("Audio codecs other than AAC is not supported, actual mime type: " + mime);
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,211 +0,0 @@
|
|||
/*
|
||||
* Copyright (C) 2014 Yuya Tanaka
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package net.ypresto.androidtranscoder.engine;
|
||||
|
||||
import android.media.MediaExtractor;
|
||||
import android.media.MediaFormat;
|
||||
import android.media.MediaMetadataRetriever;
|
||||
import android.media.MediaMuxer;
|
||||
import android.util.Log;
|
||||
|
||||
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;
|
||||
import net.ypresto.androidtranscoder.utils.MediaExtractorUtils;
|
||||
|
||||
import java.io.FileDescriptor;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Internal engine, do not use this directly.
|
||||
*/
|
||||
// TODO: treat encrypted data
|
||||
public class MediaTranscoderEngine {
|
||||
private static final String TAG = "MediaTranscoderEngine";
|
||||
private static final double PROGRESS_UNKNOWN = -1.0;
|
||||
private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
|
||||
private static final long PROGRESS_INTERVAL_STEPS = 10;
|
||||
private FileDescriptor mInputFileDescriptor;
|
||||
private TrackTranscoder mVideoTrackTranscoder;
|
||||
private TrackTranscoder mAudioTrackTranscoder;
|
||||
private MediaExtractor mExtractor;
|
||||
private MediaMuxer mMuxer;
|
||||
private volatile double mProgress;
|
||||
private ProgressCallback mProgressCallback;
|
||||
private long mDurationUs;
|
||||
|
||||
/**
|
||||
* Do not use this constructor unless you know what you are doing.
|
||||
*/
|
||||
public MediaTranscoderEngine() {
|
||||
}
|
||||
|
||||
public void setDataSource(FileDescriptor fileDescriptor) {
|
||||
mInputFileDescriptor = fileDescriptor;
|
||||
}
|
||||
|
||||
public ProgressCallback getProgressCallback() {
|
||||
return mProgressCallback;
|
||||
}
|
||||
|
||||
public void setProgressCallback(ProgressCallback progressCallback) {
|
||||
mProgressCallback = progressCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* NOTE: This method is thread safe.
|
||||
*/
|
||||
public double getProgress() {
|
||||
return mProgress;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run video transcoding. Blocks current thread.
|
||||
* Audio data will not be transcoded; original stream will be wrote to output file.
|
||||
*
|
||||
* @param outputPath File path to output transcoded video file.
|
||||
* @param formatStrategy Output format strategy.
|
||||
* @throws IOException when input or output file could not be opened.
|
||||
* @throws InvalidOutputFormatException when output format is not supported.
|
||||
* @throws InterruptedException when cancel to transcode.
|
||||
*/
|
||||
public void transcodeVideo(String outputPath, MediaFormatStrategy formatStrategy) throws IOException, InterruptedException {
|
||||
if (outputPath == null) {
|
||||
throw new NullPointerException("Output path cannot be null.");
|
||||
}
|
||||
if (mInputFileDescriptor == null) {
|
||||
throw new IllegalStateException("Data source is not set.");
|
||||
}
|
||||
try {
|
||||
// NOTE: use single extractor to keep from running out audio track fast.
|
||||
mExtractor = new MediaExtractor();
|
||||
mExtractor.setDataSource(mInputFileDescriptor);
|
||||
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
|
||||
setupMetadata();
|
||||
setupTrackTranscoders(formatStrategy);
|
||||
runPipelines();
|
||||
mMuxer.stop();
|
||||
} finally {
|
||||
try {
|
||||
if (mVideoTrackTranscoder != null) {
|
||||
mVideoTrackTranscoder.release();
|
||||
mVideoTrackTranscoder = null;
|
||||
}
|
||||
if (mAudioTrackTranscoder != null) {
|
||||
mAudioTrackTranscoder.release();
|
||||
mAudioTrackTranscoder = null;
|
||||
}
|
||||
if (mExtractor != null) {
|
||||
mExtractor.release();
|
||||
mExtractor = null;
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
// Too fatal to make alive the app, because it may leak native resources.
|
||||
//noinspection ThrowFromFinallyBlock
|
||||
throw new Error("Could not shutdown extractor, codecs and muxer pipeline.", e);
|
||||
}
|
||||
try {
|
||||
if (mMuxer != null) {
|
||||
mMuxer.release();
|
||||
mMuxer = null;
|
||||
}
|
||||
            } catch (RuntimeException e) {
                Log.e(TAG, "Failed to release muxer.", e);
            }
        }
    }

    private void setupMetadata() throws IOException {
        MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
        mediaMetadataRetriever.setDataSource(mInputFileDescriptor);

        String rotationString = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
        try {
            mMuxer.setOrientationHint(Integer.parseInt(rotationString));
        } catch (NumberFormatException e) {
            // skip
        }

        try {
            mDurationUs = Long.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
        } catch (NumberFormatException e) {
            mDurationUs = -1;
        }
        Log.d(TAG, "Duration (us): " + mDurationUs);
    }

    private void setupTrackTranscoders(MediaFormatStrategy formatStrategy) {
        MediaExtractorUtils.TrackResult trackResult = MediaExtractorUtils.getFirstVideoAndAudioTrack(mExtractor);
        MediaFormat videoOutputFormat = formatStrategy.createVideoOutputFormat(trackResult.mVideoTrackFormat);
        MediaFormat audioOutputFormat = formatStrategy.createAudioOutputFormat(trackResult.mAudioTrackFormat);
        if (videoOutputFormat == null && audioOutputFormat == null) {
            throw new InvalidOutputFormatException("MediaFormatStrategy returned pass-through for both video and audio. No transcoding is necessary.");
        }
        QueuedMuxer queuedMuxer = new QueuedMuxer(mMuxer, new QueuedMuxer.Listener() {
            @Override
            public void onDetermineOutputFormat() {
                MediaFormatValidator.validateVideoOutputFormat(mVideoTrackTranscoder.getDeterminedFormat());
                MediaFormatValidator.validateAudioOutputFormat(mAudioTrackTranscoder.getDeterminedFormat());
            }
        });

        if (videoOutputFormat == null) {
            mVideoTrackTranscoder = new PassThroughTrackTranscoder(mExtractor, trackResult.mVideoTrackIndex, queuedMuxer, QueuedMuxer.SampleType.VIDEO);
        } else {
            mVideoTrackTranscoder = new VideoTrackTranscoder(mExtractor, trackResult.mVideoTrackIndex, videoOutputFormat, queuedMuxer);
        }
        mVideoTrackTranscoder.setup();
        if (audioOutputFormat == null) {
            mAudioTrackTranscoder = new PassThroughTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, queuedMuxer, QueuedMuxer.SampleType.AUDIO);
        } else {
            mAudioTrackTranscoder = new AudioTrackTranscoder(mExtractor, trackResult.mAudioTrackIndex, audioOutputFormat, queuedMuxer);
        }
        mAudioTrackTranscoder.setup();
        mExtractor.selectTrack(trackResult.mVideoTrackIndex);
        mExtractor.selectTrack(trackResult.mAudioTrackIndex);
    }

    private void runPipelines() throws InterruptedException {
        long loopCount = 0;
        if (mDurationUs <= 0) {
            double progress = PROGRESS_UNKNOWN;
            mProgress = progress;
            if (mProgressCallback != null) mProgressCallback.onProgress(progress); // unknown
        }
        while (!(mVideoTrackTranscoder.isFinished() && mAudioTrackTranscoder.isFinished())) {
            boolean stepped = mVideoTrackTranscoder.stepPipeline()
                    || mAudioTrackTranscoder.stepPipeline();
            loopCount++;
            if (mDurationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
                double videoProgress = mVideoTrackTranscoder.isFinished() ? 1.0 : Math.min(1.0, (double) mVideoTrackTranscoder.getWrittenPresentationTimeUs() / mDurationUs);
                double audioProgress = mAudioTrackTranscoder.isFinished() ? 1.0 : Math.min(1.0, (double) mAudioTrackTranscoder.getWrittenPresentationTimeUs() / mDurationUs);
                double progress = (videoProgress + audioProgress) / 2.0;
                mProgress = progress;
                if (mProgressCallback != null) mProgressCallback.onProgress(progress);
            }
            if (!stepped) {
                Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
            }
        }
    }

    public interface ProgressCallback {
        /**
         * Called to notify progress. Same thread which initiated transcode is used.
         *
         * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
         */
        void onProgress(double progress);
    }
}
@ -1,276 +0,0 @@
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/OutputSurface.java
// blob: fc8ad9cd390c5c311f015d3b7c1359e4d295bc52
// modified: change TIMEOUT_MS from 500 to 10000
package net.ypresto.androidtranscoder.engine;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;

/**
 * Holds state associated with a Surface used for MediaCodec decoder output.
 * <p>
 * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
 * and then create a Surface for that SurfaceTexture.  The Surface can be passed to
 * MediaCodec.configure() to receive decoder output.  When a frame arrives, we latch the
 * texture with updateTexImage, then render the texture with GL to a pbuffer.
 * <p>
 * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
 * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
 * we just draw it on whatever surface is current.
 * <p>
 * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
 * can potentially drop frames.
 */
class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "OutputSurface";
    private static final boolean VERBOSE = false;
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
    private boolean mFrameAvailable;
    private TextureRender mTextureRender;

    /**
     * Creates an OutputSurface backed by a pbuffer with the specifed dimensions.  The new
     * EGL context and surface will be made current.  Creates a Surface that can be passed
     * to MediaCodec.configure().
     */
    public OutputSurface(int width, int height) {
        if (width <= 0 || height <= 0) {
            throw new IllegalArgumentException();
        }
        eglSetup(width, height);
        makeCurrent();
        setup();
    }

    /**
     * Creates an OutputSurface using the current EGL context (rather than establishing a
     * new one).  Creates a Surface that can be passed to MediaCodec.configure().
     */
    public OutputSurface() {
        setup();
    }

    /**
     * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
     * with the SurfaceTexture.
     */
    private void setup() {
        mTextureRender = new TextureRender();
        mTextureRender.surfaceCreated();
        // Even if we don't access the SurfaceTexture after the constructor returns, we
        // still need to keep a reference to it.  The Surface doesn't retain a reference
        // at the Java level, so if we don't either then the object can get GCed, which
        // causes the native finalizer to run.
        if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
        mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
        // This doesn't work if OutputSurface is created on the thread that CTS started for
        // these test cases.
        //
        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
        // create a Handler that uses it.  The "frame available" message is delivered
        // there, but since we're not a Looper-based thread we'll never see it.  For
        // this to do anything useful, OutputSurface must be created on a thread without
        // a Looper, so that SurfaceTexture uses the main application Looper instead.
        //
        // Java language note: passing "this" out of a constructor is generally unwise,
        // but we should be able to get away with it here.
        mSurfaceTexture.setOnFrameAvailableListener(this);
        mSurface = new Surface(mSurfaceTexture);
    }

    /**
     * Prepares EGL.  We want a GLES 2.0 context and a surface that supports pbuffer.
     */
    private void eglSetup(int width, int height) {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }
        // Configure EGL for pbuffer and OpenGL ES 2.0.  We want enough RGB bits
        // to be able to tell if the frame is reasonable.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
        }
        // Configure context for OpenGL ES 2.0.
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                attrib_list, 0);
        checkEglError("eglCreateContext");
        if (mEGLContext == null) {
            throw new RuntimeException("null context");
        }
        // Create a pbuffer surface.  By using this for output, we can use glReadPixels
        // to test values in the output.
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height,
                EGL14.EGL_NONE
        };
        mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
        checkEglError("eglCreatePbufferSurface");
        if (mEGLSurface == null) {
            throw new RuntimeException("surface was null");
        }
    }

    /**
     * Discard all resources held by this class, notably the EGL context.
     */
    public void release() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }
        mSurface.release();
        // this causes a bunch of warnings that appear harmless but might confuse someone:
        //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
        //mSurfaceTexture.release();
        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        mEGLContext = EGL14.EGL_NO_CONTEXT;
        mEGLSurface = EGL14.EGL_NO_SURFACE;
        mTextureRender = null;
        mSurface = null;
        mSurfaceTexture = null;
    }

    /**
     * Makes our EGL context and surface current.
     */
    public void makeCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /**
     * Returns the Surface that we draw onto.
     */
    public Surface getSurface() {
        return mSurface;
    }

    /**
     * Replaces the fragment shader.
     */
    public void changeFragmentShader(String fragmentShader) {
        mTextureRender.changeFragmentShader(fragmentShader);
    }

    /**
     * Latches the next buffer into the texture.  Must be called from the thread that created
     * the OutputSurface object, after the onFrameAvailable callback has signaled that new
     * data is available.
     */
    public void awaitNewImage() {
        final int TIMEOUT_MS = 10000;
        synchronized (mFrameSyncObject) {
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    mFrameSyncObject.wait(TIMEOUT_MS);
                    if (!mFrameAvailable) {
                        // TODO: if "spurious wakeup", continue while loop
                        throw new RuntimeException("Surface frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            mFrameAvailable = false;
        }
        // Latch the data.
        mTextureRender.checkGlError("before updateTexImage");
        mSurfaceTexture.updateTexImage();
    }

    /**
     * Wait up to given timeout until new image become available.
     * @param timeoutMs
     * @return true if new image is available. false for no new image until timeout.
     */
    public boolean checkForNewImage(int timeoutMs) {
        synchronized (mFrameSyncObject) {
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    mFrameSyncObject.wait(timeoutMs);
                    if (!mFrameAvailable) {
                        return false;
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            mFrameAvailable = false;
        }
        // Latch the data.
        mTextureRender.checkGlError("before updateTexImage");
        mSurfaceTexture.updateTexImage();
        return true;
    }

    /**
     * Draws the data from SurfaceTexture onto the current EGL surface.
     */
    public void drawImage() {
        mTextureRender.drawFrame(mSurfaceTexture);
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (mFrameSyncObject) {
            if (mFrameAvailable) {
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            mFrameAvailable = true;
            mFrameSyncObject.notifyAll();
        }
    }

    /**
     * Checks for EGL errors.
     */
    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }
}
@ -1,100 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.engine;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class PassThroughTrackTranscoder implements TrackTranscoder {
    private final MediaExtractor mExtractor;
    private final int mTrackIndex;
    private final QueuedMuxer mMuxer;
    private final QueuedMuxer.SampleType mSampleType;
    private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private int mBufferSize;
    private ByteBuffer mBuffer;
    private boolean mIsEOS;
    private MediaFormat mActualOutputFormat;
    private long mWrittenPresentationTimeUs;

    public PassThroughTrackTranscoder(MediaExtractor extractor, int trackIndex,
                                      QueuedMuxer muxer, QueuedMuxer.SampleType sampleType) {
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mMuxer = muxer;
        mSampleType = sampleType;

        mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex);
        mMuxer.setOutputFormat(mSampleType, mActualOutputFormat);
        mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder());
    }

    @Override
    public void setup() {
    }

    @Override
    public MediaFormat getDeterminedFormat() {
        return mActualOutputFormat;
    }

    @SuppressLint("Assert")
    @Override
    public boolean stepPipeline() {
        if (mIsEOS) return false;
        int trackIndex = mExtractor.getSampleTrackIndex();
        if (trackIndex < 0) {
            mBuffer.clear();
            mBufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo);
            mIsEOS = true;
            return true;
        }
        if (trackIndex != mTrackIndex) return false;

        mBuffer.clear();
        int sampleSize = mExtractor.readSampleData(mBuffer, 0);
        assert sampleSize <= mBufferSize;
        boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
        int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0;
        mBufferInfo.set(0, sampleSize, mExtractor.getSampleTime(), flags);
        mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo);
        mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;

        mExtractor.advance();
        return true;
    }

    @Override
    public long getWrittenPresentationTimeUs() {
        return mWrittenPresentationTimeUs;
    }

    @Override
    public boolean isFinished() {
        return mIsEOS;
    }

    @Override
    public void release() {
    }
}
@ -1,140 +0,0 @@
/*
 * Copyright (C) 2015 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.engine;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;

/**
 * This class queues until all output track formats are determined.
 */
public class QueuedMuxer {
    private static final String TAG = "QueuedMuxer";
    private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
    private final MediaMuxer mMuxer;
    private final Listener mListener;
    private MediaFormat mVideoFormat;
    private MediaFormat mAudioFormat;
    private int mVideoTrackIndex;
    private int mAudioTrackIndex;
    private ByteBuffer mByteBuffer;
    private final List<SampleInfo> mSampleInfoList;
    private boolean mStarted;

    public QueuedMuxer(MediaMuxer muxer, Listener listener) {
        mMuxer = muxer;
        mListener = listener;
        mSampleInfoList = new ArrayList<>();
    }

    public void setOutputFormat(SampleType sampleType, MediaFormat format) {
        switch (sampleType) {
            case VIDEO:
                mVideoFormat = format;
                break;
            case AUDIO:
                mAudioFormat = format;
                break;
            default:
                throw new AssertionError();
        }
        onSetOutputFormat();
    }

    private void onSetOutputFormat() {
        if (mVideoFormat == null || mAudioFormat == null) return;
        mListener.onDetermineOutputFormat();

        mVideoTrackIndex = mMuxer.addTrack(mVideoFormat);
        Log.v(TAG, "Added track #" + mVideoTrackIndex + " with " + mVideoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
        mAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
        Log.v(TAG, "Added track #" + mAudioTrackIndex + " with " + mAudioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
        mMuxer.start();
        mStarted = true;

        if (mByteBuffer == null) {
            mByteBuffer = ByteBuffer.allocate(0);
        }
        mByteBuffer.flip();
        Log.v(TAG, "Output format determined, writing " + mSampleInfoList.size() +
                " samples / " + mByteBuffer.limit() + " bytes to muxer.");
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int offset = 0;
        for (SampleInfo sampleInfo : mSampleInfoList) {
            sampleInfo.writeToBufferInfo(bufferInfo, offset);
            mMuxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.mSampleType), mByteBuffer, bufferInfo);
            offset += sampleInfo.mSize;
        }
        mSampleInfoList.clear();
        mByteBuffer = null;
    }

    public void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
        if (mStarted) {
            mMuxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
            return;
        }
        byteBuf.limit(bufferInfo.offset + bufferInfo.size);
        byteBuf.position(bufferInfo.offset);
        if (mByteBuffer == null) {
            mByteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
        }
        mByteBuffer.put(byteBuf);
        mSampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
    }

    private int getTrackIndexForSampleType(SampleType sampleType) {
        switch (sampleType) {
            case VIDEO:
                return mVideoTrackIndex;
            case AUDIO:
                return mAudioTrackIndex;
            default:
                throw new AssertionError();
        }
    }

    public enum SampleType {VIDEO, AUDIO}

    private static class SampleInfo {
        private final SampleType mSampleType;
        private final int mSize;
        private final long mPresentationTimeUs;
        private final int mFlags;

        private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
            mSampleType = sampleType;
            mSize = size;
            mPresentationTimeUs = bufferInfo.presentationTimeUs;
            mFlags = bufferInfo.flags;
        }

        private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
            bufferInfo.set(offset, mSize, mPresentationTimeUs, mFlags);
        }
    }

    public interface Listener {
        void onDetermineOutputFormat();
    }
}
@ -1,219 +0,0 @@
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/TextureRender.java
// blob: 4125dcfcfed6ed7fddba5b71d657dec0d433da6a
// modified: removed unused method bodies
// modified: use GL_LINEAR for GL_TEXTURE_MIN_FILTER to improve quality.
package net.ypresto.androidtranscoder.engine;

import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Code for rendering a texture onto a surface using OpenGL ES 2.0.
 */
class TextureRender {
    private static final String TAG = "TextureRender";
    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f, 1.0f, 0, 0.f, 1.f,
            1.0f, 1.0f, 0, 1.f, 1.f,
    };
    private FloatBuffer mTriangleVertices;
    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" + // highp here doesn't seem to matter
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";
    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];
    private int mProgram;
    private int mTextureID = -12345;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    public TextureRender() {
        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);
        Matrix.setIdentityM(mSTMatrix, 0);
    }

    public int getTextureId() {
        return mTextureID;
    }

    public void drawFrame(SurfaceTexture st) {
        checkGlError("onDrawFrame start");
        st.getTransformMatrix(mSTMatrix);
        GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");
        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();
    }

    /**
     * Initializes GL state.  Call this after the EGL surface has been created and made current.
     */
    public void surfaceCreated() {
        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new RuntimeException("failed creating program");
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }
        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");
    }

    /**
     * Replaces the fragment shader.
     */
    public void changeFragmentShader(String fragmentShader) {
        throw new UnsupportedOperationException("Not implemented");
    }

    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            Log.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }

    /**
     * Saves the current frame to disk as a PNG image.  Frame starts from (0,0).
     * <p>
     * Useful for debugging.
     */
    public static void saveFrame(String filename, int width, int height) {
        throw new UnsupportedOperationException("Not implemented.");
    }
}
@ -1,50 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.engine;

import android.media.MediaFormat;

public interface TrackTranscoder {

    void setup();

    /**
     * Get actual MediaFormat which is used to write to muxer.
     * To determine you should call {@link #stepPipeline()} several times.
     *
     * @return Actual output format determined by coder, or {@code null} if not yet determined.
     */
    MediaFormat getDeterminedFormat();

    /**
     * Step pipeline if output is available in any step of it.
     * It assumes muxer has been started, so you should call muxer.start() first.
     *
     * @return true if data moved in pipeline.
     */
    boolean stepPipeline();

    /**
     * Get presentation time of last sample written to muxer.
     *
     * @return Presentation time in micro-second. Return value is undefined if finished writing.
     */
    long getWrittenPresentationTimeUs();

    boolean isFinished();

    void release();
}
@ -1,231 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.engine;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;

import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;

import java.io.IOException;
import java.nio.ByteBuffer;

// Refer: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java
public class VideoTrackTranscoder implements TrackTranscoder {
    private static final String TAG = "VideoTrackTranscoder";
    private static final int DRAIN_STATE_NONE = 0;
    private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
    private static final int DRAIN_STATE_CONSUMED = 2;

    private final MediaExtractor mExtractor;
    private final int mTrackIndex;
    private final MediaFormat mOutputFormat;
    private final QueuedMuxer mMuxer;
    private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
    private MediaCodec mDecoder;
    private MediaCodec mEncoder;
    private ByteBuffer[] mDecoderInputBuffers;
    private ByteBuffer[] mEncoderOutputBuffers;
    private MediaFormat mActualOutputFormat;
    private OutputSurface mDecoderOutputSurfaceWrapper;
    private InputSurface mEncoderInputSurfaceWrapper;
    private boolean mIsExtractorEOS;
    private boolean mIsDecoderEOS;
    private boolean mIsEncoderEOS;
    private boolean mDecoderStarted;
    private boolean mEncoderStarted;
    private long mWrittenPresentationTimeUs;

    public VideoTrackTranscoder(MediaExtractor extractor, int trackIndex,
                                MediaFormat outputFormat, QueuedMuxer muxer) {
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mOutputFormat = outputFormat;
        mMuxer = muxer;
    }

    @Override
    public void setup() {
        mExtractor.selectTrack(mTrackIndex);
        try {
            mEncoder = MediaCodec.createEncoderByType(mOutputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoderInputSurfaceWrapper = new InputSurface(mEncoder.createInputSurface());
        mEncoderInputSurfaceWrapper.makeCurrent();
        mEncoder.start();
        mEncoderStarted = true;
        mEncoderOutputBuffers = mEncoder.getOutputBuffers();

        MediaFormat inputFormat = mExtractor.getTrackFormat(mTrackIndex);
        if (inputFormat.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)) {
            // Decoded video is rotated automatically in Android 5.0 lollipop.
            // Turn off here because we don't want to encode rotated one.
            // refer: https://android.googlesource.com/platform/frameworks/av/+blame/lollipop-release/media/libstagefright/Utils.cpp
            inputFormat.setInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES, 0);
        }
        mDecoderOutputSurfaceWrapper = new OutputSurface();
        try {
            mDecoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        mDecoder.configure(inputFormat, mDecoderOutputSurfaceWrapper.getSurface(), null, 0);
        mDecoder.start();
        mDecoderStarted = true;
        mDecoderInputBuffers = mDecoder.getInputBuffers();
    }

    @Override
    public MediaFormat getDeterminedFormat() {
        return mActualOutputFormat;
    }

    @Override
    public boolean stepPipeline() {
        boolean busy = false;

        int status;
        while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
        do {
            status = drainDecoder(0);
            if (status != DRAIN_STATE_NONE) busy = true;
            // NOTE: not repeating to keep from deadlock when encoder is full.
        } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
        while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;

        return busy;
    }

    @Override
    public long getWrittenPresentationTimeUs() {
        return mWrittenPresentationTimeUs;
    }

    @Override
    public boolean isFinished() {
        return mIsEncoderEOS;
    }

    // TODO: CloseGuard
    @Override
    public void release() {
        if (mDecoderOutputSurfaceWrapper != null) {
            mDecoderOutputSurfaceWrapper.release();
            mDecoderOutputSurfaceWrapper = null;
        }
        if (mEncoderInputSurfaceWrapper != null) {
            mEncoderInputSurfaceWrapper.release();
            mEncoderInputSurfaceWrapper = null;
        }
        if (mDecoder != null) {
            if (mDecoderStarted) mDecoder.stop();
            mDecoder.release();
            mDecoder = null;
        }
        if (mEncoder != null) {
            if (mEncoderStarted) mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
    }

    private int drainExtractor(long timeoutUs) {
        if (mIsExtractorEOS) return DRAIN_STATE_NONE;
        int trackIndex = mExtractor.getSampleTrackIndex();
        if (trackIndex >= 0 && trackIndex != mTrackIndex) {
            return DRAIN_STATE_NONE;
        }
        int result = mDecoder.dequeueInputBuffer(timeoutUs);
        if (result < 0) return DRAIN_STATE_NONE;
        if (trackIndex < 0) {
            mIsExtractorEOS = true;
            mDecoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            return DRAIN_STATE_NONE;
        }
        int sampleSize = mExtractor.readSampleData(mDecoderInputBuffers[result], 0);
        boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
        mDecoder.queueInputBuffer(result, 0, sampleSize, mExtractor.getSampleTime(), isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
        mExtractor.advance();
        return DRAIN_STATE_CONSUMED;
    }

    private int drainDecoder(long timeoutUs) {
        if (mIsDecoderEOS) return DRAIN_STATE_NONE;
        int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            mEncoder.signalEndOfInputStream();
            mIsDecoderEOS = true;
            mBufferInfo.size = 0;
        }
        boolean doRender = (mBufferInfo.size > 0);
        // NOTE: doRender will block if buffer (of encoder) is full.
        // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
        mDecoder.releaseOutputBuffer(result, doRender);
        if (doRender) {
            mDecoderOutputSurfaceWrapper.awaitNewImage();
            mDecoderOutputSurfaceWrapper.drawImage();
            mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
            mEncoderInputSurfaceWrapper.swapBuffers();
        }
        return DRAIN_STATE_CONSUMED;
    }

    private int drainEncoder(long timeoutUs) {
        if (mIsEncoderEOS) return DRAIN_STATE_NONE;
        int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                if (mActualOutputFormat != null)
                    throw new RuntimeException("Video output format changed twice.");
                mActualOutputFormat = mEncoder.getOutputFormat();
                mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                mEncoderOutputBuffers = mEncoder.getOutputBuffers();
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if (mActualOutputFormat == null) {
            throw new RuntimeException("Could not determine actual output format.");
        }

        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            mIsEncoderEOS = true;
            mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
        }
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // SPS or PPS, which should be passed by MediaFormat.
            mEncoder.releaseOutputBuffer(result, false);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
        mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_CONSUMED;
    }
}
@ -1,88 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

class Android16By9FormatStrategy implements MediaFormatStrategy {
    public static final int AUDIO_BITRATE_AS_IS = -1;
    public static final int AUDIO_CHANNELS_AS_IS = -1;
    public static final int SCALE_720P = 5;
    private static final String TAG = "Android16By9FormatStrategy";
    private final int mScale;
    private final int mVideoBitrate;
    private final int mAudioBitrate;
    private final int mAudioChannels;

    public Android16By9FormatStrategy(int scale, int videoBitrate) {
        this(scale, videoBitrate, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
    }

    public Android16By9FormatStrategy(int scale, int videoBitrate, int audioBitrate, int audioChannels) {
        mScale = scale;
        mVideoBitrate = videoBitrate;
        mAudioBitrate = audioBitrate;
        mAudioChannels = audioChannels;
    }

    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
        int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        int targetLonger = mScale * 16 * 16;
        int targetShorter = mScale * 16 * 9;
        int longer, shorter, outWidth, outHeight;
        if (width >= height) {
            longer = width;
            shorter = height;
            outWidth = targetLonger;
            outHeight = targetShorter;
        } else {
            shorter = width;
            longer = height;
            outWidth = targetShorter;
            outHeight = targetLonger;
        }
        if (longer * 9 != shorter * 16) {
            throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
        }
        if (shorter <= targetShorter) {
            Log.d(TAG, "This video's height is less or equal to " + targetShorter + ", pass-through. (" + width + "x" + height + ")");
            return null;
        }
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
        // From Nexus 4 Camera in 720p
        format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        return format;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
        if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;

        // Use original sample rate, as resampling is not supported yet.
        final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
                inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
        return format;
    }
}
@ -1,90 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

class Android720pFormatStrategy implements MediaFormatStrategy {
    public static final int AUDIO_BITRATE_AS_IS = -1;
    public static final int AUDIO_CHANNELS_AS_IS = -1;
    private static final String TAG = "720pFormatStrategy";
    private static final int LONGER_LENGTH = 1280;
    private static final int SHORTER_LENGTH = 720;
    private static final int DEFAULT_VIDEO_BITRATE = 8000 * 1000; // From Nexus 4 Camera in 720p
    private final int mVideoBitrate;
    private final int mAudioBitrate;
    private final int mAudioChannels;

    public Android720pFormatStrategy() {
        this(DEFAULT_VIDEO_BITRATE);
    }

    public Android720pFormatStrategy(int videoBitrate) {
        this(videoBitrate, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
    }

    public Android720pFormatStrategy(int videoBitrate, int audioBitrate, int audioChannels) {
        mVideoBitrate = videoBitrate;
        mAudioBitrate = audioBitrate;
        mAudioChannels = audioChannels;
    }

    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
        int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        int longer, shorter, outWidth, outHeight;
        if (width >= height) {
            longer = width;
            shorter = height;
            outWidth = LONGER_LENGTH;
            outHeight = SHORTER_LENGTH;
        } else {
            shorter = width;
            longer = height;
            outWidth = SHORTER_LENGTH;
            outHeight = LONGER_LENGTH;
        }
        if (longer * 9 != shorter * 16) {
            throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
        }
        if (shorter < SHORTER_LENGTH) {
            Log.d(TAG, "This video is less to 720p, pass-through. (" + width + "x" + height + ")");
            return null;
        }
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
        // From Nexus 4 Camera in 720p
        format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        return format;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
        if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS) return null;

        // Use original sample rate, as resampling is not supported yet.
        final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
                inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
        return format;
    }
}
@ -1,99 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;

import androidx.annotation.RequiresApi;

class AndroidStandardFormatStrategy implements MediaFormatStrategy {
    public static final int AUDIO_BITRATE_AS_IS = -1;
    public static final int AUDIO_CHANNELS_AS_IS = -1;
    private static final String TAG = "StandardCompression";
    private static final int DEFAULT_VIDEO_BITRATE = 2000 * 1000;
    private static int LONGER_LENGTH = 1280;
    private static int SHORTER_LENGTH = 720;
    private final int mVideoBitrate;
    private final int mVideoresolution;
    private final int mAudioBitrate;
    private final int mAudioChannels;
    private float ASPECT_RATIO = LONGER_LENGTH / SHORTER_LENGTH;

    public AndroidStandardFormatStrategy() {
        this(DEFAULT_VIDEO_BITRATE, SHORTER_LENGTH);
    }

    public AndroidStandardFormatStrategy(int videoBitrate, int SHORTER_LENGTH) {
        this(videoBitrate, SHORTER_LENGTH, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
    }

    public AndroidStandardFormatStrategy(int videoBitrate, int SHORTER_LENGTH, int audioBitrate, int audioChannels) {
        mVideoBitrate = videoBitrate;
        mVideoresolution = SHORTER_LENGTH;
        mAudioBitrate = audioBitrate;
        mAudioChannels = audioChannels;
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
        int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        ASPECT_RATIO = (float) width / height;
        Log.d(TAG, "Input video (" + width + "x" + height + " ratio: " + ASPECT_RATIO);
        int shorter, outWidth, outHeight;
        if (width >= height) {
            shorter = height;
            outWidth = Math.round(mVideoresolution * ASPECT_RATIO);
            outHeight = mVideoresolution;
        } else {
            shorter = width;
            outWidth = mVideoresolution;
            outHeight = Math.round(mVideoresolution * ASPECT_RATIO);
        }
        if (shorter < mVideoresolution) {
            Log.d(TAG, "This video is less to " + mVideoresolution + "p, pass-through. (" + width + "x" + height + ")");
            return null;
        }
        Log.d(TAG, "Converting video (" + outWidth + "x" + outHeight + " ratio: " + ASPECT_RATIO + ")");
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            format.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
            format.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel13);
        }
        return format;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
        if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS)
            return null;

        // Use original sample rate, as resampling is not supported yet.
        final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
                inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE), mAudioChannels);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
        return format;
    }
}
@ -1,44 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaFormat;
import android.util.Log;

/**
 * Created by yuya.tanaka on 2014/11/20.
 */
class ExportPreset960x540Strategy implements MediaFormatStrategy {
    private static final String TAG = "ExportPreset960x540Strategy";

    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
        // TODO: detect non-baseline profile and throw exception
        int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        MediaFormat outputFormat = MediaFormatPresets.getExportPreset960x540(width, height);
        int outWidth = outputFormat.getInteger(MediaFormat.KEY_WIDTH);
        int outHeight = outputFormat.getInteger(MediaFormat.KEY_HEIGHT);
        Log.d(TAG, String.format("inputFormat: %dx%d => outputFormat: %dx%d", width, height, outWidth, outHeight));
        return outputFormat;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
        // TODO
        return null;
    }
}
@@ -1,56 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

public class MediaFormatExtraConstants {
    // from MediaFormat of API level >= 21, but might be usable in older APIs as native code implementation exists.
    // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2621
    // NOTE: native code enforces baseline profile.
    // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2638
    /** For encoder parameter. Use value of MediaCodecInfo.CodecProfileLevel.AVCProfile* . */
    public static final String KEY_PROFILE = "profile";

    // from https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2623
    /** For encoder parameter. Use value of MediaCodecInfo.CodecProfileLevel.AVCLevel* . */
    public static final String KEY_LEVEL = "level";

    // from https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/MediaCodec.cpp#2197
    /** Included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}. Value is {@link java.nio.ByteBuffer}. */
    public static final String KEY_AVC_SPS = "csd-0";
    /** Included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}. Value is {@link java.nio.ByteBuffer}. */
    public static final String KEY_AVC_PPS = "csd-1";

    /**
     * For decoder parameter and included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}.
     * Decoder rotates specified degrees before rendering video to surface.
     * NOTE: Only included in track format of API >= 21.
     */
    public static final String KEY_ROTATION_DEGREES = "rotation-degrees";

    // Video formats
    // from MediaFormat of API level >= 21
    public static final String MIMETYPE_VIDEO_AVC = "video/avc";
    public static final String MIMETYPE_VIDEO_H263 = "video/3gpp";
    public static final String MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";

    // Audio formats
    // from MediaFormat of API level >= 21
    public static final String MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";

    private MediaFormatExtraConstants() {
        throw new RuntimeException();
    }
}
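These keys name entries in the MediaFormat returned by MediaExtractor.getTrackFormat(). A minimal sketch of reading them for an AVC video track; it assumes an already configured extractor and a known videoTrackIndex, both hypothetical here:

// Sketch only: reading csd-0/csd-1 and the rotation hint from a track format.
MediaFormat format = extractor.getTrackFormat(videoTrackIndex);                // assumed variables
ByteBuffer sps = format.getByteBuffer(MediaFormatExtraConstants.KEY_AVC_SPS);  // "csd-0"
ByteBuffer pps = format.getByteBuffer(MediaFormatExtraConstants.KEY_AVC_PPS);  // "csd-1"
int rotation = format.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)
        ? format.getInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)    // only present on API >= 21
        : 0;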
@@ -1,80 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaCodecInfo;
import android.media.MediaFormat;

// Refer for example: https://gist.github.com/wobbals/3990442
// Refer for preferred parameters: https://developer.apple.com/library/ios/documentation/networkinginternet/conceptual/streamingmediaguide/UsingHTTPLiveStreaming/UsingHTTPLiveStreaming.html#//apple_ref/doc/uid/TP40008332-CH102-SW8
// Refer for available keys: (ANDROID ROOT)/media/libstagefright/ACodec.cpp
public class MediaFormatPresets {
    private static final int LONGER_LENGTH_960x540 = 960;

    private MediaFormatPresets() {
    }

    // preset similar to iOS SDK's AVAssetExportPreset960x540
    @Deprecated
    public static MediaFormat getExportPreset960x540() {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 960, 540);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 5500 * 1000);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        return format;
    }

    /**
     * Preset similar to iOS SDK's AVAssetExportPreset960x540.
     * Note that encoding resolutions of this preset are not supported in all devices e.g. Nexus 4.
     * On unsupported device encoded video stream will be broken without any exception.
     * @param originalWidth Input video width.
     * @param originalHeight Input video height.
     * @return MediaFormat instance, or null if pass through.
     */
    public static MediaFormat getExportPreset960x540(int originalWidth, int originalHeight) {
        int longerLength = Math.max(originalWidth, originalHeight);
        int shorterLength = Math.min(originalWidth, originalHeight);

        if (longerLength <= LONGER_LENGTH_960x540) return null; // don't upscale

        int residue = LONGER_LENGTH_960x540 * shorterLength % longerLength;
        if (residue != 0) {
            double ambiguousShorter = (double) LONGER_LENGTH_960x540 * shorterLength / longerLength;
            throw new OutputFormatUnavailableException(String.format(
                    "Could not fit to integer, original: (%d, %d), scaled: (%d, %f)",
                    longerLength, shorterLength, LONGER_LENGTH_960x540, ambiguousShorter));
        }

        int scaledShorter = LONGER_LENGTH_960x540 * shorterLength / longerLength;
        int width, height;
        if (originalWidth >= originalHeight) {
            width = LONGER_LENGTH_960x540;
            height = scaledShorter;
        } else {
            width = scaledShorter;
            height = LONGER_LENGTH_960x540;
        }

        MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 5500 * 1000);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        return format;
    }
}
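The preset scales the longer side down to 960 and only accepts inputs whose shorter side scales to a whole number of pixels: 1280x720 gives 960 * 720 / 1280 = 540, so it returns a 960x540 format, while 1366x768 gives 960 * 768 / 1366 = 539.7... and throws OutputFormatUnavailableException. A minimal sketch of calling it; the input sizes are made up for illustration:

// Sketch only: hypothetical input sizes to illustrate the scaling rule above.
MediaFormat scaled = MediaFormatPresets.getExportPreset960x540(1280, 720);     // 960x540
MediaFormat passThrough = MediaFormatPresets.getExportPreset960x540(640, 360); // null: longer side <= 960
// MediaFormatPresets.getExportPreset960x540(1366, 768);                       // would throw OutputFormatUnavailableException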
@@ -1,39 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

import android.media.MediaFormat;

public interface MediaFormatStrategy {

    /**
     * Returns preferred video format for encoding.
     *
     * @param inputFormat MediaFormat from MediaExtractor, contains csd-0/csd-1.
     * @return null for passthrough.
     * @throws OutputFormatUnavailableException if input could not be transcoded because of restrictions.
     */
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat);

    /**
     * Caution: this method should return null currently.
     *
     * @return null for passthrough.
     * @throws OutputFormatUnavailableException if input could not be transcoded because of restrictions.
     */
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat);

}
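A custom strategy only has to implement these two methods; returning null from either means the corresponding track is copied without re-encoding. A minimal sketch of an implementation, assuming nothing beyond the interface above (the class name and the 1280x720 / 2 Mbps values are made up, not values from this repository):

// Sketch only: a strategy that re-encodes video to a fixed format and passes audio through.
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import net.ypresto.androidtranscoder.format.MediaFormatStrategy;

class FixedFormatStrategy implements MediaFormatStrategy {
    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);  // arbitrary example size
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * 1000 * 1000);                // arbitrary example bitrate
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        return format;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat) {
        return null; // pass the audio track through untouched
    }
}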
@@ -1,82 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

public class MediaFormatStrategyPresets {
    public static final int AUDIO_BITRATE_AS_IS = -1;
    public static final int AUDIO_CHANNELS_AS_IS = -1;

    /**
     * @deprecated Use {@link #createExportPreset960x540Strategy()}.
     */
    @Deprecated
    public static final MediaFormatStrategy EXPORT_PRESET_960x540 = new ExportPreset960x540Strategy();

    /**
     * Standard preset
     *
     * @param bitrate Preferred bitrate for video encoding.
     */
    public static MediaFormatStrategy createAndroidStandardStrategy(int bitrate, int resolution) {
        return new AndroidStandardFormatStrategy(bitrate, resolution);
    }

    /**
     * Preset based on Nexus 4 camera recording with 720p quality.
     * This preset is ensured to work on any Android >=4.3 devices by Android CTS (if codec is available).
     * Default bitrate is 8Mbps. {@link #createAndroid720pStrategy(int)} to specify bitrate.
     */
    public static MediaFormatStrategy createAndroid720pStrategy() {
        return new Android720pFormatStrategy();
    }

    /**
     * Preset based on Nexus 4 camera recording with 720p quality.
     * This preset is ensured to work on any Android >=4.3 devices by Android CTS (if codec is available).
     * Audio track will be copied as-is.
     *
     * @param bitrate Preferred bitrate for video encoding.
     */
    public static MediaFormatStrategy createAndroid720pStrategy(int bitrate) {
        return new Android720pFormatStrategy(bitrate);
    }

    /**
     * Preset based on Nexus 4 camera recording with 720p quality.
     * This preset is ensured to work on any Android >=4.3 devices by Android CTS (if codec is available).
     * <br>
     * Note: audio transcoding is experimental feature.
     *
     * @param bitrate Preferred bitrate for video encoding.
     * @param audioBitrate Preferred bitrate for audio encoding.
     * @param audioChannels Output audio channels.
     */
    public static MediaFormatStrategy createAndroid720pStrategy(int bitrate, int audioBitrate, int audioChannels) {
        return new Android720pFormatStrategy(bitrate, audioBitrate, audioChannels);
    }

    /**
     * Preset similar to iOS SDK's AVAssetExportPreset960x540.
     * Note that encoding resolutions of this preset are not supported in all devices e.g. Nexus 4.
     * On unsupported device encoded video stream will be broken without any exception.
     */
    public static MediaFormatStrategy createExportPreset960x540Strategy() {
        return new ExportPreset960x540Strategy();
    }

    private MediaFormatStrategyPresets() {
    }
}
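Callers pick one of these factory methods and hand the resulting strategy to the transcoder. A minimal sketch of choosing one; the bitrate and channel numbers are illustrative, not defaults from this repository:

// Sketch only: selecting a preset strategy; the numeric values are made up.
MediaFormatStrategy videoOnly = MediaFormatStrategyPresets.createAndroid720pStrategy(2 * 1000 * 1000);
MediaFormatStrategy withAudio = MediaFormatStrategyPresets.createAndroid720pStrategy(
        2 * 1000 * 1000, 128 * 1000, 2); // experimental audio transcoding: 128 kbps AAC, stereo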
@@ -1,22 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.format;

public class OutputFormatUnavailableException extends RuntimeException {
    public OutputFormatUnavailableException(String detailMessage) {
        super(detailMessage);
    }
}
@@ -1,69 +0,0 @@
/*
 * Copyright (C) 2015 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.utils;

import android.media.MediaFormat;

import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;

import java.nio.ByteBuffer;
import java.util.Arrays;

public class AvcCsdUtils {
    // Refer: https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/MediaCodec.cpp#2198
    // Refer: http://stackoverflow.com/a/2861340
    private static final byte[] AVC_START_CODE_3 = {0x00, 0x00, 0x01};
    private static final byte[] AVC_START_CODE_4 = {0x00, 0x00, 0x00, 0x01};
    // Refer: http://www.cardinalpeak.com/blog/the-h-264-sequence-parameter-set/
    private static final byte AVC_SPS_NAL = 103; // 0<<7 + 3<<5 + 7<<0
    // https://tools.ietf.org/html/rfc6184
    private static final byte AVC_SPS_NAL_2 = 39; // 0<<7 + 1<<5 + 7<<0
    private static final byte AVC_SPS_NAL_3 = 71; // 0<<7 + 2<<5 + 7<<0

    /**
     * @return ByteBuffer contains SPS without NAL header.
     */
    public static ByteBuffer getSpsBuffer(MediaFormat format) {
        ByteBuffer sourceBuffer = format.getByteBuffer(MediaFormatExtraConstants.KEY_AVC_SPS).asReadOnlyBuffer(); // might be direct buffer
        ByteBuffer prefixedSpsBuffer = ByteBuffer.allocate(sourceBuffer.limit()).order(sourceBuffer.order());
        prefixedSpsBuffer.put(sourceBuffer);
        prefixedSpsBuffer.flip();

        skipStartCode(prefixedSpsBuffer);

        byte spsNalData = prefixedSpsBuffer.get();
        if (spsNalData != AVC_SPS_NAL && spsNalData != AVC_SPS_NAL_2 && spsNalData != AVC_SPS_NAL_3) {
            throw new IllegalStateException("Got non SPS NAL data.");
        }

        return prefixedSpsBuffer.slice();
    }

    private static void skipStartCode(ByteBuffer prefixedSpsBuffer) {
        byte[] prefix3 = new byte[3];
        prefixedSpsBuffer.get(prefix3);
        if (Arrays.equals(prefix3, AVC_START_CODE_3)) return;

        byte[] prefix4 = Arrays.copyOf(prefix3, 4);
        prefix4[3] = prefixedSpsBuffer.get();
        if (Arrays.equals(prefix4, AVC_START_CODE_4)) return;
        throw new IllegalStateException("AVC NAL start code does not found in csd.");
    }

    private AvcCsdUtils() {
        throw new RuntimeException();
    }
}
@@ -1,26 +0,0 @@
/*
 * Copyright (C) 2016 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.utils;

import java.nio.ByteBuffer;

public class AvcSpsUtils {
    public static byte getProfileIdc(ByteBuffer spsBuffer) {
        // Refer: http://www.cardinalpeak.com/blog/the-h-264-sequence-parameter-set/
        // First byte after NAL.
        return spsBuffer.get(0);
    }
}
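Together, these two helpers let the library inspect the H.264 profile of a track before transcoding. A minimal sketch, assuming videoFormat was obtained from MediaExtractor.getTrackFormat() for an AVC track (profile_idc 66 is the Baseline profile in the H.264 spec):

// Sketch only: check whether an AVC track was encoded with the Baseline profile.
ByteBuffer sps = AvcCsdUtils.getSpsBuffer(videoFormat); // csd-0 with start code and NAL header byte stripped
byte profileIdc = AvcSpsUtils.getProfileIdc(sps);
boolean isBaseline = (profileIdc == 66);                 // 66 = Baseline profile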
@@ -1,37 +0,0 @@
package net.ypresto.androidtranscoder.utils;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ISO6709LocationParser {
    private final Pattern pattern;

    public ISO6709LocationParser() {
        this.pattern = Pattern.compile("([+\\-][0-9.]+)([+\\-][0-9.]+)");
    }

    /**
     * This method parses the given string representing a geographic point location by coordinates in ISO 6709 format
     * and returns the latitude and the longitude in float. If <code>location</code> is not in ISO 6709 format,
     * this method returns <code>null</code>
     *
     * @param location a String representing a geographic point location by coordinates in ISO 6709 format
     * @return <code>null</code> if the given string is not as expected, an array of floats with size 2,
     * where the first element represents latitude and the second represents longitude, otherwise.
     */
    public float[] parse(String location) {
        if (location == null) return null;
        Matcher m = pattern.matcher(location);
        if (m.find() && m.groupCount() == 2) {
            String latstr = m.group(1);
            String lonstr = m.group(2);
            try {
                float lat = Float.parseFloat(latstr);
                float lon = Float.parseFloat(lonstr);
                return new float[]{lat, lon};
            } catch (NumberFormatException ignored) {
            }
        }
        return null;
    }
}
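The regex only extracts the leading latitude/longitude pair; any altitude and the trailing solidus allowed by ISO 6709 are ignored. A minimal usage sketch; the coordinate string is a made-up example of the format found in video metadata:

// Sketch only: parsing a hypothetical ISO 6709 location string.
ISO6709LocationParser parser = new ISO6709LocationParser();
float[] latLon = parser.parse("+35.658632+139.745411/"); // made-up value
if (latLon != null) {
    float latitude = latLon[0];   // 35.658632
    float longitude = latLon[1];  // 139.745411
}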
@@ -1,63 +0,0 @@
/*
 * Copyright (C) 2014 Yuya Tanaka
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.ypresto.androidtranscoder.utils;

import android.media.MediaExtractor;
import android.media.MediaFormat;

public class MediaExtractorUtils {

    private MediaExtractorUtils() {
    }

    public static class TrackResult {

        private TrackResult() {
        }

        public int mVideoTrackIndex;
        public String mVideoTrackMime;
        public MediaFormat mVideoTrackFormat;
        public int mAudioTrackIndex;
        public String mAudioTrackMime;
        public MediaFormat mAudioTrackFormat;
    }

    public static TrackResult getFirstVideoAndAudioTrack(MediaExtractor extractor) {
        TrackResult trackResult = new TrackResult();
        trackResult.mVideoTrackIndex = -1;
        trackResult.mAudioTrackIndex = -1;
        int trackCount = extractor.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (trackResult.mVideoTrackIndex < 0 && mime.startsWith("video/")) {
                trackResult.mVideoTrackIndex = i;
                trackResult.mVideoTrackMime = mime;
                trackResult.mVideoTrackFormat = format;
            } else if (trackResult.mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
                trackResult.mAudioTrackIndex = i;
                trackResult.mAudioTrackMime = mime;
                trackResult.mAudioTrackFormat = format;
            }
            if (trackResult.mVideoTrackIndex >= 0 && trackResult.mAudioTrackIndex >= 0) break;
        }
        if (trackResult.mVideoTrackIndex < 0 || trackResult.mAudioTrackIndex < 0) {
            throw new IllegalArgumentException("extractor does not contain video and/or audio tracks.");
        }
        return trackResult;
    }
}
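A minimal sketch of the call site a transcoding engine would use with this helper; the class name and input path are placeholders, not code from this repository:

// Sketch only: locate and select the first video and audio track of a source file.
import android.media.MediaExtractor;

import net.ypresto.androidtranscoder.utils.MediaExtractorUtils;

import java.io.IOException;

class TrackSelectionExample {
    static MediaExtractorUtils.TrackResult selectTracks(String inputPath) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(inputPath); // inputPath is a caller-supplied placeholder
        MediaExtractorUtils.TrackResult tracks = MediaExtractorUtils.getFirstVideoAndAudioTrack(extractor);
        extractor.selectTrack(tracks.mVideoTrackIndex);
        extractor.selectTrack(tracks.mAudioTrackIndex);
        return tracks;
    }
}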