filledBuffers = new ArrayDeque<>();
+
+ private final MediaCodec decoder;
+ private final MediaCodec encoder;
+ private final MediaFormat encodeFormat;
+
+ private int inputSampleRate;
+ private int inputChannelCount;
+ private int outputChannelCount;
+
+ private final MediaCodecBufferCompatWrapper decoderBuffers;
+ private final MediaCodecBufferCompatWrapper encoderBuffers;
+
+ private final AudioBuffer overflowBuffer = new AudioBuffer();
+
+ private MediaFormat actualDecodedFormat;
+
+
+ AudioChannel(final MediaCodec decoder,
+ final MediaCodec encoder, final MediaFormat encodeFormat) {
+ this.decoder = decoder;
+ this.encoder = encoder;
+ this.encodeFormat = encodeFormat;
+
+ decoderBuffers = new MediaCodecBufferCompatWrapper(this.decoder);
+ encoderBuffers = new MediaCodecBufferCompatWrapper(this.encoder);
+ }
+
+ void setActualDecodedFormat(final MediaFormat decodedFormat) {
+ actualDecodedFormat = decodedFormat;
+
+ inputSampleRate = actualDecodedFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ if (inputSampleRate != encodeFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)) {
+ throw new UnsupportedOperationException("Audio sample rate conversion not supported yet.");
+ }
+
+ inputChannelCount = actualDecodedFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ outputChannelCount = encodeFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+
+ if (inputChannelCount != 1 && inputChannelCount != 2) {
+ throw new UnsupportedOperationException("Input channel count (" + inputChannelCount + ") not supported.");
+ }
+
+ if (outputChannelCount != 1 && outputChannelCount != 2) {
+ throw new UnsupportedOperationException("Output channel count (" + outputChannelCount + ") not supported.");
+ }
+
+ overflowBuffer.presentationTimeUs = 0;
+ }
+
+ void drainDecoderBufferAndQueue(final int bufferIndex, final long presentationTimeUs) {
+ if (actualDecodedFormat == null) {
+ throw new RuntimeException("Buffer received before format!");
+ }
+
+ final ByteBuffer data =
+ bufferIndex == BUFFER_INDEX_END_OF_STREAM ?
+ null : decoderBuffers.getOutputBuffer(bufferIndex);
+
+ AudioBuffer buffer = emptyBuffers.poll();
+ if (buffer == null) {
+ buffer = new AudioBuffer();
+ }
+
+ buffer.bufferIndex = bufferIndex;
+ buffer.presentationTimeUs = presentationTimeUs;
+ buffer.data = data == null ? null : data.asShortBuffer();
+
+ if (overflowBuffer.data == null) {
+ overflowBuffer.data = ByteBuffer
+ .allocateDirect(data.capacity())
+ .order(ByteOrder.nativeOrder())
+ .asShortBuffer();
+ overflowBuffer.data.clear().flip();
+ }
+
+ filledBuffers.add(buffer);
+ }
+
+ boolean feedEncoder(long timeoutUs) {
+ final boolean hasOverflow = overflowBuffer.data != null && overflowBuffer.data.hasRemaining();
+ if (filledBuffers.isEmpty() && !hasOverflow) {
+ // No audio data - Bail out
+ return false;
+ }
+
+ final int encoderInBuffIndex = encoder.dequeueInputBuffer(timeoutUs);
+ if (encoderInBuffIndex < 0) {
+ // Encoder is full - Bail out
+ return false;
+ }
+
+ // Drain overflow first
+ final ShortBuffer outBuffer = encoderBuffers.getInputBuffer(encoderInBuffIndex).asShortBuffer();
+ if (hasOverflow) {
+ final long presentationTimeUs = drainOverflow(outBuffer);
+ encoder.queueInputBuffer(encoderInBuffIndex,
+ 0, outBuffer.position() * BYTES_PER_SHORT,
+ presentationTimeUs, 0);
+ return true;
+ }
+
+ final AudioBuffer inBuffer = filledBuffers.poll();
+ if (inBuffer.bufferIndex == BUFFER_INDEX_END_OF_STREAM) {
+ encoder.queueInputBuffer(encoderInBuffIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ return false;
+ }
+
+ final long presentationTimeUs = remixAndMaybeFillOverflow(inBuffer, outBuffer);
+ encoder.queueInputBuffer(encoderInBuffIndex,
+ 0, outBuffer.position() * BYTES_PER_SHORT,
+ presentationTimeUs, 0);
+ if (inBuffer != null) {
+ decoder.releaseOutputBuffer(inBuffer.bufferIndex, false);
+ emptyBuffers.add(inBuffer);
+ }
+
+ return true;
+ }
+
+ private static long sampleCountToDurationUs(final int sampleCount,
+ final int sampleRate,
+ final int channelCount) {
+ return (sampleCount / (sampleRate * MICROSECS_PER_SEC)) / channelCount;
+ }
+
    /**
     * Copies pending overflow samples into {@code outBuff} and returns the
     * presentation time (us) of the first sample written.
     *
     * The overflow buffer keeps its position across partial drains, so the
     * start time is the buffered chunk's base time advanced by the duration of
     * the samples already consumed on earlier calls.
     */
    private long drainOverflow(final ShortBuffer outBuff) {
        final ShortBuffer overflowBuff = overflowBuffer.data;
        final int overflowLimit = overflowBuff.limit();
        final int overflowSize = overflowBuff.remaining();

        // Chunk base time plus the duration of the part drained previously.
        final long beginPresentationTimeUs = overflowBuffer.presentationTimeUs +
                sampleCountToDurationUs(overflowBuff.position(), inputSampleRate, outputChannelCount);

        outBuff.clear();
        // Limit overflowBuff to outBuff's capacity
        overflowBuff.limit(outBuff.capacity());
        // Load overflowBuff onto outBuff
        outBuff.put(overflowBuff);

        if (overflowSize >= outBuff.capacity()) {
            // Overflow fully consumed - Reset
            overflowBuff.clear().limit(0);
        } else {
            // Only partially consumed - Keep position & restore previous limit
            overflowBuff.limit(overflowLimit);
        }

        return beginPresentationTimeUs;
    }
+
    /**
     * Copies one decoded buffer into {@code outBuff}; samples that do not fit
     * are stashed in the overflow buffer together with their computed start
     * time. Returns the input buffer's presentation time (us).
     *
     * NOTE(review): despite the name, no mono<->stereo remixing happens here —
     * samples are copied verbatim. If inputChannelCount != outputChannelCount
     * (both are individually allowed by setActualDecodedFormat) the output
     * stream will be mislabelled; confirm whether a remixer was intended.
     */
    private long remixAndMaybeFillOverflow(final AudioBuffer input,
                                           final ShortBuffer outBuff) {
        final ShortBuffer inBuff = input.data;
        final ShortBuffer overflowBuff = overflowBuffer.data;

        outBuff.clear();

        // Reset position to 0, and set limit to capacity (Since MediaCodec doesn't do that for us)
        inBuff.clear();

        if (inBuff.remaining() > outBuff.remaining()) {
            // Overflow
            // Limit inBuff to outBuff's capacity
            inBuff.limit(outBuff.capacity());
            outBuff.put(inBuff);

            // Reset limit to its own capacity & Keep position
            inBuff.limit(inBuff.capacity());

            // Remix the rest onto overflowBuffer
            // NOTE: We should only reach this point when overflow buffer is empty
            final long consumedDurationUs =
                    sampleCountToDurationUs(inBuff.position(), inputSampleRate, inputChannelCount);
            overflowBuff.put(inBuff);

            // Seal off overflowBuff & mark limit
            overflowBuff.flip();
            overflowBuffer.presentationTimeUs = input.presentationTimeUs + consumedDurationUs;
        } else {
            // No overflow
            outBuff.put(inBuff);
        }

        return input.presentationTimeUs;
    }
}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/AudioComposer.java b/gpuv/src/main/java/com/xypower/gpuv/composer/AudioComposer.java
new file mode 100644
index 00000000..da76608a
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/AudioComposer.java
@@ -0,0 +1,83 @@
+package com.xypower.gpuv.composer;
+
+import android.annotation.SuppressLint;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+
+// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
+class AudioComposer implements IAudioComposer {
+ private final MediaExtractor mediaExtractor;
+ private final int trackIndex;
+ private final MuxRender muxRender;
+ private final MuxRender.SampleType sampleType = MuxRender.SampleType.AUDIO;
+ private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ private int bufferSize;
+ private ByteBuffer buffer;
+ private boolean isEOS;
+ private MediaFormat actualOutputFormat;
+ private long writtenPresentationTimeUs;
+
+ AudioComposer(MediaExtractor mediaExtractor, int trackIndex,
+ MuxRender muxRender) {
+ this.mediaExtractor = mediaExtractor;
+ this.trackIndex = trackIndex;
+ this.muxRender = muxRender;
+
+ actualOutputFormat = this.mediaExtractor.getTrackFormat(this.trackIndex);
+ this.muxRender.setOutputFormat(this.sampleType, actualOutputFormat);
+ bufferSize = actualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
+ buffer = ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.nativeOrder());
+ }
+
+
+ @SuppressLint("Assert")
+ public boolean stepPipeline() {
+ if (isEOS) return false;
+ int trackIndex = mediaExtractor.getSampleTrackIndex();
+ if (trackIndex < 0) {
+ buffer.clear();
+ bufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ muxRender.writeSampleData(sampleType, buffer, bufferInfo);
+ isEOS = true;
+ return true;
+ }
+ if (trackIndex != this.trackIndex) return false;
+
+ buffer.clear();
+ int sampleSize = mediaExtractor.readSampleData(buffer, 0);
+ assert sampleSize <= bufferSize;
+ boolean isKeyFrame = (mediaExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
+ int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0;
+ bufferInfo.set(0, sampleSize, mediaExtractor.getSampleTime(), flags);
+ muxRender.writeSampleData(sampleType, buffer, bufferInfo);
+ writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
+
+ mediaExtractor.advance();
+ return true;
+ }
+
+ @Override
+ public long getWrittenPresentationTimeUs() {
+ return writtenPresentationTimeUs;
+ }
+
+ @Override
+ public boolean isFinished() {
+ return isEOS;
+ }
+
+ @Override
+ public void setup() {
+ // do nothing
+ }
+
+ @Override
+ public void release() {
+ // do nothing
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/DecoderSurface.java b/gpuv/src/main/java/com/xypower/gpuv/composer/DecoderSurface.java
new file mode 100644
index 00000000..a2d92dce
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/DecoderSurface.java
@@ -0,0 +1,357 @@
+package com.xypower.gpuv.composer;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.*;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+import com.xypower.gpuv.egl.EglUtil;
+import com.xypower.gpuv.egl.GlFramebufferObject;
+import com.xypower.gpuv.egl.GlPreviewFilter;
+import com.xypower.gpuv.egl.GlSurfaceTexture;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import static android.opengl.GLES20.*;
+
+
+// Refer : https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/OutputSurface.java
+
+/**
+ * Holds state associated with a Surface used for MediaCodec decoder output.
+ *
+ * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
+ * and then create a Surface for that SurfaceTexture. The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL to a pbuffer.
+ *
+ * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
+ * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
+ * we just draw it on whatever surface is current.
+ *
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
+ */
class DecoderSurface implements SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "DecoderSurface";
    private static final boolean VERBOSE = false;
    // EGL handles are only torn down in release(); creation happens elsewhere.
    private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
    private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
    private Surface surface;
    private Object frameSyncObject = new Object(); // guards frameAvailable
    private boolean frameAvailable;
    private GlFilter filter;

    // GL texture name backing the SurfaceTexture the decoder renders into.
    private int texName;

    private GlSurfaceTexture previewTexture;

    private GlFramebufferObject filterFramebufferObject;
    private GlPreviewFilter previewShader;
    private GlFilter normalShader;
    private GlFramebufferObject framebufferObject;

    // Transform matrices: MVP = Proj * View * Model; ST is the SurfaceTexture
    // transform supplied by updateTexImage().
    private float[] MVPMatrix = new float[16];
    private float[] ProjMatrix = new float[16];
    private float[] MMatrix = new float[16];
    private float[] VMatrix = new float[16];
    private float[] STMatrix = new float[16];


    private Rotation rotation = Rotation.NORMAL;
    private Size outputResolution;
    private Size inputResolution;
    private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
    private FillModeCustomItem fillModeCustomItem;
    private boolean flipVertical = false;
    private boolean flipHorizontal = false;

    /**
     * Creates an DecoderSurface using the current EGL context (rather than establishing a
     * new one). Creates a Surface that can be passed to MediaCodec.configure().
     */
    DecoderSurface(GlFilter filter) {
        this.filter = filter;
        setup();
    }

    /**
     * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
     * with the SurfaceTexture.
     */
    private void setup() {

        // Even if we don't access the SurfaceTexture after the constructor returns, we
        // still need to keep a reference to it. The Surface doesn't retain a reference
        // at the Java level, so if we don't either then the object can get GCed, which
        // causes the native finalizer to run.

        // if (VERBOSE) Log.d(TAG, "textureID=" + filter.getTextureId());
        // surfaceTexture = new SurfaceTexture(filter.getTextureId());

        // This doesn't work if DecoderSurface is created on the thread that CTS started for
        // these test cases.
        //
        // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
        // create a Handler that uses it. The "frame available" message is delivered
        // there, but since we're not a Looper-based thread we'll never see it. For
        // this to do anything useful, DecoderSurface must be created on a thread without
        // a Looper, so that SurfaceTexture uses the main application Looper instead.
        //
        // Java language note: passing "this" out of a constructor is generally unwise,
        // but we should be able to get away with it here.

        filter.setup();
        framebufferObject = new GlFramebufferObject();
        normalShader = new GlFilter();
        normalShader.setup();

        final int[] args = new int[1];

        GLES20.glGenTextures(args.length, args, 0);
        texName = args[0];

        // Create the SurfaceTexture that will receive decoder frames.
        previewTexture = new GlSurfaceTexture(texName);
        previewTexture.setOnFrameAvailableListener(this);
        surface = new Surface(previewTexture.getSurfaceTexture());

        GLES20.glBindTexture(previewTexture.getTextureTarget(), texName);
        // GL_TEXTURE_EXTERNAL_OES
        //OpenGlUtils.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);
        EglUtil.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);

        // NOTE(review): this unbinds GL_TEXTURE_2D although the external target was
        // bound above — likely harmless, but confirm GL_TEXTURE_EXTERNAL_OES was meant.
        GLES20.glBindTexture(GL_TEXTURE_2D, 0);

        // GL_TEXTURE_EXTERNAL_OES
        previewShader = new GlPreviewFilter(previewTexture.getTextureTarget());
        previewShader.setup();
        filterFramebufferObject = new GlFramebufferObject();


        // Fixed camera at z=5 looking at the origin, +Y up.
        Matrix.setLookAtM(VMatrix, 0,
                0.0f, 0.0f, 5.0f,
                0.0f, 0.0f, 0.0f,
                0.0f, 1.0f, 0.0f
        );

        // Result in args is unused; the query itself appears intentional.
        GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);


    }


    /**
     * Discard all resources held by this class, notably the EGL context.
     */
    void release() {
        if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglDestroySurface(eglDisplay, eglSurface);
            EGL14.eglDestroyContext(eglDisplay, eglContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(eglDisplay);
        }
        surface.release();
        previewTexture.release();
        // this causes a bunch of warnings that appear harmless but might confuse someone:
        // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
        //surfaceTexture.release();
        eglDisplay = EGL14.EGL_NO_DISPLAY;
        eglContext = EGL14.EGL_NO_CONTEXT;
        eglSurface = EGL14.EGL_NO_SURFACE;
        filter.release();
        filter = null;
        surface = null;
        previewTexture = null;
    }

    /**
     * Returns the Surface that we draw onto.
     */
    Surface getSurface() {
        return surface;
    }

    /**
     * Latches the next buffer into the texture. Must be called from the thread that created
     * the DecoderSurface object, after the onFrameAvailable callback has signaled that new
     * data is available.
     *
     * NOTE(review): a spurious wakeup re-enters wait() with the full 10s timeout,
     * so the effective deadline can exceed TIMEOUT_MS — see TODO below.
     */
    void awaitNewImage() {
        final int TIMEOUT_MS = 10000;
        synchronized (frameSyncObject) {
            while (!frameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    frameSyncObject.wait(TIMEOUT_MS);
                    if (!frameAvailable) {
                        // TODO: if "spurious wakeup", continue while loop
                        throw new RuntimeException("Surface frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            frameAvailable = false;
        }
        // Latch the data.
        // GlUtils.checkGlError("before updateTexImage");
        previewTexture.updateTexImage();
        previewTexture.getTransformMatrix(STMatrix);
    }


    /**
     * Draws the data from SurfaceTexture onto the current EGL surface.
     *
     * Pipeline: decoded frame -> previewShader (into the filter FBO when a filter
     * is set) -> optional filter pass into the main FBO -> normalShader blit of
     * the main FBO onto the default framebuffer (the encoder's input surface).
     */
    void drawImage() {

        framebufferObject.enable();
        GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());


        if (filter != null) {
            filterFramebufferObject.enable();
            GLES20.glViewport(0, 0, filterFramebufferObject.getWidth(), filterFramebufferObject.getHeight());
        }

        GLES20.glClear(GL_COLOR_BUFFER_BIT);

        Matrix.multiplyMM(MVPMatrix, 0, VMatrix, 0, MMatrix, 0);
        Matrix.multiplyMM(MVPMatrix, 0, ProjMatrix, 0, MVPMatrix, 0);

        // Mirroring is applied by negating the scale on the relevant axis.
        float scaleDirectionX = flipHorizontal ? -1 : 1;
        float scaleDirectionY = flipVertical ? -1 : 1;

        float scale[];
        switch (fillMode) {
            case PRESERVE_ASPECT_FIT:
                scale = FillMode.getScaleAspectFit(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());

                // Log.d(TAG, "scale[0] = " + scale[0] + " scale[1] = " + scale[1]);

                Matrix.scaleM(MVPMatrix, 0, scale[0] * scaleDirectionX, scale[1] * scaleDirectionY, 1);
                if (rotation != Rotation.NORMAL) {
                    Matrix.rotateM(MVPMatrix, 0, -rotation.getRotation(), 0.f, 0.f, 1.f);
                }
                break;
            case PRESERVE_ASPECT_CROP:
                scale = FillMode.getScaleAspectCrop(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());
                Matrix.scaleM(MVPMatrix, 0, scale[0] * scaleDirectionX, scale[1] * scaleDirectionY, 1);
                if (rotation != Rotation.NORMAL) {
                    Matrix.rotateM(MVPMatrix, 0, -rotation.getRotation(), 0.f, 0.f, 1.f);
                }
                break;
            case CUSTOM:
                if (fillModeCustomItem != null) {
                    Matrix.translateM(MVPMatrix, 0, fillModeCustomItem.getTranslateX(), -fillModeCustomItem.getTranslateY(), 0f);
                    scale = FillMode.getScaleAspectCrop(rotation.getRotation(), inputResolution.getWidth(), inputResolution.getHeight(), outputResolution.getWidth(), outputResolution.getHeight());

                    if (fillModeCustomItem.getRotate() == 0 || fillModeCustomItem.getRotate() == 180) {
                        Matrix.scaleM(MVPMatrix,
                                0,
                                fillModeCustomItem.getScale() * scale[0] * scaleDirectionX,
                                fillModeCustomItem.getScale() * scale[1] * scaleDirectionY,
                                1);
                    } else {
                        // 90/270: compensate the aspect ratio for the rotated frame.
                        Matrix.scaleM(MVPMatrix,
                                0,
                                fillModeCustomItem.getScale() * scale[0] * (1 / fillModeCustomItem.getVideoWidth() * fillModeCustomItem.getVideoHeight()) * scaleDirectionX,
                                fillModeCustomItem.getScale() * scale[1] * (fillModeCustomItem.getVideoWidth() / fillModeCustomItem.getVideoHeight()) * scaleDirectionY,
                                1);
                    }

                    Matrix.rotateM(MVPMatrix, 0, -(rotation.getRotation() + fillModeCustomItem.getRotate()), 0.f, 0.f, 1.f);

//                    Log.d(TAG, "inputResolution = " + inputResolution.getWidth() + " height = " + inputResolution.getHeight());
//                    Log.d(TAG, "out = " + outputResolution.getWidth() + " height = " + outputResolution.getHeight());
//                    Log.d(TAG, "rotation = " + rotation.getRotation());
//                    Log.d(TAG, "scale[0] = " + scale[0] + " scale[1] = " + scale[1]);


                }
                // NOTE(review): no 'break' here — CUSTOM falls through to 'default',
                // which is empty today, so behavior is unaffected; confirm intentional.
            default:
                break;
        }


        previewShader.draw(texName, MVPMatrix, STMatrix, 1f);

        if (filter != null) {
            // Run the filter pass over what was just drawn, rendering via the FBO.
            framebufferObject.enable();
            GLES20.glClear(GL_COLOR_BUFFER_BIT);
            filter.draw(filterFramebufferObject.getTexName(), framebufferObject);
        }


        ////////////////////////////////////////////////////////////////////////////////////

        GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
        GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());

        GLES20.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        normalShader.draw(framebufferObject.getTexName(), null);
    }

    // Called on the SurfaceTexture's handler thread when the decoder produced a frame.
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (frameSyncObject) {
            if (frameAvailable) {
                throw new RuntimeException("frameAvailable already set, frame could be dropped");
            }
            frameAvailable = true;
            frameSyncObject.notifyAll();
        }
    }

    void setRotation(Rotation rotation) {
        this.rotation = rotation;
    }


    void setOutputResolution(Size resolution) {
        this.outputResolution = resolution;
    }

    void setFillMode(FillMode fillMode) {
        this.fillMode = fillMode;
    }

    void setInputResolution(Size resolution) {
        this.inputResolution = resolution;
    }

    void setFillModeCustomItem(FillModeCustomItem fillModeCustomItem) {
        this.fillModeCustomItem = fillModeCustomItem;
    }

    void setFlipVertical(boolean flipVertical) {
        this.flipVertical = flipVertical;
    }

    void setFlipHorizontal(boolean flipHorizontal) {
        this.flipHorizontal = flipHorizontal;
    }

    /**
     * Finalizes sizing: must be called after setOutputResolution() (and the other
     * setters) and before drawing; sizes FBOs/shaders and builds the projection.
     */
    void completeParams() {
        int width = outputResolution.getWidth();
        int height = outputResolution.getHeight();
        framebufferObject.setup(width, height);
        normalShader.setFrameSize(width, height);

        filterFramebufferObject.setup(width, height);
        previewShader.setFrameSize(width, height);
        // MCLog.d("onSurfaceChanged width = " + width + " height = " + height + " aspectRatio = " + scaleRatio);
        Matrix.frustumM(ProjMatrix, 0, -1f, 1f, -1, 1, 5, 7);
        Matrix.setIdentityM(MMatrix, 0);

        if (filter != null) {
            filter.setFrameSize(width, height);
        }

    }
}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/EncoderSurface.java b/gpuv/src/main/java/com/xypower/gpuv/composer/EncoderSurface.java
new file mode 100644
index 00000000..fbe85ea2
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/EncoderSurface.java
@@ -0,0 +1,141 @@
+package com.xypower.gpuv.composer;
+
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+// Refer : https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/InputSurface.java
+
+/**
+ * Holds state associated with a Surface used for MediaCodec encoder input.
+ *
+ * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
+ * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
+ * to the video encoder.
+ */
+class EncoderSurface {
+
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+ private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+ private Surface surface;
+
+ /**
+ * Creates an EncoderSurface from a Surface.
+ */
+ EncoderSurface(Surface surface) {
+ if (surface == null) {
+ throw new NullPointerException();
+ }
+ this.surface = surface;
+ eglSetup();
+ }
+
+ /**
+ * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
+ */
+ private void eglSetup() {
+ eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ eglDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+ // Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
+ // to minimize artifacts from possible YUV conversion.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+ }
+ // Configure context for OpenGL ES 2.0.
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (eglContext == null) {
+ throw new RuntimeException("null context");
+ }
+ // Create a window surface, and attach it to the Surface we received.
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, configs[0], surface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ }
+
+ /**
+ * Discard all resources held by this class, notably the EGL context. Also releases the
+ * Surface that was passed to our constructor.
+ */
+ public void release() {
+ if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ }
+ surface.release();
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ surface = null;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ */
+ void swapBuffers() {
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ void setPresentationTime(long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, nsecs);
+ }
+
+ /**
+ * Checks for EGL errors.
+ */
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/FillMode.java b/gpuv/src/main/java/com/xypower/gpuv/composer/FillMode.java
new file mode 100644
index 00000000..e399dd6b
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/FillMode.java
@@ -0,0 +1,51 @@
+package com.xypower.gpuv.composer;
+
/**
 * How the input video is mapped onto the output frame. The static helpers
 * return {scaleX, scaleY} factors to apply to a unit quad that already fills
 * the output.
 */
public enum FillMode {
    PRESERVE_ASPECT_FIT,
    PRESERVE_ASPECT_CROP,
    CUSTOM;

    /**
     * Letterbox scaling: the whole input stays visible, shrinking one axis.
     *
     * @param angle rotation in degrees; 90/270 swap the input dimensions
     * @return {scaleX, scaleY}, each in (0, 1]
     */
    public static float[] getScaleAspectFit(int angle, int widthIn, int heightIn, int widthOut, int heightOut) {
        // Fix: removed the redundant 'scale[0] = scale[1] = 1;' that immediately
        // repeated the array initializer.
        final float[] scale = {1, 1};
        if (angle == 90 || angle == 270) {
            final int swap = widthIn;
            widthIn = heightIn;
            heightIn = swap;
        }

        final float aspectRatioIn = (float) widthIn / (float) heightIn;
        final float heightOutCalculated = (float) widthOut / aspectRatioIn;

        if (heightOutCalculated < heightOut) {
            // Input is wider than output: shrink vertically.
            scale[1] = heightOutCalculated / heightOut;
        } else {
            // Input is taller than output: shrink horizontally.
            scale[0] = heightOut * aspectRatioIn / widthOut;
        }

        return scale;
    }

    /**
     * Center-crop scaling: the output is fully covered, enlarging one axis.
     *
     * @param angle rotation in degrees; 90/270 swap the input dimensions
     * @return {scaleX, scaleY}, each >= 1
     */
    public static float[] getScaleAspectCrop(int angle, int widthIn, int heightIn, int widthOut, int heightOut) {
        final float[] scale = {1, 1};
        if (angle == 90 || angle == 270) {
            final int swap = widthIn;
            widthIn = heightIn;
            heightIn = swap;
        }

        final float aspectRatioIn = (float) widthIn / (float) heightIn;
        final float aspectRatioOut = (float) widthOut / (float) heightOut;

        if (aspectRatioIn > aspectRatioOut) {
            // Input is relatively wider: stretch horizontally beyond the frame.
            final float widthOutCalculated = (float) heightOut * aspectRatioIn;
            scale[0] = widthOutCalculated / widthOut;
        } else {
            // Input is relatively taller: stretch vertically beyond the frame.
            final float heightOutCalculated = (float) widthOut / aspectRatioIn;
            scale[1] = heightOutCalculated / heightOut;
        }

        return scale;
    }
}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/FillModeCustomItem.java b/gpuv/src/main/java/com/xypower/gpuv/composer/FillModeCustomItem.java
new file mode 100644
index 00000000..ce37a1f7
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/FillModeCustomItem.java
@@ -0,0 +1,83 @@
+package com.xypower.gpuv.composer;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+public class FillModeCustomItem implements Parcelable {
+ private final float scale;
+ private final float rotate;
+ private final float translateX;
+ private final float translateY;
+ private final float videoWidth;
+ private final float videoHeight;
+
+ public FillModeCustomItem(float scale, float rotate, float translateX, float translateY, float videoWidth, float videoHeight) {
+ this.scale = scale;
+ this.rotate = rotate;
+ this.translateX = translateX;
+ this.translateY = translateY;
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ }
+
+ public float getScale() {
+ return scale;
+ }
+
+ public float getRotate() {
+ return rotate;
+ }
+
+ public float getTranslateX() {
+ return translateX;
+ }
+
+ public float getTranslateY() {
+ return translateY;
+ }
+
+ public float getVideoWidth() {
+ return videoWidth;
+ }
+
+ public float getVideoHeight() {
+ return videoHeight;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeFloat(this.scale);
+ dest.writeFloat(this.rotate);
+ dest.writeFloat(this.translateX);
+ dest.writeFloat(this.translateY);
+ dest.writeFloat(this.videoWidth);
+ dest.writeFloat(this.videoHeight);
+ }
+
+ protected FillModeCustomItem(Parcel in) {
+ this.scale = in.readFloat();
+ this.rotate = in.readFloat();
+ this.translateX = in.readFloat();
+ this.translateY = in.readFloat();
+ this.videoWidth = in.readFloat();
+ this.videoHeight = in.readFloat();
+ }
+
+ public static final Parcelable.Creator CREATOR = new Parcelable.Creator() {
+ @Override
+ public FillModeCustomItem createFromParcel(Parcel source) {
+ return new FillModeCustomItem(source);
+ }
+
+ @Override
+ public FillModeCustomItem[] newArray(int size) {
+ return new FillModeCustomItem[size];
+ }
+ };
+}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4Composer.java b/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4Composer.java
new file mode 100644
index 00000000..6c74a8dc
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4Composer.java
@@ -0,0 +1,322 @@
+package com.xypower.gpuv.composer;
+
+import android.content.Context;
+import android.media.MediaMetadataRetriever;
+import android.net.Uri;
+import android.util.Log;
+import android.util.Size;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+
+public class GPUMp4Composer {
+
+ private final static String TAG = GPUMp4Composer.class.getSimpleName();
+
+ private Context context;
+ private final String srcPath;
+ private final String destPath;
+ private GlFilter filter;
+ private Size outputResolution;
+ private int bitrate = -1;
+ private boolean mute = false;
+ private Rotation rotation = Rotation.NORMAL;
+ private Listener listener;
+ private FillMode fillMode = FillMode.PRESERVE_ASPECT_FIT;
+ private FillModeCustomItem fillModeCustomItem;
+ private int timeScale = 1;
+ private boolean flipVertical = false;
+ private boolean flipHorizontal = false;
+
+ private ExecutorService executorService;
+
+
+ public GPUMp4Composer(final String srcPath, final String destPath) {
+ this.srcPath = srcPath;
+ this.destPath = destPath;
+ }
+
+ public GPUMp4Composer(final Context context, final String srcPath, final String destPath) {
+ this.context = context;
+ this.srcPath = srcPath;
+ this.destPath = destPath;
+ }
+
+ public GPUMp4Composer filter(GlFilter filter) {
+ this.filter = filter;
+ return this;
+ }
+
+ public GPUMp4Composer size(int width, int height) {
+ this.outputResolution = new Size(width, height);
+ return this;
+ }
+
+ public GPUMp4Composer videoBitrate(int bitrate) {
+ this.bitrate = bitrate;
+ return this;
+ }
+
+ public GPUMp4Composer mute(boolean mute) {
+ this.mute = mute;
+ return this;
+ }
+
+ public GPUMp4Composer flipVertical(boolean flipVertical) {
+ this.flipVertical = flipVertical;
+ return this;
+ }
+
+ public GPUMp4Composer flipHorizontal(boolean flipHorizontal) {
+ this.flipHorizontal = flipHorizontal;
+ return this;
+ }
+
+ public GPUMp4Composer rotation(Rotation rotation) {
+ this.rotation = rotation;
+ return this;
+ }
+
+ public GPUMp4Composer fillMode(FillMode fillMode) {
+ this.fillMode = fillMode;
+ return this;
+ }
+
+ public GPUMp4Composer customFillMode(FillModeCustomItem fillModeCustomItem) {
+ this.fillModeCustomItem = fillModeCustomItem;
+ this.fillMode = FillMode.CUSTOM;
+ return this;
+ }
+
+
+ public GPUMp4Composer listener(Listener listener) {
+ this.listener = listener;
+ return this;
+ }
+
+ public GPUMp4Composer timeScale(final int timeScale) {
+ this.timeScale = timeScale;
+ return this;
+ }
+
+ private ExecutorService getExecutorService() {
+ if (executorService == null) {
+ executorService = Executors.newSingleThreadExecutor();
+ }
+ return executorService;
+ }
+
+
+ public GPUMp4Composer start() {
+ getExecutorService().execute(new Runnable() {
+ @Override
+ public void run() {
+ GPUMp4ComposerEngine engine = new GPUMp4ComposerEngine();
+
+ engine.setProgressCallback(new GPUMp4ComposerEngine.ProgressCallback() {
+ @Override
+ public void onProgress(final double progress) {
+ if (listener != null) {
+ listener.onProgress(progress);
+ }
+ }
+ });
+
+ final File srcFile = new File(srcPath);
+ final FileInputStream fileInputStream;
+ try {
+ if (srcPath.contains("content:/")) {
+ fileInputStream = (FileInputStream) context.getContentResolver().openInputStream(Uri.parse(srcPath));
+ } else {
+ fileInputStream = new FileInputStream(srcFile);
+ }
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ if (listener != null) {
+ listener.onFailed(e);
+ }
+ return;
+ } catch (NullPointerException e) {
+ Log.e(TAG, "Must have a context when use ScopedStorage");
+ e.printStackTrace();
+ if (listener != null) {
+ listener.onFailed(e);
+ }
+ return;
+ }
+
+ try {
+ engine.setDataSource(fileInputStream.getFD());
+ } catch (IOException e) {
+ e.printStackTrace();
+ if (listener != null) {
+ listener.onFailed(e);
+ }
+ return;
+ }
+
+ final int videoRotate = getVideoRotation(srcPath);
+ final Size srcVideoResolution = getVideoResolution(srcPath, videoRotate);
+
+ if (filter == null) {
+ filter = new GlFilter();
+ }
+
+ if (fillMode == null) {
+ fillMode = FillMode.PRESERVE_ASPECT_FIT;
+ }
+
+ if (fillModeCustomItem != null) {
+ fillMode = FillMode.CUSTOM;
+ }
+
+ if (outputResolution == null) {
+ if (fillMode == FillMode.CUSTOM) {
+ outputResolution = srcVideoResolution;
+ } else {
+ Rotation rotate = Rotation.fromInt(rotation.getRotation() + videoRotate);
+ if (rotate == Rotation.ROTATION_90 || rotate == Rotation.ROTATION_270) {
+ outputResolution = new Size(srcVideoResolution.getHeight(), srcVideoResolution.getWidth());
+ } else {
+ outputResolution = srcVideoResolution;
+ }
+ }
+ }
+// if (filter instanceof IResolutionFilter) {
+// ((IResolutionFilter) filter).setResolution(outputResolution);
+// }
+
+ if (timeScale < 2) {
+ timeScale = 1;
+ }
+
+ Log.d(TAG, "rotation = " + (rotation.getRotation() + videoRotate));
+ Log.d(TAG, "inputResolution width = " + srcVideoResolution.getWidth() + " height = " + srcVideoResolution.getHeight());
+ Log.d(TAG, "outputResolution width = " + outputResolution.getWidth() + " height = " + outputResolution.getHeight());
+ Log.d(TAG, "fillMode = " + fillMode);
+
+ try {
+ if (bitrate < 0) {
+ bitrate = calcBitRate(outputResolution.getWidth(), outputResolution.getHeight());
+ }
+ engine.compose(
+ destPath,
+ outputResolution,
+ filter,
+ bitrate,
+ mute,
+ Rotation.fromInt(rotation.getRotation() + videoRotate),
+ srcVideoResolution,
+ fillMode,
+ fillModeCustomItem,
+ timeScale,
+ flipVertical,
+ flipHorizontal
+ );
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ if (listener != null) {
+ listener.onFailed(e);
+ }
+ executorService.shutdown();
+ return;
+ }
+
+ if (listener != null) {
+ listener.onCompleted();
+ }
+ executorService.shutdown();
+ }
+ });
+
+ return this;
+ }
+
+ public void cancel() {
+ getExecutorService().shutdownNow();
+ }
+
+
+ public interface Listener {
+ /**
+ * Called to notify progress.
+ *
+ * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
+ */
+ void onProgress(double progress);
+
+ /**
+ * Called when transcode completed.
+ */
+ void onCompleted();
+
+ /**
+ * Called when transcode canceled.
+ */
+ void onCanceled();
+
+
+ void onFailed(Exception exception);
+ }
+
+ private int getVideoRotation(String videoFilePath) {
+ MediaMetadataRetriever mediaMetadataRetriever = null;
+ try {
+ mediaMetadataRetriever = new MediaMetadataRetriever();
+ mediaMetadataRetriever.setDataSource(videoFilePath);
+ String orientation = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
+ return Integer.valueOf(orientation);
+ } catch (IllegalArgumentException e) {
+ Log.e("MediaMetadataRetriever", "getVideoRotation IllegalArgumentException");
+ return 0;
+ } catch (RuntimeException e) {
+ Log.e("MediaMetadataRetriever", "getVideoRotation RuntimeException");
+ return 0;
+ } catch (Exception e) {
+ Log.e("MediaMetadataRetriever", "getVideoRotation Exception");
+ return 0;
+ } finally {
+ try {
+ if (mediaMetadataRetriever != null) {
+ mediaMetadataRetriever.release();
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
+ }
+ }
+ }
+
+ private int calcBitRate(int width, int height) {
+ final int bitrate = (int) (0.25 * 30 * width * height);
+ Log.i(TAG, "bitrate=" + bitrate);
+ return bitrate;
+ }
+
+ private Size getVideoResolution(final String path, final int rotation) {
+ MediaMetadataRetriever retriever = null;
+ try {
+ retriever = new MediaMetadataRetriever();
+ retriever.setDataSource(path);
+ int width = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
+ int height = Integer.valueOf(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
+
+ return new Size(width, height);
+ } finally {
+ try {
+ if (retriever != null) {
+ retriever.release();
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
+ }
+ }
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4ComposerEngine.java b/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4ComposerEngine.java
new file mode 100644
index 00000000..579a8d57
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/GPUMp4ComposerEngine.java
@@ -0,0 +1,228 @@
+package com.xypower.gpuv.composer;
+
+import android.media.*;
+import android.util.Log;
+import android.util.Size;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
+
+/**
+ * Internal engine, do not use this directly.
+ */
+class GPUMp4ComposerEngine {
+ private static final String TAG = "GPUMp4ComposerEngine";
+ private static final double PROGRESS_UNKNOWN = -1.0;
+ private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
+ private static final long PROGRESS_INTERVAL_STEPS = 10;
+ private FileDescriptor inputFileDescriptor;
+ private VideoComposer videoComposer;
+ private IAudioComposer audioComposer;
+ private MediaExtractor mediaExtractor;
+ private MediaMuxer mediaMuxer;
+ private ProgressCallback progressCallback;
+ private long durationUs;
+ private MediaMetadataRetriever mediaMetadataRetriever;
+
+
+ void setDataSource(FileDescriptor fileDescriptor) {
+ inputFileDescriptor = fileDescriptor;
+ }
+
+ void setProgressCallback(ProgressCallback progressCallback) {
+ this.progressCallback = progressCallback;
+ }
+
+
+ void compose(
+ final String destPath,
+ final Size outputResolution,
+ final GlFilter filter,
+ final int bitrate,
+ final boolean mute,
+ final Rotation rotation,
+ final Size inputResolution,
+ final FillMode fillMode,
+ final FillModeCustomItem fillModeCustomItem,
+ final int timeScale,
+ final boolean flipVertical,
+ final boolean flipHorizontal
+ ) throws IOException {
+
+
+ try {
+ mediaExtractor = new MediaExtractor();
+ mediaExtractor.setDataSource(inputFileDescriptor);
+ mediaMuxer = new MediaMuxer(destPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+ mediaMetadataRetriever = new MediaMetadataRetriever();
+ mediaMetadataRetriever.setDataSource(inputFileDescriptor);
+ try {
+ durationUs = Long.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
+ } catch (NumberFormatException e) {
+ durationUs = -1;
+ }
+ Log.d(TAG, "Duration (us): " + durationUs);
+
+ MediaFormat videoOutputFormat = MediaFormat.createVideoFormat("video/avc", outputResolution.getWidth(), outputResolution.getHeight());
+
+ videoOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
+ videoOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ videoOutputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+ videoOutputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+
+
+ MuxRender muxRender = new MuxRender(mediaMuxer);
+
+ // identify track indices
+ MediaFormat format = mediaExtractor.getTrackFormat(0);
+ String mime = format.getString(MediaFormat.KEY_MIME);
+
+ final int videoTrackIndex;
+ final int audioTrackIndex;
+
+ if (mime.startsWith("video/")) {
+ videoTrackIndex = 0;
+ audioTrackIndex = 1;
+ } else {
+ videoTrackIndex = 1;
+ audioTrackIndex = 0;
+ }
+
+ // setup video composer
+ videoComposer = new VideoComposer(mediaExtractor, videoTrackIndex, videoOutputFormat, muxRender, timeScale);
+ videoComposer.setUp(filter, rotation, outputResolution, inputResolution, fillMode, fillModeCustomItem, flipVertical, flipHorizontal);
+ mediaExtractor.selectTrack(videoTrackIndex);
+
+ // setup audio if present and not muted
+ if (mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO) != null && !mute) {
+ // has Audio video
+
+ if (timeScale < 2) {
+ audioComposer = new AudioComposer(mediaExtractor, audioTrackIndex, muxRender);
+ } else {
+ audioComposer = new RemixAudioComposer(mediaExtractor, audioTrackIndex, mediaExtractor.getTrackFormat(audioTrackIndex), muxRender, timeScale);
+ }
+
+ audioComposer.setup();
+
+ mediaExtractor.selectTrack(audioTrackIndex);
+
+ runPipelines();
+ } else {
+ // no audio video
+ runPipelinesNoAudio();
+ }
+
+
+ mediaMuxer.stop();
+ } finally {
+ try {
+ if (videoComposer != null) {
+ videoComposer.release();
+ videoComposer = null;
+ }
+ if (audioComposer != null) {
+ audioComposer.release();
+ audioComposer = null;
+ }
+ if (mediaExtractor != null) {
+ mediaExtractor.release();
+ mediaExtractor = null;
+ }
+ } catch (RuntimeException e) {
+ // Too fatal to make alive the app, because it may leak native resources.
+ //noinspection ThrowFromFinallyBlock
+ throw new Error("Could not shutdown mediaExtractor, codecs and mediaMuxer pipeline.", e);
+ }
+ try {
+ if (mediaMuxer != null) {
+ mediaMuxer.release();
+ mediaMuxer = null;
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Failed to release mediaMuxer.", e);
+ }
+ try {
+ if (mediaMetadataRetriever != null) {
+ mediaMetadataRetriever.release();
+ mediaMetadataRetriever = null;
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Failed to release mediaMetadataRetriever.", e);
+ }
+ }
+
+
+ }
+
+
+ private void runPipelines() {
+ long loopCount = 0;
+ if (durationUs <= 0) {
+ if (progressCallback != null) {
+ progressCallback.onProgress(PROGRESS_UNKNOWN);
+ }// unknown
+ }
+ while (!(videoComposer.isFinished() && audioComposer.isFinished())) {
+ boolean stepped = videoComposer.stepPipeline()
+ || audioComposer.stepPipeline();
+ loopCount++;
+ if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
+ double videoProgress = videoComposer.isFinished() ? 1.0 : Math.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
+ double audioProgress = audioComposer.isFinished() ? 1.0 : Math.min(1.0, (double) audioComposer.getWrittenPresentationTimeUs() / durationUs);
+ double progress = (videoProgress + audioProgress) / 2.0;
+ if (progressCallback != null) {
+ progressCallback.onProgress(progress);
+ }
+ }
+ if (!stepped) {
+ try {
+ Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
+ } catch (InterruptedException e) {
+ // nothing to do
+ }
+ }
+ }
+ }
+
+ private void runPipelinesNoAudio() {
+ long loopCount = 0;
+ if (durationUs <= 0) {
+ if (progressCallback != null) {
+ progressCallback.onProgress(PROGRESS_UNKNOWN);
+ } // unknown
+ }
+ while (!videoComposer.isFinished()) {
+ boolean stepped = videoComposer.stepPipeline();
+ loopCount++;
+ if (durationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
+ double videoProgress = videoComposer.isFinished() ? 1.0 : Math.min(1.0, (double) videoComposer.getWrittenPresentationTimeUs() / durationUs);
+ if (progressCallback != null) {
+ progressCallback.onProgress(videoProgress);
+ }
+ }
+ if (!stepped) {
+ try {
+ Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
+ } catch (InterruptedException e) {
+ // nothing to do
+ }
+ }
+ }
+
+
+ }
+
+
+ interface ProgressCallback {
+ /**
+ * Called to notify progress. Same thread which initiated transcode is used.
+ *
+ * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
+ */
+ void onProgress(double progress);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/IAudioComposer.java b/gpuv/src/main/java/com/xypower/gpuv/composer/IAudioComposer.java
new file mode 100644
index 00000000..cea0be74
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/IAudioComposer.java
@@ -0,0 +1,15 @@
+package com.xypower.gpuv.composer;
+
+
/**
 * Contract for audio-track processors driven by {@code GPUMp4ComposerEngine}
 * (pass-through {@code AudioComposer} and re-encoding {@code RemixAudioComposer}).
 */
interface IAudioComposer {

    // Prepare extractor/codec state; must be called once before stepPipeline().
    void setup();

    // Advance the pipeline; returns true if any work was done
    // (the engine sleeps briefly when false to avoid busy-spinning).
    boolean stepPipeline();

    // Presentation time (us) of the last sample written to the muxer.
    long getWrittenPresentationTimeUs();

    // True once the end of stream has been fully written.
    boolean isFinished();

    // Release codecs and any other native resources.
    void release();
}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/MediaCodecBufferCompatWrapper.java b/gpuv/src/main/java/com/xypower/gpuv/composer/MediaCodecBufferCompatWrapper.java
new file mode 100644
index 00000000..13cea5f8
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/MediaCodecBufferCompatWrapper.java
@@ -0,0 +1,46 @@
+package com.xypower.gpuv.composer;
+
+
+import android.media.MediaCodec;
+import android.os.Build;
+
+import java.nio.ByteBuffer;
+
+// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/compat/MediaCodecBufferCompatWrapper.java
+
+/**
+ * A Wrapper to MediaCodec that facilitates the use of API-dependent get{Input/Output}Buffer methods,
+ * in order to prevent: http://stackoverflow.com/q/30646885
+ */
+
+class MediaCodecBufferCompatWrapper {
+ private final MediaCodec mediaCodec;
+ private final ByteBuffer[] inputBuffers;
+ private final ByteBuffer[] putputBuffers;
+
+ MediaCodecBufferCompatWrapper(MediaCodec mediaCodec) {
+ this.mediaCodec = mediaCodec;
+
+ if (Build.VERSION.SDK_INT < 21) {
+ inputBuffers = mediaCodec.getInputBuffers();
+ putputBuffers = mediaCodec.getOutputBuffers();
+ } else {
+ inputBuffers = putputBuffers = null;
+ }
+ }
+
+ ByteBuffer getInputBuffer(final int index) {
+ if (Build.VERSION.SDK_INT >= 21) {
+ return mediaCodec.getInputBuffer(index);
+ }
+ return inputBuffers[index];
+ }
+
+ ByteBuffer getOutputBuffer(final int index) {
+ if (Build.VERSION.SDK_INT >= 21) {
+ return mediaCodec.getOutputBuffer(index);
+ }
+ return putputBuffers[index];
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/MuxRender.java b/gpuv/src/main/java/com/xypower/gpuv/composer/MuxRender.java
new file mode 100644
index 00000000..42c2e46a
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/MuxRender.java
@@ -0,0 +1,128 @@
+package com.xypower.gpuv.composer;
+
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.List;
+
+// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
+
+class MuxRender {
+ private static final String TAG = "MuxRender";
+ private static final int BUFFER_SIZE = 64 * 1024; // I have no idea whether this value is appropriate or not...
+ private final MediaMuxer muxer;
+ private MediaFormat videoFormat;
+ private MediaFormat audioFormat;
+ private int videoTrackIndex;
+ private int audioTrackIndex;
+ private ByteBuffer byteBuffer;
+ private final List sampleInfoList;
+ private boolean started;
+
+ MuxRender(MediaMuxer muxer) {
+ this.muxer = muxer;
+ sampleInfoList = new ArrayList<>();
+ }
+
+ void setOutputFormat(SampleType sampleType, MediaFormat format) {
+ switch (sampleType) {
+ case VIDEO:
+ videoFormat = format;
+ break;
+ case AUDIO:
+ audioFormat = format;
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ void onSetOutputFormat() {
+
+ if (videoFormat != null && audioFormat != null) {
+
+ videoTrackIndex = muxer.addTrack(videoFormat);
+ Log.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+ audioTrackIndex = muxer.addTrack(audioFormat);
+ Log.v(TAG, "Added track #" + audioTrackIndex + " with " + audioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+
+ } else if (videoFormat != null) {
+
+ videoTrackIndex = muxer.addTrack(videoFormat);
+ Log.v(TAG, "Added track #" + videoTrackIndex + " with " + videoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+
+ }
+
+ muxer.start();
+ started = true;
+
+ if (byteBuffer == null) {
+ byteBuffer = ByteBuffer.allocate(0);
+ }
+ byteBuffer.flip();
+ Log.v(TAG, "Output format determined, writing " + sampleInfoList.size() +
+ " samples / " + byteBuffer.limit() + " bytes to muxer.");
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ int offset = 0;
+ for (SampleInfo sampleInfo : sampleInfoList) {
+ sampleInfo.writeToBufferInfo(bufferInfo, offset);
+ muxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.sampleType), byteBuffer, bufferInfo);
+ offset += sampleInfo.size;
+ }
+ sampleInfoList.clear();
+ byteBuffer = null;
+
+
+ }
+
+ void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
+ if (started) {
+ muxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
+ return;
+ }
+ byteBuf.limit(bufferInfo.offset + bufferInfo.size);
+ byteBuf.position(bufferInfo.offset);
+ if (byteBuffer == null) {
+ byteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
+ }
+ byteBuffer.put(byteBuf);
+ sampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
+ }
+
+ private int getTrackIndexForSampleType(SampleType sampleType) {
+ switch (sampleType) {
+ case VIDEO:
+ return videoTrackIndex;
+ case AUDIO:
+ return audioTrackIndex;
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public enum SampleType {VIDEO, AUDIO}
+
+ private static class SampleInfo {
+ private final SampleType sampleType;
+ private final int size;
+ private final long presentationTimeUs;
+ private final int flags;
+
+ private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
+ this.sampleType = sampleType;
+ this.size = size;
+ presentationTimeUs = bufferInfo.presentationTimeUs;
+ flags = bufferInfo.flags;
+ }
+
+ private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
+ bufferInfo.set(offset, size, presentationTimeUs, flags);
+ }
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/RemixAudioComposer.java b/gpuv/src/main/java/com/xypower/gpuv/composer/RemixAudioComposer.java
new file mode 100644
index 00000000..cd6e331d
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/RemixAudioComposer.java
@@ -0,0 +1,218 @@
+package com.xypower.gpuv.composer;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+
+import java.io.IOException;
+
+// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/AudioTrackTranscoder.java
+
+
+
/**
 * Re-encoding audio path used when timeScale > 1: decodes the source audio,
 * routes PCM through an {@link AudioChannel}, re-encodes, and writes a subset
 * of the encoded buffers to the muxer (see {@code muxCount} below).
 *
 * <p>State machine is order-sensitive; do not reorder the drain/feed calls.
 */
class RemixAudioComposer implements IAudioComposer {
    private static final MuxRender.SampleType SAMPLE_TYPE = MuxRender.SampleType.AUDIO;

    // Return codes shared by the drain* helpers.
    private static final int DRAIN_STATE_NONE = 0;
    private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
    private static final int DRAIN_STATE_CONSUMED = 2;

    private final MediaExtractor extractor;
    private final MuxRender muxer;
    private long writtenPresentationTimeUs;

    private final int trackIndex;
    // Counts encoded buffers; only every timeScale-th buffer is muxed.
    private int muxCount = 1;

    private final MediaFormat outputFormat;

    private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    private MediaCodec decoder;
    private MediaCodec encoder;
    private MediaFormat actualOutputFormat;

    private MediaCodecBufferCompatWrapper decoderBuffers;
    private MediaCodecBufferCompatWrapper encoderBuffers;

    // EOS flags for each pipeline stage, set in order: extractor -> decoder -> encoder.
    private boolean isExtractorEOS;
    private boolean isDecoderEOS;
    private boolean isEncoderEOS;
    // Guard flags so release() only stops codecs that were actually started.
    private boolean decoderStarted;
    private boolean encoderStarted;

    private AudioChannel audioChannel;
    private final int timeScale;

    public RemixAudioComposer(MediaExtractor extractor, int trackIndex,
                              MediaFormat outputFormat, MuxRender muxer, int timeScale) {
        this.extractor = extractor;
        this.trackIndex = trackIndex;
        this.outputFormat = outputFormat;
        this.muxer = muxer;
        this.timeScale = timeScale;
    }

    /** Creates and starts the encoder and decoder, and wires up the AudioChannel. */
    @Override
    public void setup() {
        extractor.selectTrack(trackIndex);
        try {
            encoder = MediaCodec.createEncoderByType(outputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
        encoderStarted = true;
        encoderBuffers = new MediaCodecBufferCompatWrapper(encoder);

        final MediaFormat inputFormat = extractor.getTrackFormat(trackIndex);
        try {
            decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        decoder.configure(inputFormat, null, null, 0);
        decoder.start();
        decoderStarted = true;
        decoderBuffers = new MediaCodecBufferCompatWrapper(decoder);

        audioChannel = new AudioChannel(decoder, encoder, outputFormat);
    }

    /**
     * Advances each pipeline stage without blocking (0-us timeouts).
     *
     * @return true if any stage made progress.
     */
    @Override
    public boolean stepPipeline() {
        boolean busy = false;

        int status;
        while (drainEncoder(0) != DRAIN_STATE_NONE) busy = true;
        do {
            status = drainDecoder(0);
            if (status != DRAIN_STATE_NONE) busy = true;
            // NOTE: not repeating to keep from deadlock when encoder is full.
        } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);

        while (audioChannel.feedEncoder(0)) busy = true;
        while (drainExtractor(0) != DRAIN_STATE_NONE) busy = true;

        return busy;
    }

    /** Pulls one sample from the extractor into the decoder's input, or queues EOS. */
    private int drainExtractor(long timeoutUs) {
        if (isExtractorEOS) return DRAIN_STATE_NONE;
        int trackIndex = extractor.getSampleTrackIndex();
        // A sample for another track (e.g. video) is not ours to consume.
        if (trackIndex >= 0 && trackIndex != this.trackIndex) {
            return DRAIN_STATE_NONE;
        }

        final int result = decoder.dequeueInputBuffer(timeoutUs);
        if (result < 0) return DRAIN_STATE_NONE;
        if (trackIndex < 0) {
            // No more samples: signal end of stream to the decoder.
            isExtractorEOS = true;
            decoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            return DRAIN_STATE_NONE;
        }

        final int sampleSize = extractor.readSampleData(decoderBuffers.getInputBuffer(result), 0);
        final boolean isKeyFrame = (extractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
        decoder.queueInputBuffer(result, 0, sampleSize, extractor.getSampleTime(), isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
        extractor.advance();
        return DRAIN_STATE_CONSUMED;
    }

    /** Moves decoded PCM from the decoder into the AudioChannel. */
    private int drainDecoder(long timeoutUs) {
        if (isDecoderEOS) return DRAIN_STATE_NONE;

        int result = decoder.dequeueOutputBuffer(bufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                audioChannel.setActualDecodedFormat(decoder.getOutputFormat());
                // intentional fall-through: both cases just require an immediate retry
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }

        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            isDecoderEOS = true;
            audioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
        } else if (bufferInfo.size > 0) {
            // Timestamps are compressed by timeScale to speed the audio up.
            audioChannel.drainDecoderBufferAndQueue(result, bufferInfo.presentationTimeUs / timeScale);
        }

        return DRAIN_STATE_CONSUMED;
    }

    /** Pulls encoded buffers from the encoder and (selectively) writes them to the muxer. */
    private int drainEncoder(long timeoutUs) {
        if (isEncoderEOS) return DRAIN_STATE_NONE;

        int result = encoder.dequeueOutputBuffer(bufferInfo, timeoutUs);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                if (actualOutputFormat != null) {
                    throw new RuntimeException("Audio output format changed twice.");
                }
                actualOutputFormat = encoder.getOutputFormat();
                muxer.setOutputFormat(SAMPLE_TYPE, actualOutputFormat);
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                // Pre-API-21 buffer arrays are invalidated; re-wrap them.
                encoderBuffers = new MediaCodecBufferCompatWrapper(encoder);
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }

        if (actualOutputFormat == null) {
            throw new RuntimeException("Could not determine actual output format.");
        }

        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            isEncoderEOS = true;
            bufferInfo.set(0, 0, 0, bufferInfo.flags);
        }
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // SPS or PPS, which should be passed by MediaFormat.
            encoder.releaseOutputBuffer(result, false);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }

        // Writes only 1 of every timeScale encoded buffers — presumably to keep the
        // shortened audio in step with the fast-forwarded video; confirm A/V sync.
        if (muxCount == 1) {
            muxer.writeSampleData(SAMPLE_TYPE, encoderBuffers.getOutputBuffer(result), bufferInfo);
        }
        if (muxCount < timeScale) {
            muxCount++;
        } else {
            muxCount = 1;
        }

        writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
        encoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_CONSUMED;
    }

    @Override
    public long getWrittenPresentationTimeUs() {
        return writtenPresentationTimeUs;
    }

    @Override
    public boolean isFinished() {
        return isEncoderEOS;
    }

    /** Stops (if started) and releases both codecs; safe to call multiple times. */
    @Override
    public void release() {
        if (decoder != null) {
            if (decoderStarted) decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (encoder != null) {
            if (encoderStarted) encoder.stop();
            encoder.release();
            encoder = null;
        }
    }

}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/Rotation.java b/gpuv/src/main/java/com/xypower/gpuv/composer/Rotation.java
new file mode 100644
index 00000000..9b4c51e0
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/Rotation.java
@@ -0,0 +1,27 @@
+package com.xypower.gpuv.composer;
+
/**
 * Video rotation in degrees, restricted to the four quarter-turn values.
 */
public enum Rotation {
    NORMAL(0),
    ROTATION_90(90),
    ROTATION_180(180),
    ROTATION_270(270);

    private final int rotation;

    Rotation(int rotation) {
        this.rotation = rotation;
    }

    /** @return the rotation angle in degrees (0, 90, 180 or 270). */
    public int getRotation() {
        return rotation;
    }

    /**
     * Maps an angle in degrees to the matching constant.
     *
     * <p>The angle is first normalized into [0, 360), so composite angles
     * produced by summing rotations (e.g. 180 + 270 = 450, or negatives)
     * map to the correct constant instead of silently falling back to
     * {@link #NORMAL} as the original implementation did. Angles that are
     * not multiples of 90 still return {@link #NORMAL}.
     *
     * @param rotate angle in degrees, may be negative or ≥ 360
     * @return the matching constant, or {@link #NORMAL} if none matches
     */
    public static Rotation fromInt(int rotate) {
        final int normalized = ((rotate % 360) + 360) % 360;
        for (Rotation rotation : Rotation.values()) {
            if (normalized == rotation.getRotation()) return rotation;
        }

        return NORMAL;
    }

}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/composer/VideoComposer.java b/gpuv/src/main/java/com/xypower/gpuv/composer/VideoComposer.java
new file mode 100644
index 00000000..3ec18d4e
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/composer/VideoComposer.java
@@ -0,0 +1,238 @@
+
+package com.xypower.gpuv.composer;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Size;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
// Refer: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java
// Refer: https://github.com/ypresto/android-transcoder/blob/master/lib/src/main/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java

/**
 * Transcodes a single video track through the pipeline
 * extractor -> decoder -> decoder output Surface (GL filter applied) -> encoder -> muxer.
 * Call {@link #setUp} once, then pump {@link #stepPipeline()} until {@link #isFinished()}.
 */
class VideoComposer {
    private static final String TAG = "VideoComposer";
    // Return codes shared by the drain* methods below.
    private static final int DRAIN_STATE_NONE = 0;                     // nothing to do right now
    private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1; // codec state changed; call again
    private static final int DRAIN_STATE_CONSUMED = 2;                 // one buffer was processed

    private final MediaExtractor mediaExtractor;
    private final int trackIndex;
    private final MediaFormat outputFormat;
    private final MuxRender muxRender;
    // Reused for every dequeue; valid only until the next dequeue call.
    private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    private MediaCodec decoder;
    private MediaCodec encoder;
    private ByteBuffer[] decoderInputBuffers;
    private ByteBuffer[] encoderOutputBuffers;
    // Format reported by the encoder at INFO_OUTPUT_FORMAT_CHANGED; must be set
    // exactly once before any sample is written.
    private MediaFormat actualOutputFormat;
    private DecoderSurface decoderSurface;
    private EncoderSurface encoderSurface;
    // EOS propagates extractor -> decoder -> encoder; each flag latches true.
    private boolean isExtractorEOS;
    private boolean isDecoderEOS;
    private boolean isEncoderEOS;
    private boolean decoderStarted;
    private boolean encoderStarted;
    private long writtenPresentationTimeUs;
    // Divisor applied to extractor timestamps (see drainExtractor) —
    // presumably to speed up playback of the output; confirm with callers.
    private final int timeScale;

    VideoComposer(MediaExtractor mediaExtractor, int trackIndex,
                  MediaFormat outputFormat, MuxRender muxRender, int timeScale) {
        this.mediaExtractor = mediaExtractor;
        this.trackIndex = trackIndex;
        this.outputFormat = outputFormat;
        this.muxRender = muxRender;
        this.timeScale = timeScale;
    }


    /**
     * Creates and starts the encoder and decoder, wiring the decoder's output
     * Surface through {@code DecoderSurface} so the GL filter and geometry
     * transforms are applied to every frame before encoding.
     */
    void setUp(GlFilter filter,
               Rotation rotation,
               Size outputResolution,
               Size inputResolution,
               FillMode fillMode,
               FillModeCustomItem fillModeCustomItem,
               final boolean flipVertical,
               final boolean flipHorizontal) {
        mediaExtractor.selectTrack(trackIndex);
        try {
            encoder = MediaCodec.createEncoderByType(outputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // The encoder's input Surface must be created between configure() and start(),
        // and made current before any GL work.
        encoderSurface = new EncoderSurface(encoder.createInputSurface());
        encoderSurface.makeCurrent();
        encoder.start();
        encoderStarted = true;
        encoderOutputBuffers = encoder.getOutputBuffers();

        MediaFormat inputFormat = mediaExtractor.getTrackFormat(trackIndex);
        if (inputFormat.containsKey("rotation-degrees")) {
            // Decoded video is rotated automatically in Android 5.0 lollipop.
            // Turn off here because we don't want to encode rotated one.
            // refer: https://android.googlesource.com/platform/frameworks/av/+blame/lollipop-release/media/libstagefright/Utils.cpp
            inputFormat.setInteger("rotation-degrees", 0);
        }
        decoderSurface = new DecoderSurface(filter);
        decoderSurface.setRotation(rotation);
        decoderSurface.setOutputResolution(outputResolution);
        decoderSurface.setInputResolution(inputResolution);
        decoderSurface.setFillMode(fillMode);
        decoderSurface.setFillModeCustomItem(fillModeCustomItem);
        decoderSurface.setFlipHorizontal(flipHorizontal);
        decoderSurface.setFlipVertical(flipVertical);
        decoderSurface.completeParams();

        try {
            decoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
        decoder.configure(inputFormat, decoderSurface.getSurface(), null, 0);
        decoder.start();
        decoderStarted = true;
        decoderInputBuffers = decoder.getInputBuffers();
    }


    /**
     * Advances each pipeline stage as far as it will go without blocking.
     *
     * @return true if any stage made progress (caller should step again soon).
     */
    boolean stepPipeline() {
        boolean busy = false;

        int status;
        while (drainEncoder() != DRAIN_STATE_NONE) {
            busy = true;
        }
        do {
            status = drainDecoder();
            if (status != DRAIN_STATE_NONE) {
                busy = true;
            }
            // NOTE: not repeating to keep from deadlock when encoder is full.
        } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
        while (drainExtractor() != DRAIN_STATE_NONE) {
            busy = true;
        }

        return busy;
    }


    /** Presentation timestamp (microseconds) of the most recently muxed sample. */
    long getWrittenPresentationTimeUs() {
        return writtenPresentationTimeUs;
    }


    /** @return true once the encoder has emitted its end-of-stream buffer. */
    boolean isFinished() {
        return isEncoderEOS;
    }


    /** Releases GL surfaces and codecs; references are nulled so this is idempotent. */
    void release() {
        if (decoderSurface != null) {
            decoderSurface.release();
            decoderSurface = null;
        }
        if (encoderSurface != null) {
            encoderSurface.release();
            encoderSurface = null;
        }
        if (decoder != null) {
            if (decoderStarted) decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (encoder != null) {
            if (encoderStarted) encoder.stop();
            encoder.release();
            encoder = null;
        }
    }

    /**
     * Feeds one encoded sample from the extractor into the decoder.
     * Queues an EOS buffer when the extractor reports no more samples.
     */
    private int drainExtractor() {
        if (isExtractorEOS) return DRAIN_STATE_NONE;
        int trackIndex = mediaExtractor.getSampleTrackIndex();
        // A positive index belonging to another track is not ours to consume.
        if (trackIndex >= 0 && trackIndex != this.trackIndex) {
            return DRAIN_STATE_NONE;
        }
        int result = decoder.dequeueInputBuffer(0);
        if (result < 0) return DRAIN_STATE_NONE;
        if (trackIndex < 0) {
            // Negative track index means the extractor is exhausted.
            isExtractorEOS = true;
            decoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            return DRAIN_STATE_NONE;
        }
        int sampleSize = mediaExtractor.readSampleData(decoderInputBuffers[result], 0);
        boolean isKeyFrame = (mediaExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
        // Timestamps are divided by timeScale, compressing the output timeline.
        decoder.queueInputBuffer(result, 0, sampleSize, mediaExtractor.getSampleTime() / timeScale, isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
        mediaExtractor.advance();
        return DRAIN_STATE_CONSUMED;
    }

    /**
     * Pulls one decoded frame, renders it through the GL filter chain, and
     * pushes it onto the encoder's input Surface.
     */
    private int drainDecoder() {
        if (isDecoderEOS) return DRAIN_STATE_NONE;
        int result = decoder.dequeueOutputBuffer(bufferInfo, 0);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            // Propagate EOS to the encoder via its input Surface.
            encoder.signalEndOfInputStream();
            isDecoderEOS = true;
            bufferInfo.size = 0;
        }
        boolean doRender = (bufferInfo.size > 0);
        // NOTE: doRender will block if buffer (of encoder) is full.
        // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
        decoder.releaseOutputBuffer(result, doRender);
        if (doRender) {
            decoderSurface.awaitNewImage();
            decoderSurface.drawImage();
            // setPresentationTime expects nanoseconds; bufferInfo carries microseconds.
            encoderSurface.setPresentationTime(bufferInfo.presentationTimeUs * 1000);
            encoderSurface.swapBuffers();
        }
        return DRAIN_STATE_CONSUMED;
    }

    /**
     * Pulls one encoded buffer from the encoder and hands it to the muxer.
     * The first INFO_OUTPUT_FORMAT_CHANGED establishes the muxer track format.
     */
    private int drainEncoder() {
        if (isEncoderEOS) return DRAIN_STATE_NONE;
        int result = encoder.dequeueOutputBuffer(bufferInfo, 0);
        switch (result) {
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return DRAIN_STATE_NONE;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                if (actualOutputFormat != null) {
                    throw new RuntimeException("Video output format changed twice.");
                }
                actualOutputFormat = encoder.getOutputFormat();
                muxRender.setOutputFormat(MuxRender.SampleType.VIDEO, actualOutputFormat);
                muxRender.onSetOutputFormat();
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                encoderOutputBuffers = encoder.getOutputBuffers();
                return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        if (actualOutputFormat == null) {
            throw new RuntimeException("Could not determine actual output format.");
        }

        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            isEncoderEOS = true;
            bufferInfo.set(0, 0, 0, bufferInfo.flags);
        }
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // SPS or PPS, which should be passed by MediaFormat.
            encoder.releaseOutputBuffer(result, false);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        }
        muxRender.writeSampleData(MuxRender.SampleType.VIDEO, encoderOutputBuffers[result], bufferInfo);
        writtenPresentationTimeUs = bufferInfo.presentationTimeUs;
        encoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_CONSUMED;
    }
}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultConfigChooser.java b/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultConfigChooser.java
new file mode 100644
index 00000000..b9da7f92
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultConfigChooser.java
@@ -0,0 +1,139 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLSurfaceView;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+import static javax.microedition.khronos.egl.EGL10.EGL_ALPHA_SIZE;
+import static javax.microedition.khronos.egl.EGL10.EGL_BLUE_SIZE;
+import static javax.microedition.khronos.egl.EGL10.EGL_DEPTH_SIZE;
+import static javax.microedition.khronos.egl.EGL10.EGL_GREEN_SIZE;
+import static javax.microedition.khronos.egl.EGL10.EGL_NONE;
+import static javax.microedition.khronos.egl.EGL10.EGL_RED_SIZE;
+import static javax.microedition.khronos.egl.EGL10.EGL_RENDERABLE_TYPE;
+import static javax.microedition.khronos.egl.EGL10.EGL_STENCIL_SIZE;
+
+
+
/**
 * GLSurfaceView.EGLConfigChooser that selects an EGLConfig with at least the
 * requested depth/stencil sizes and exactly the requested color channel sizes.
 */
public class DefaultConfigChooser implements GLSurfaceView.EGLConfigChooser {

    // Attribute/value pairs passed to eglChooseConfig, terminated by EGL_NONE.
    private final int[] configSpec;
    private final int redSize;
    private final int greenSize;
    private final int blueSize;
    private final int alphaSize;
    private final int depthSize;
    private final int stencilSize;

    public DefaultConfigChooser(final int version) {
        this(true, version);
    }

    /** RGB888, no alpha, optional 16-bit depth, no stencil. */
    public DefaultConfigChooser(final boolean withDepthBuffer, final int version) {
        this(
                8,
                8,
                8,
                0,
                withDepthBuffer ? 16 : 0,
                0,
                version
        );
    }

    public DefaultConfigChooser(
            final int redSize,
            final int greenSize,
            final int blueSize,
            final int alphaSize,
            final int depthSize,
            final int stencilSize,
            final int version) {
        configSpec = filterConfigSpec(new int[]{
                EGL_RED_SIZE, redSize,
                EGL_GREEN_SIZE, greenSize,
                EGL_BLUE_SIZE, blueSize,
                EGL_ALPHA_SIZE, alphaSize,
                EGL_DEPTH_SIZE, depthSize,
                EGL_STENCIL_SIZE, stencilSize,
                EGL_NONE
        }, version);
        this.redSize = redSize;
        this.greenSize = greenSize;
        this.blueSize = blueSize;
        this.alphaSize = alphaSize;
        this.depthSize = depthSize;
        this.stencilSize = stencilSize;
    }

    // EGL_RENDERABLE_TYPE bit for OpenGL ES 2.x (not exposed by EGL10).
    private static final int EGL_OPENGL_ES2_BIT = 4;

    /**
     * For ES 2, appends EGL_RENDERABLE_TYPE=EGL_OPENGL_ES2_BIT to the spec:
     * the trailing EGL_NONE is overwritten and the list is re-terminated.
     */
    private int[] filterConfigSpec(final int[] configSpec, final int version) {
        if (version != 2) {
            return configSpec;
        }

        final int len = configSpec.length;
        final int[] newConfigSpec = new int[len + 2];
        // Copy everything except the trailing EGL_NONE terminator.
        System.arraycopy(configSpec, 0, newConfigSpec, 0, len - 1);
        newConfigSpec[len - 1] = EGL_RENDERABLE_TYPE;
        newConfigSpec[len] = EGL_OPENGL_ES2_BIT;
        newConfigSpec[len + 1] = EGL_NONE;
        return newConfigSpec;
    }

    //////////////////////////////////////////////////////////////////////////

    @Override
    public EGLConfig chooseConfig(final EGL10 egl, final EGLDisplay display) {
        // Query how many configs satisfy the requested spec.
        final int[] num_config = new int[1];
        if (!egl.eglChooseConfig(display, configSpec, null, 0, num_config)) {
            throw new IllegalArgumentException("eglChooseConfig failed");
        }
        final int config_size = num_config[0];
        if (config_size <= 0) {
            throw new IllegalArgumentException("No configs match configSpec");
        }

        // Retrieve the actual matching configs.
        final EGLConfig[] configs = new EGLConfig[config_size];
        if (!egl.eglChooseConfig(display, configSpec, configs, config_size, num_config)) {
            throw new IllegalArgumentException("eglChooseConfig#2 failed");
        }
        final EGLConfig config = chooseConfig(egl, display, configs);
        if (config == null) {
            throw new IllegalArgumentException("No config chosen");
        }
        return config;
    }

    /**
     * Picks the first config whose depth/stencil meet the minimums and whose
     * color channel sizes match the request exactly.
     */
    private EGLConfig chooseConfig(final EGL10 egl, final EGLDisplay display, final EGLConfig[] configs) {
        for (final EGLConfig config : configs) {
            final int d = findConfigAttrib(egl, display, config, EGL_DEPTH_SIZE, 0);
            final int s = findConfigAttrib(egl, display, config, EGL_STENCIL_SIZE, 0);
            if ((d >= depthSize) && (s >= stencilSize)) {
                final int r = findConfigAttrib(egl, display, config, EGL_RED_SIZE, 0);
                final int g = findConfigAttrib(egl, display, config, EGL_GREEN_SIZE, 0);
                final int b = findConfigAttrib(egl, display, config, EGL_BLUE_SIZE, 0);
                final int a = findConfigAttrib(egl, display, config, EGL_ALPHA_SIZE, 0);
                if ((r == redSize) && (g == greenSize) && (b == blueSize) && (a == alphaSize)) {
                    return config;
                }
            }
        }
        return null;
    }

    /** Reads one config attribute, falling back to defaultValue on query failure. */
    private int findConfigAttrib(final EGL10 egl, final EGLDisplay display, final EGLConfig config, final int attribute, final int defaultValue) {
        final int[] value = new int[1];
        if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
            return value[0];
        }
        return defaultValue;
    }
}
+
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultContextFactory.java b/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultContextFactory.java
new file mode 100644
index 00000000..0d1a6427
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/DefaultContextFactory.java
@@ -0,0 +1,47 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLSurfaceView;
+import android.util.Log;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+import static javax.microedition.khronos.egl.EGL10.EGL_NONE;
+import static javax.microedition.khronos.egl.EGL10.EGL_NO_CONTEXT;
+
+
+
+public class DefaultContextFactory implements GLSurfaceView.EGLContextFactory {
+
+ private static final String TAG = "DefaultContextFactory";
+
+ private int EGLContextClientVersion;
+
+ public DefaultContextFactory(final int version) {
+ EGLContextClientVersion = version;
+ }
+
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ @Override
+ public EGLContext createContext(final EGL10 egl, final EGLDisplay display, final EGLConfig config) {
+ final int[] attrib_list;
+ if (EGLContextClientVersion != 0) {
+ attrib_list = new int[]{EGL_CONTEXT_CLIENT_VERSION, EGLContextClientVersion, EGL_NONE};
+ } else {
+ attrib_list = null;
+ }
+ return egl.eglCreateContext(display, config, EGL_NO_CONTEXT, attrib_list);
+ }
+
+ @Override
+ public void destroyContext(final EGL10 egl, final EGLDisplay display, final EGLContext context) {
+ if (!egl.eglDestroyContext(display, context)) {
+ Log.e(TAG, "display:" + display + " context: " + context);
+ throw new RuntimeException("eglDestroyContext" + egl.eglGetError());
+ }
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/EglUtil.java b/gpuv/src/main/java/com/xypower/gpuv/egl/EglUtil.java
new file mode 100644
index 00000000..0653293e
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/EglUtil.java
@@ -0,0 +1,124 @@
+package com.xypower.gpuv.egl;
+
+import android.graphics.Bitmap;
+import android.opengl.GLES20;
+import android.opengl.GLException;
+import android.opengl.GLUtils;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import static android.opengl.GLES20.GL_ARRAY_BUFFER;
+import static android.opengl.GLES20.GL_CLAMP_TO_EDGE;
+import static android.opengl.GLES20.GL_LINK_STATUS;
+import static android.opengl.GLES20.GL_STATIC_DRAW;
+import static android.opengl.GLES20.GL_TEXTURE_MAG_FILTER;
+import static android.opengl.GLES20.GL_TEXTURE_MIN_FILTER;
+import static android.opengl.GLES20.GL_TEXTURE_WRAP_S;
+import static android.opengl.GLES20.GL_TEXTURE_WRAP_T;
+import static android.opengl.GLES20.GL_TRUE;
+import static android.opengl.GLES20.glCreateProgram;
+
+
+
+public class EglUtil {
+ private EglUtil() {
+ }
+
+ public static final int NO_TEXTURE = -1;
+ private static final int FLOAT_SIZE_BYTES = 4;
+
+
+ public static int loadShader(final String strSource, final int iType) {
+ int[] compiled = new int[1];
+ int iShader = GLES20.glCreateShader(iType);
+ GLES20.glShaderSource(iShader, strSource);
+ GLES20.glCompileShader(iShader);
+ GLES20.glGetShaderiv(iShader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.d("Load Shader Failed", "Compilation\n" + GLES20.glGetShaderInfoLog(iShader));
+ return 0;
+ }
+ return iShader;
+ }
+
+ public static int createProgram(final int vertexShader, final int pixelShader) throws GLException {
+ final int program = glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("Could not create program");
+ }
+
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, pixelShader);
+
+ GLES20.glLinkProgram(program);
+ final int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GL_TRUE) {
+ GLES20.glDeleteProgram(program);
+ throw new RuntimeException("Could not link program");
+ }
+ return program;
+ }
+
+ public static void setupSampler(final int target, final int mag, final int min) {
+ GLES20.glTexParameterf(target, GL_TEXTURE_MAG_FILTER, mag);
+ GLES20.glTexParameterf(target, GL_TEXTURE_MIN_FILTER, min);
+ GLES20.glTexParameteri(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ }
+
+ public static int createBuffer(final float[] data) {
+ return createBuffer(toFloatBuffer(data));
+ }
+
+ public static int createBuffer(final FloatBuffer data) {
+ final int[] buffers = new int[1];
+ GLES20.glGenBuffers(buffers.length, buffers, 0);
+ updateBufferData(buffers[0], data);
+ return buffers[0];
+ }
+
+ public static FloatBuffer toFloatBuffer(final float[] data) {
+ final FloatBuffer buffer = ByteBuffer
+ .allocateDirect(data.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder())
+ .asFloatBuffer();
+ buffer.put(data).position(0);
+ return buffer;
+ }
+
+ public static void updateBufferData(final int bufferName, final FloatBuffer data) {
+ GLES20.glBindBuffer(GL_ARRAY_BUFFER, bufferName);
+ GLES20.glBufferData(GL_ARRAY_BUFFER, data.capacity() * FLOAT_SIZE_BYTES, data, GL_STATIC_DRAW);
+ GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
+ }
+
+ public static int loadTexture(final Bitmap img, final int usedTexId, final boolean recycle) {
+ int textures[] = new int[1];
+ if (usedTexId == NO_TEXTURE) {
+ GLES20.glGenTextures(1, textures, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+ GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
+ } else {
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
+ GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, img);
+ textures[0] = usedTexId;
+ }
+ if (recycle) {
+ img.recycle();
+ }
+ return textures[0];
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlConfigChooser.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlConfigChooser.java
new file mode 100644
index 00000000..caca1b1c
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlConfigChooser.java
@@ -0,0 +1,13 @@
+package com.xypower.gpuv.egl;
+
+
+
+public class GlConfigChooser extends DefaultConfigChooser {
+
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 2;
+
+ public GlConfigChooser(final boolean withDepthBuffer) {
+ super(withDepthBuffer, EGL_CONTEXT_CLIENT_VERSION);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlContextFactory.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlContextFactory.java
new file mode 100644
index 00000000..29dd364d
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlContextFactory.java
@@ -0,0 +1,13 @@
+package com.xypower.gpuv.egl;
+
+
+
+public class GlContextFactory extends DefaultContextFactory {
+
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 2;
+
+ public GlContextFactory() {
+ super(EGL_CONTEXT_CLIENT_VERSION);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlFrameBufferObjectRenderer.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlFrameBufferObjectRenderer.java
new file mode 100644
index 00000000..c1ce0f33
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlFrameBufferObjectRenderer.java
@@ -0,0 +1,78 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import java.util.LinkedList;
+import java.util.Queue;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
+import static android.opengl.GLES20.GL_DEPTH_BUFFER_BIT;
+import static android.opengl.GLES20.GL_FRAMEBUFFER;
+
+
+
+public abstract class GlFrameBufferObjectRenderer implements GLSurfaceView.Renderer {
+
+ private GlFramebufferObject framebufferObject;
+ private GlFilter normalShader;
+
+ private final Queue runOnDraw;
+
+
+ protected GlFrameBufferObjectRenderer() {
+ runOnDraw = new LinkedList();
+ }
+
+
+ @Override
+ public final void onSurfaceCreated(final GL10 gl, final EGLConfig config) {
+ framebufferObject = new GlFramebufferObject();
+ normalShader = new GlFilter();
+ normalShader.setup();
+ onSurfaceCreated(config);
+ }
+
+ @Override
+ public final void onSurfaceChanged(final GL10 gl, final int width, final int height) {
+ framebufferObject.setup(width, height);
+ normalShader.setFrameSize(width, height);
+ onSurfaceChanged(width, height);
+ GLES20.glViewport(0, 0, framebufferObject.getWidth(), framebufferObject.getHeight());
+ }
+
+ @Override
+ public final void onDrawFrame(final GL10 gl) {
+ synchronized (runOnDraw) {
+ while (!runOnDraw.isEmpty()) {
+ runOnDraw.poll().run();
+ }
+ }
+ framebufferObject.enable();
+
+ onDrawFrame(framebufferObject);
+
+ GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0);
+
+ GLES20.glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+ normalShader.draw(framebufferObject.getTexName(), null);
+
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+
+ }
+
+ public abstract void onSurfaceCreated(EGLConfig config);
+
+ public abstract void onSurfaceChanged(int width, int height);
+
+ public abstract void onDrawFrame(GlFramebufferObject fbo);
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlFramebufferObject.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlFramebufferObject.java
new file mode 100644
index 00000000..0e5fc1e6
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlFramebufferObject.java
@@ -0,0 +1,120 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLES20;
+
+import static android.opengl.GLES20.GL_COLOR_ATTACHMENT0;
+import static android.opengl.GLES20.GL_DEPTH_ATTACHMENT;
+import static android.opengl.GLES20.GL_DEPTH_COMPONENT16;
+import static android.opengl.GLES20.GL_FRAMEBUFFER;
+import static android.opengl.GLES20.GL_FRAMEBUFFER_BINDING;
+import static android.opengl.GLES20.GL_FRAMEBUFFER_COMPLETE;
+import static android.opengl.GLES20.GL_LINEAR;
+import static android.opengl.GLES20.GL_MAX_RENDERBUFFER_SIZE;
+import static android.opengl.GLES20.GL_MAX_TEXTURE_SIZE;
+import static android.opengl.GLES20.GL_NEAREST;
+import static android.opengl.GLES20.GL_RENDERBUFFER;
+import static android.opengl.GLES20.GL_RENDERBUFFER_BINDING;
+import static android.opengl.GLES20.GL_RGBA;
+import static android.opengl.GLES20.GL_TEXTURE_2D;
+import static android.opengl.GLES20.GL_TEXTURE_BINDING_2D;
+import static android.opengl.GLES20.GL_UNSIGNED_BYTE;
+
+
+
/**
 * Wraps an OpenGL ES framebuffer object backed by an RGBA color texture and a
 * 16-bit depth renderbuffer. {@link #setup} (re)allocates the attachments for
 * a given size; {@link #enable} binds the FBO for rendering.
 */
public class GlFramebufferObject {
    private int width;
    private int height;
    private int framebufferName;
    private int renderBufferName;
    private int texName;

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    /** @return the GL name of the color-attachment texture. */
    public int getTexName() {
        return texName;
    }

    /**
     * Allocates (or reallocates) the FBO, depth renderbuffer, and color
     * texture at the given size. The caller's current framebuffer,
     * renderbuffer, and 2D-texture bindings are saved and restored.
     *
     * @throws IllegalArgumentException if the size exceeds GL limits.
     * @throws RuntimeException         if the framebuffer is incomplete.
     */
    public void setup(final int width, final int height) {
        final int[] args = new int[1];

        GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);
        if (width > args[0] || height > args[0]) {
            throw new IllegalArgumentException("GL_MAX_TEXTURE_SIZE " + args[0]);
        }

        GLES20.glGetIntegerv(GL_MAX_RENDERBUFFER_SIZE, args, 0);
        if (width > args[0] || height > args[0]) {
            throw new IllegalArgumentException("GL_MAX_RENDERBUFFER_SIZE " + args[0]);
        }

        // Save the caller's current bindings; restored at the end of setup.
        GLES20.glGetIntegerv(GL_FRAMEBUFFER_BINDING, args, 0);
        final int saveFramebuffer = args[0];
        GLES20.glGetIntegerv(GL_RENDERBUFFER_BINDING, args, 0);
        final int saveRenderbuffer = args[0];
        GLES20.glGetIntegerv(GL_TEXTURE_BINDING_2D, args, 0);
        final int saveTexName = args[0];

        // Free any previous allocation before creating new objects.
        release();

        try {
            this.width = width;
            this.height = height;

            GLES20.glGenFramebuffers(args.length, args, 0);
            framebufferName = args[0];
            GLES20.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);

            // Depth attachment.
            GLES20.glGenRenderbuffers(args.length, args, 0);
            renderBufferName = args[0];
            GLES20.glBindRenderbuffer(GL_RENDERBUFFER, renderBufferName);
            GLES20.glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
            GLES20.glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, renderBufferName);

            // Color attachment: an RGBA texture with no initial data.
            GLES20.glGenTextures(args.length, args, 0);
            texName = args[0];
            GLES20.glBindTexture(GL_TEXTURE_2D, texName);

            EglUtil.setupSampler(GL_TEXTURE_2D, GL_LINEAR, GL_NEAREST);

            GLES20.glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, null);
            GLES20.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texName, 0);

            final int status = GLES20.glCheckFramebufferStatus(GL_FRAMEBUFFER);
            if (status != GL_FRAMEBUFFER_COMPLETE) {
                throw new RuntimeException("Failed to initialize framebuffer object " + status);
            }
        } catch (final RuntimeException e) {
            // Don't leave half-built GL objects behind on failure.
            release();
            throw e;
        }

        GLES20.glBindFramebuffer(GL_FRAMEBUFFER, saveFramebuffer);
        GLES20.glBindRenderbuffer(GL_RENDERBUFFER, saveRenderbuffer);
        GLES20.glBindTexture(GL_TEXTURE_2D, saveTexName);
    }

    /**
     * Deletes the texture, renderbuffer, and framebuffer. Names are reset to 0,
     * which GL treats as a no-op on delete, so this is safe to call repeatedly.
     */
    public void release() {
        final int[] args = new int[1];
        args[0] = texName;
        GLES20.glDeleteTextures(args.length, args, 0);
        texName = 0;
        args[0] = renderBufferName;
        GLES20.glDeleteRenderbuffers(args.length, args, 0);
        renderBufferName = 0;
        args[0] = framebufferName;
        GLES20.glDeleteFramebuffers(args.length, args, 0);
        framebufferName = 0;
    }

    /** Binds this FBO as the render target; subsequent draws go to its texture. */
    public void enable() {
        GLES20.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName);
    }


}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreview.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreview.java
new file mode 100644
index 00000000..02c98d1a
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreview.java
@@ -0,0 +1,89 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLES20;
+
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import static android.opengl.GLES20.GL_ARRAY_BUFFER;
+import static android.opengl.GLES20.GL_FLOAT;
+import static android.opengl.GLES20.GL_TEXTURE0;
+import static android.opengl.GLES20.GL_TEXTURE_2D;
+import static android.opengl.GLES20.GL_TRIANGLE_STRIP;
+
+
+
/**
 * Filter that draws a texture (optionally an external OES texture, as produced
 * by SurfaceTexture) with an MVP transform, a texture-coordinate transform,
 * and a horizontal aspect-ratio correction.
 *
 * NOTE(review): near-duplicate of GlPreviewFilter in this package — consider
 * consolidating the two.
 */
public class GlPreview extends GlFilter {

    // GL_TEXTURE_EXTERNAL_OES constant (not exposed by GLES20).
    public static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
                    "uniform mat4 uSTMatrix;\n" +
                    "uniform float uCRatio;\n" +

                    "attribute vec4 aPosition;\n" +
                    "attribute vec4 aTextureCoord;\n" +
                    "varying highp vec2 vTextureCoord;\n" +

                    "void main() {\n" +
                    "vec4 scaledPos = aPosition;\n" +
                    "scaledPos.x = scaledPos.x * uCRatio;\n" +
                    "gl_Position = uMVPMatrix * scaledPos;\n" +
                    "vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                    "}\n";

    // Texture target to sample from: GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES.
    private final int texTarget;

    public GlPreview(final int texTarget) {
        super(VERTEX_SHADER, createFragmentShaderSourceOESIfNeed(texTarget));
        this.texTarget = texTarget;
    }

    /**
     * For an external OES target, rewrites the default fragment shader to use
     * samplerExternalOES and adds the required extension directive.
     */
    private static String createFragmentShaderSourceOESIfNeed(final int texTarget) {
        if (texTarget == GL_TEXTURE_EXTERNAL_OES) {
            return new StringBuilder()
                    .append("#extension GL_OES_EGL_image_external : require\n")
                    .append(DEFAULT_FRAGMENT_SHADER.replace("sampler2D", "samplerExternalOES"))
                    .toString();
        }
        return DEFAULT_FRAGMENT_SHADER;
    }

    @Override
    public void setup() {
        super.setup();
        // Pre-cache the uniform/attribute locations used by draw().
        getHandle("uMVPMatrix");
        getHandle("uSTMatrix");
        getHandle("uCRatio");
        getHandle("aPosition");
        getHandle("aTextureCoord");
    }

    /**
     * Draws the texture as a full quad.
     *
     * @param texName     GL texture name bound to {@code texTarget}.
     * @param mvpMatrix   4x4 model-view-projection matrix.
     * @param stMatrix    4x4 texture-coordinate transform (e.g. from SurfaceTexture).
     * @param aspectRatio horizontal scale applied to vertex x-coordinates.
     */
    public void draw(final int texName, final float[] mvpMatrix, final float[] stMatrix, final float aspectRatio) {
        useProgram();

        GLES20.glUniformMatrix4fv(getHandle("uMVPMatrix"), 1, false, mvpMatrix, 0);
        GLES20.glUniformMatrix4fv(getHandle("uSTMatrix"), 1, false, stMatrix, 0);
        GLES20.glUniform1f(getHandle("uCRatio"), aspectRatio);

        GLES20.glBindBuffer(GL_ARRAY_BUFFER, getVertexBufferName());
        GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
        GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
        GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);

        GLES20.glActiveTexture(GL_TEXTURE0);
        GLES20.glBindTexture(texTarget, texName);
        GLES20.glUniform1i(getHandle(DEFAULT_UNIFORM_SAMPLER), 0);

        GLES20.glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        // Unbind everything so later passes start from a clean state.
        GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
        GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
        GLES20.glBindTexture(GL_TEXTURE_2D, 0);
    }


}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewFilter.java
new file mode 100644
index 00000000..f9f8b260
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewFilter.java
@@ -0,0 +1,70 @@
+package com.xypower.gpuv.egl;
+
+import android.opengl.GLES20;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import static android.opengl.GLES20.*;
+
/**
 * Filter that draws a texture (optionally an external OES texture, as produced
 * by SurfaceTexture) with an MVP transform, a texture-coordinate transform,
 * and a horizontal aspect-ratio correction.
 *
 * NOTE(review): near-duplicate of GlPreview in this package — consider
 * consolidating the two.
 */
public class GlPreviewFilter extends GlFilter {

    // GL_TEXTURE_EXTERNAL_OES constant (not exposed by GLES20).
    public static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
                    "uniform mat4 uSTMatrix;\n" +
                    "uniform float uCRatio;\n" +

                    "attribute vec4 aPosition;\n" +
                    "attribute vec4 aTextureCoord;\n" +
                    "varying highp vec2 vTextureCoord;\n" +

                    "void main() {\n" +
                    "vec4 scaledPos = aPosition;\n" +
                    "scaledPos.x = scaledPos.x * uCRatio;\n" +
                    "gl_Position = uMVPMatrix * scaledPos;\n" +
                    "vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                    "}\n";

    // Texture target to sample from: GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES.
    private final int texTarget;

    public GlPreviewFilter(final int texTarget) {
        super(VERTEX_SHADER, createFragmentShaderSourceOESIfNeed(texTarget));
        this.texTarget = texTarget;
    }

    /**
     * For an external OES target, rewrites the default fragment shader to use
     * samplerExternalOES and adds the required extension directive.
     */
    private static String createFragmentShaderSourceOESIfNeed(final int texTarget) {
        if (texTarget == GL_TEXTURE_EXTERNAL_OES) {
            return new StringBuilder()
                    .append("#extension GL_OES_EGL_image_external : require\n")
                    .append(DEFAULT_FRAGMENT_SHADER.replace("sampler2D", "samplerExternalOES"))
                    .toString();
        }
        return DEFAULT_FRAGMENT_SHADER;
    }

    /**
     * Draws the texture as a full quad.
     *
     * @param texName     GL texture name bound to {@code texTarget}.
     * @param mvpMatrix   4x4 model-view-projection matrix.
     * @param stMatrix    4x4 texture-coordinate transform (e.g. from SurfaceTexture).
     * @param aspectRatio horizontal scale applied to vertex x-coordinates.
     */
    public void draw(final int texName, final float[] mvpMatrix, final float[] stMatrix, final float aspectRatio) {
        useProgram();

        GLES20.glUniformMatrix4fv(getHandle("uMVPMatrix"), 1, false, mvpMatrix, 0);
        GLES20.glUniformMatrix4fv(getHandle("uSTMatrix"), 1, false, stMatrix, 0);
        GLES20.glUniform1f(getHandle("uCRatio"), aspectRatio);

        GLES20.glBindBuffer(GL_ARRAY_BUFFER, getVertexBufferName());
        GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
        GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
        GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);

        GLES20.glActiveTexture(GL_TEXTURE0);
        GLES20.glBindTexture(texTarget, texName);
        GLES20.glUniform1i(getHandle(DEFAULT_UNIFORM_SAMPLER), 0);

        GLES20.glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        // Unbind everything so later passes start from a clean state.
        GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
        GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
        GLES20.glBindBuffer(GL_ARRAY_BUFFER, 0);
        GLES20.glBindTexture(GL_TEXTURE_2D, 0);
    }
}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewRenderer.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewRenderer.java
new file mode 100644
index 00000000..e5689f02
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlPreviewRenderer.java
@@ -0,0 +1,316 @@
+package com.xypower.gpuv.egl;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.Matrix;
+import android.os.Handler;
+import android.util.Size;
+import com.xypower.gpuv.camerarecorder.capture.MediaVideoEncoder;
+import com.xypower.gpuv.egl.filter.GlFilter;
+
+import javax.microedition.khronos.egl.EGLConfig;
+
+import static android.opengl.GLES20.*;
+
+
+
+/**
+ * GLSurfaceView renderer that draws camera preview frames — received through a
+ * {@link GlSurfaceTexture} — optionally through a {@link GlFilter}, and hands
+ * each frame to a {@link MediaVideoEncoder} while recording.
+ *
+ * Threading: onSurfaceCreated/onSurfaceChanged/onDrawFrame run on the GL thread;
+ * setGlFilter/setVideoEncoder/release marshal work onto it via glView.queueEvent;
+ * the surface-created callback is posted back via {@code handler}.
+ */
+public class GlPreviewRenderer extends GlFrameBufferObjectRenderer implements SurfaceTexture.OnFrameAvailableListener {
+
+    // Bound to the thread that constructs this renderer (normally the UI thread);
+    // used to deliver the surface-created callback off the GL thread.
+    private final Handler handler = new Handler();
+
+    private GlSurfaceTexture previewTexture;
+
+    // private final Camera camera;
+    // GL texture name the camera SurfaceTexture streams into.
+    private int texName;
+
+    // Model, projection, view, combined MVP, and SurfaceTexture transform matrices.
+    private float[] MVPMatrix = new float[16];
+    private float[] ProjMatrix = new float[16];
+    private float[] MMatrix = new float[16];
+    private float[] VMatrix = new float[16];
+    private float[] STMatrix = new float[16];
+
+    private final GLSurfaceView glView;
+
+    // Off-screen FBO the preview is first rendered into when a filter is active.
+    private GlFramebufferObject filterFramebufferObject;
+    private GlPreview previewShader;
+
+    private GlFilter glFilter;
+    // Set when a filter was (re)installed and still needs setup() on the GL thread.
+    private boolean isNewShader;
+
+    private int angle = 0;
+    private float aspectRatio = 1f;
+    private float scaleRatio = 1f;
+    private float drawScale = 1f;
+    private float gestureScale = 1f;
+
+    private Size cameraResolution;
+
+    // Frame bookkeeping: counter is bumped per onFrameAvailable(), compare is bumped
+    // per consumed updateTexImage(); onDrawFrame() drains the difference.
+    private int updateTexImageCounter = 0;
+    private int updateTexImageCompare = 0;
+
+    private SurfaceCreateListener surfaceCreateListener;
+    private MediaVideoEncoder videoEncoder;
+
+    /**
+     * Wires this renderer into the given GLSurfaceView (config chooser, context
+     * factory, on-dirty render mode) and resets the texture transform matrix.
+     */
+    public GlPreviewRenderer(GLSurfaceView glView) {
+        this.glView = glView;
+        this.glView.setEGLConfigChooser(new GlConfigChooser(false));
+        this.glView.setEGLContextFactory(new GlContextFactory());
+        this.glView.setRenderer(this);
+        this.glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+
+        Matrix.setIdentityM(STMatrix, 0);
+    }
+
+    /**
+     * Rebuilds the model matrix for the current rotation angle and scales the
+     * preview so the camera image covers the view without letterboxing.
+     */
+    public void onStartPreview(float cameraPreviewWidth, float cameraPreviewHeight, boolean isLandscapeDevice) {
+
+        Matrix.setIdentityM(MMatrix, 0);
+        Matrix.rotateM(MMatrix, 0, -angle, 0.0f, 0.0f, 1.0f);
+
+// Log.d("GPUCameraRecorder ", "angle" + angle);
+// Log.d("GPUCameraRecorder ", "getMeasuredHeight " + glView.getMeasuredHeight());
+// Log.d("GPUCameraRecorder ", "getMeasuredWidth " + glView.getMeasuredWidth());
+// Log.d("GPUCameraRecorder ", "cameraPreviewWidth " + cameraPreviewWidth);
+// Log.d("GPUCameraRecorder ", "cameraPreviewHeight " + cameraPreviewHeight);
+
+        if (isLandscapeDevice) {
+
+            if (glView.getMeasuredWidth() == glView.getMeasuredHeight()) {
+                // Square view: scale by the larger of the two aspect ratios so the
+                // shorter camera dimension still fills the view.
+                float scale = Math.max(cameraPreviewWidth / cameraPreviewHeight,
+                        cameraPreviewHeight / cameraPreviewWidth);
+                Matrix.scaleM(MMatrix, 0, 1f * scale, 1f * scale, 1);
+
+            } else {
+                float scale = Math.max(
+                        (float) glView.getMeasuredHeight() / cameraPreviewWidth,
+                        (float) glView.getMeasuredWidth() / cameraPreviewHeight);
+                Matrix.scaleM(MMatrix, 0, 1f * scale, 1f * scale, 1);
+            }
+
+        } else {
+            // Portrait
+            // View 1920 1080 Camera 1280 720 OK
+            // View 1920 1080 Camera 800 600 OK
+            // View 1440 1080 Camera 800 600 OK
+            // View 1080 1080 Camera 1280 720 Need Scale
+            // View 1080 1080 Camera 800 600 Need Scale
+
+            float viewAspect = (float) glView.getMeasuredHeight() / glView.getMeasuredWidth();
+            float cameraAspect = cameraPreviewWidth / cameraPreviewHeight;
+            if (viewAspect >= cameraAspect) {
+                Matrix.scaleM(MMatrix, 0, 1f, 1f, 1);
+            } else {
+                // Camera is wider than the view: enlarge until it covers the view.
+                float adjust = cameraAspect / viewAspect;
+                Matrix.scaleM(MMatrix, 0, 1f * adjust, 1f * adjust, 1);
+            }
+        }
+
+    }
+
+    /**
+     * Swaps in a new filter on the GL thread; the previous filter (if any) is
+     * released first, and setup of the new one is deferred to the next frame.
+     */
+    public void setGlFilter(final GlFilter filter) {
+        glView.queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (glFilter != null) {
+                    glFilter.release();
+                }
+                glFilter = filter;
+                isNewShader = true;
+                glView.requestRender();
+            }
+        });
+    }
+
+    @Override
+    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        // increment every time a new frame is avail
+        updateTexImageCounter++;
+        glView.requestRender();
+    }
+
+    /**
+     * GL-thread setup: creates the external-OES preview texture and shader,
+     * the filter FBO, the view matrix, then notifies the surface listener on
+     * the handler thread.
+     */
+    @Override
+    public void onSurfaceCreated(EGLConfig config) {
+        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+
+        final int[] args = new int[1];
+
+        GLES20.glGenTextures(args.length, args, 0);
+        texName = args[0];
+
+        // Create the SurfaceTexture backed by the new GL texture.
+        previewTexture = new GlSurfaceTexture(texName);
+        previewTexture.setOnFrameAvailableListener(this);
+
+        GLES20.glBindTexture(previewTexture.getTextureTarget(), texName);
+        // GL_TEXTURE_EXTERNAL_OES
+        EglUtil.setupSampler(previewTexture.getTextureTarget(), GL_LINEAR, GL_NEAREST);
+        // NOTE(review): unbinds GL_TEXTURE_2D even though the target bound above is
+        // GL_TEXTURE_EXTERNAL_OES — confirm intended.
+        GLES20.glBindTexture(GL_TEXTURE_2D, 0);
+
+        filterFramebufferObject = new GlFramebufferObject();
+        // GL_TEXTURE_EXTERNAL_OES
+        previewShader = new GlPreview(previewTexture.getTextureTarget());
+        previewShader.setup();
+
+        // Camera positioned at z=5 looking at the origin, matching the 5..7
+        // frustum configured in onSurfaceChanged().
+        Matrix.setLookAtM(VMatrix, 0,
+                0.0f, 0.0f, 5.0f,
+                0.0f, 0.0f, 0.0f,
+                0.0f, 1.0f, 0.0f
+        );
+
+        if (glFilter != null) {
+            isNewShader = true;
+        }
+
+        GLES20.glGetIntegerv(GL_MAX_TEXTURE_SIZE, args, 0);
+
+        // Deliver the SurfaceTexture to the listener off the GL thread.
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                if (surfaceCreateListener != null) {
+                    surfaceCreateListener.onCreated(previewTexture.getSurfaceTexture());
+                }
+            }
+        });
+    }
+
+    /** Resizes the FBO, shaders and projection frustum to the new surface size. */
+    @Override
+    public void onSurfaceChanged(int width, int height) {
+
+        filterFramebufferObject.setup(width, height);
+        previewShader.setFrameSize(width, height);
+        if (glFilter != null) {
+            glFilter.setFrameSize(width, height);
+        }
+        scaleRatio = (float) width / height;
+        Matrix.frustumM(ProjMatrix, 0, -scaleRatio, scaleRatio, -1, 1, 5, 7);
+    }
+
+    /**
+     * Per-frame draw: applies any pending gesture zoom to the model matrix,
+     * drains queued camera frames, lazily sets up a newly installed filter,
+     * renders (through the filter FBO when filtering), then notifies the encoder.
+     */
+    @Override
+    public void onDrawFrame(GlFramebufferObject fbo) {
+
+        if (drawScale != gestureScale) {
+            // Undo the previously applied zoom, then apply the new one.
+            float tempScale = 1 / drawScale;
+            Matrix.scaleM(MMatrix, 0, tempScale, tempScale, 1);
+            drawScale = gestureScale;
+            Matrix.scaleM(MMatrix, 0, drawScale, drawScale, 1);
+        }
+
+        synchronized (this) {
+            if (updateTexImageCompare != updateTexImageCounter) {
+                // loop and call updateTexImage() for each time the onFrameAvailable() method was called below.
+                while (updateTexImageCompare != updateTexImageCounter) {
+
+                    previewTexture.updateTexImage();
+                    previewTexture.getTransformMatrix(STMatrix);
+                    updateTexImageCompare++;  // increment the compare value until it's the same as _updateTexImageCounter
+                }
+            }
+
+        }
+
+        if (isNewShader) {
+            if (glFilter != null) {
+                glFilter.setup();
+                glFilter.setFrameSize(fbo.getWidth(), fbo.getHeight());
+            }
+            isNewShader = false;
+        }
+
+        if (glFilter != null) {
+            // Render into the intermediate FBO so the filter can post-process it.
+            filterFramebufferObject.enable();
+        }
+
+        GLES20.glClear(GL_COLOR_BUFFER_BIT);
+
+        Matrix.multiplyMM(MVPMatrix, 0, VMatrix, 0, MMatrix, 0);
+        Matrix.multiplyMM(MVPMatrix, 0, ProjMatrix, 0, MVPMatrix, 0);
+
+        previewShader.draw(texName, MVPMatrix, STMatrix, aspectRatio);
+
+        if (glFilter != null) {
+            fbo.enable();
+            GLES20.glClear(GL_COLOR_BUFFER_BIT);
+            glFilter.draw(filterFramebufferObject.getTexName(), fbo);
+        }
+
+        synchronized (this) {
+            if (videoEncoder != null) {
+                // notify to capturing thread that the camera frame is available.
+                videoEncoder.frameAvailableSoon(texName, STMatrix, MVPMatrix, aspectRatio);
+            }
+        }
+
+    }
+
+    public void setCameraResolution(Size cameraResolution) {
+        this.cameraResolution = cameraResolution;
+    }
+
+    /**
+     * Installs (or clears, when null) the encoder on the GL thread; the encoder
+     * is handed the current EGL context and preview texture before activation.
+     */
+    public void setVideoEncoder(final MediaVideoEncoder encoder) {
+        glView.queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                synchronized (GlPreviewRenderer.this) {
+                    if (encoder != null) {
+                        encoder.setEglContext(EGL14.eglGetCurrentContext(), texName);
+                    }
+                    videoEncoder = encoder;
+                }
+            }
+        });
+
+    }
+
+    public GlSurfaceTexture getPreviewTexture() {
+        return previewTexture;
+    }
+
+    /**
+     * Sets the display rotation and derives the aspect ratio from the camera
+     * resolution (swapped for 90/270 degrees).
+     * NOTE(review): throws NPE if called before setCameraResolution() — confirm
+     * callers always set the resolution first.
+     */
+    public void setAngle(int angle) {
+        this.angle = angle;
+        if (angle == 90 || angle == 270) {
+            aspectRatio = (float) cameraResolution.getWidth() / cameraResolution.getHeight();
+        } else {
+            aspectRatio = (float) cameraResolution.getHeight() / cameraResolution.getWidth();
+        }
+    }
+
+    public void setGestureScale(float gestureScale) {
+        this.gestureScale = gestureScale;
+    }
+
+    public GlFilter getFilter() {
+        return glFilter;
+    }
+
+    /**
+     * Releases the current filter on the GL thread.
+     * NOTE(review): previewTexture is not released here — verify its lifecycle
+     * is owned elsewhere.
+     */
+    public void release() {
+        glView.queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (glFilter != null) {
+                    glFilter.release();
+                }
+            }
+        });
+    }
+
+    /** Callback fired (on the handler thread) once the preview SurfaceTexture exists. */
+    public interface SurfaceCreateListener {
+        void onCreated(SurfaceTexture surface);
+    }
+
+    public void setSurfaceCreateListener(SurfaceCreateListener surfaceCreateListener) {
+        this.surfaceCreateListener = surfaceCreateListener;
+    }
+}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/GlSurfaceTexture.java b/gpuv/src/main/java/com/xypower/gpuv/egl/GlSurfaceTexture.java
new file mode 100644
index 00000000..8f82dc99
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/GlSurfaceTexture.java
@@ -0,0 +1,49 @@
+package com.xypower.gpuv.egl;
+
+import android.graphics.SurfaceTexture;
+
+
+
+/**
+ * Thin wrapper around {@link SurfaceTexture} that relays frame-available
+ * callbacks to an externally supplied listener, always forwarding the wrapped
+ * SurfaceTexture instance.
+ */
+public class GlSurfaceTexture implements SurfaceTexture.OnFrameAvailableListener {
+
+    private final SurfaceTexture texture;
+    private SurfaceTexture.OnFrameAvailableListener frameListener;
+
+    /** Creates a SurfaceTexture streaming into the given GL texture name. */
+    public GlSurfaceTexture(final int texName) {
+        texture = new SurfaceTexture(texName);
+        texture.setOnFrameAvailableListener(this);
+    }
+
+    public void setOnFrameAvailableListener(final SurfaceTexture.OnFrameAvailableListener l) {
+        frameListener = l;
+    }
+
+    /** Always GL_TEXTURE_EXTERNAL_OES — camera frames arrive on an external-OES texture. */
+    public int getTextureTarget() {
+        return GlPreview.GL_TEXTURE_EXTERNAL_OES;
+    }
+
+    public void updateTexImage() {
+        texture.updateTexImage();
+    }
+
+    public void getTransformMatrix(final float[] mtx) {
+        texture.getTransformMatrix(mtx);
+    }
+
+    public SurfaceTexture getSurfaceTexture() {
+        return texture;
+    }
+
+    @Override
+    public void onFrameAvailable(final SurfaceTexture st) {
+        if (frameListener != null) {
+            // Deliberately pass the wrapped instance rather than the callback argument.
+            frameListener.onFrameAvailable(texture);
+        }
+    }
+
+    public void release() {
+        texture.release();
+    }
+}
+
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBilateralFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBilateralFilter.java
new file mode 100644
index 00000000..0d45a6a4
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBilateralFilter.java
@@ -0,0 +1,149 @@
+package com.xypower.gpuv.egl.filter;
+
+import static android.opengl.GLES20.glUniform1f;
+
+
+/**
+ * Edge-preserving (bilateral-style) blur: a 9-tap Gaussian whose per-tap weight
+ * is attenuated by the color distance to the central sample, so strong edges
+ * blur less than flat regions. Run once per blur direction via the
+ * texelWidth/HeightOffset uniforms.
+ */
+public class GlBilateralFilter extends GlFilter {
+
+    // Computes the 9 sample coordinates along one blur axis in the vertex shader.
+    private static final String VERTEX_SHADER =
+            "attribute vec4 aPosition;" +
+                    "attribute vec4 aTextureCoord;" +
+
+                    "const lowp int GAUSSIAN_SAMPLES = 9;" +
+
+                    "uniform highp float texelWidthOffset;" +
+                    "uniform highp float texelHeightOffset;" +
+                    "uniform highp float blurSize;" +
+
+                    "varying highp vec2 vTextureCoord;" +
+                    "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +
+
+                    "void main() {" +
+                    "gl_Position = aPosition;" +
+                    "vTextureCoord = aTextureCoord.xy;" +
+
+                    // Calculate the positions for the blur
+                    "int multiplier = 0;" +
+                    "highp vec2 blurStep;" +
+                    // NOTE(review): component order is (height, width) here — looks swapped
+                    // relative to the uniform names; confirm against the reference filter.
+                    "highp vec2 singleStepOffset = vec2(texelHeightOffset, texelWidthOffset) * blurSize;" +
+
+                    "for (lowp int i = 0; i < GAUSSIAN_SAMPLES; i++) {" +
+                    "multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));" +
+                    // Blur in x (horizontal)
+                    "blurStep = float(multiplier) * singleStepOffset;" +
+                    "blurCoordinates[i] = vTextureCoord.xy + blurStep;" +
+                    "}" +
+                    "}";
+
+    // Gaussian weights 0.05/0.09/0.12/0.15 mirrored around a 0.18 center (sum = 1.0),
+    // each scaled down by the sample's color distance from the center tap.
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;" +
+
+                    "uniform lowp sampler2D sTexture;" +
+
+                    "const lowp int GAUSSIAN_SAMPLES = 9;" +
+                    "varying highp vec2 vTextureCoord;" +
+                    "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +
+
+                    "const mediump float distanceNormalizationFactor = 1.5;" +
+
+                    "void main() {" +
+                    "lowp vec4 centralColor = texture2D(sTexture, blurCoordinates[4]);" +
+                    "lowp float gaussianWeightTotal = 0.18;" +
+                    "lowp vec4 sum = centralColor * 0.18;" +
+
+                    "lowp vec4 sampleColor = texture2D(sTexture, blurCoordinates[0]);" +
+                    "lowp float distanceFromCentralColor;" +
+
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+
+                    "lowp float gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[1]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[2]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[3]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[5]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[6]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[7]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "sampleColor = texture2D(sTexture, blurCoordinates[8]);" +
+                    "distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);" +
+                    "gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);" +
+                    "gaussianWeightTotal += gaussianWeight;" +
+                    "sum += sampleColor * gaussianWeight;" +
+
+                    "gl_FragColor = sum / gaussianWeightTotal;" +
+                    "}";
+
+    // Per-texel step in normalized texture coordinates, and overall blur strength.
+    private float texelWidthOffset = 0.004f;
+    private float texelHeightOffset = 0.004f;
+    private float blurSize = 1.0f;
+
+    public GlBilateralFilter() {
+        super(VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+    public float getTexelWidthOffset() {
+        return texelWidthOffset;
+    }
+
+    public void setTexelWidthOffset(final float texelWidthOffset) {
+        this.texelWidthOffset = texelWidthOffset;
+    }
+
+    public float getTexelHeightOffset() {
+        return texelHeightOffset;
+    }
+
+    public void setTexelHeightOffset(final float texelHeightOffset) {
+        this.texelHeightOffset = texelHeightOffset;
+    }
+
+    public float getBlurSize() {
+        return blurSize;
+    }
+
+    public void setBlurSize(final float blurSize) {
+        this.blurSize = blurSize;
+    }
+
+    /** Uploads the offset/blur uniforms before each draw. */
+    @Override
+    public void onDraw() {
+        glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
+        glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
+        glUniform1f(getHandle("blurSize"), blurSize);
+    }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBoxBlurFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBoxBlurFilter.java
new file mode 100644
index 00000000..cc525910
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBoxBlurFilter.java
@@ -0,0 +1,96 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+/**
+ * 5-tap box blur: averages the center texel with two pairs of neighbors at
+ * 1.5 and 3.5 texel offsets (equal 0.2 weights). Offsets are computed in the
+ * vertex shader so the fragment shader does only dependent-free reads.
+ */
+public class GlBoxBlurFilter extends GlFilter {
+
+    private static final String VERTEX_SHADER =
+            "attribute vec4 aPosition;" +
+                    "attribute vec4 aTextureCoord;" +
+
+                    "uniform highp float texelWidthOffset;" +
+                    "uniform highp float texelHeightOffset;" +
+                    "uniform highp float blurSize;" +
+
+                    "varying highp vec2 centerTextureCoordinate;" +
+                    "varying highp vec2 oneStepLeftTextureCoordinate;" +
+                    "varying highp vec2 twoStepsLeftTextureCoordinate;" +
+                    "varying highp vec2 oneStepRightTextureCoordinate;" +
+                    "varying highp vec2 twoStepsRightTextureCoordinate;" +
+
+                    "void main() {" +
+                    "gl_Position = aPosition;" +
+
+                    // Half-texel offsets (1.5 / 3.5) exploit linear filtering to average
+                    // adjacent texel pairs in a single fetch.
+                    "vec2 firstOffset = vec2(1.5 * texelWidthOffset, 1.5 * texelHeightOffset) * blurSize;" +
+                    "vec2 secondOffset = vec2(3.5 * texelWidthOffset, 3.5 * texelHeightOffset) * blurSize;" +
+
+                    "centerTextureCoordinate = aTextureCoord.xy;" +
+                    "oneStepLeftTextureCoordinate = centerTextureCoordinate - firstOffset;" +
+                    "twoStepsLeftTextureCoordinate = centerTextureCoordinate - secondOffset;" +
+                    "oneStepRightTextureCoordinate = centerTextureCoordinate + firstOffset;" +
+                    "twoStepsRightTextureCoordinate = centerTextureCoordinate + secondOffset;" +
+                    "}";
+
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;" +
+
+                    "uniform lowp sampler2D sTexture;" +
+
+                    "varying highp vec2 centerTextureCoordinate;" +
+                    "varying highp vec2 oneStepLeftTextureCoordinate;" +
+                    "varying highp vec2 twoStepsLeftTextureCoordinate;" +
+                    "varying highp vec2 oneStepRightTextureCoordinate;" +
+                    "varying highp vec2 twoStepsRightTextureCoordinate;" +
+
+                    "void main() {" +
+                    "lowp vec4 color = texture2D(sTexture, centerTextureCoordinate) * 0.2;" +
+                    "color += texture2D(sTexture, oneStepLeftTextureCoordinate) * 0.2;" +
+                    "color += texture2D(sTexture, oneStepRightTextureCoordinate) * 0.2;" +
+                    "color += texture2D(sTexture, twoStepsLeftTextureCoordinate) * 0.2;" +
+                    "color += texture2D(sTexture, twoStepsRightTextureCoordinate) * 0.2;" +
+                    "gl_FragColor = color;" +
+                    "}";
+
+    // Per-texel step in normalized texture coordinates, and overall blur strength.
+    private float texelWidthOffset = 0.003f;
+    private float texelHeightOffset = 0.003f;
+    private float blurSize = 1.0f;
+
+    public GlBoxBlurFilter() {
+        super(VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+    public float getTexelWidthOffset() {
+        return texelWidthOffset;
+    }
+
+    public void setTexelWidthOffset(final float texelWidthOffset) {
+        this.texelWidthOffset = texelWidthOffset;
+    }
+
+    public float getTexelHeightOffset() {
+        return texelHeightOffset;
+    }
+
+    public void setTexelHeightOffset(final float texelHeightOffset) {
+        this.texelHeightOffset = texelHeightOffset;
+    }
+
+    public float getBlurSize() {
+        return blurSize;
+    }
+
+    public void setBlurSize(final float blurSize) {
+        this.blurSize = blurSize;
+    }
+
+    /** Uploads the offset/blur uniforms before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
+        GLES20.glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
+        GLES20.glUniform1f(getHandle("blurSize"), blurSize);
+    }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBrightnessFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBrightnessFilter.java
new file mode 100644
index 00000000..f4599d50
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBrightnessFilter.java
@@ -0,0 +1,38 @@
+package com.xypower.gpuv.egl.filter;
+
+
+import android.opengl.GLES20;
+
+/**
+ * Adjusts overall brightness by adding a constant offset to every RGB channel.
+ * brightness value ranges from -1.0 to 1.0, with 0.0 as the normal level
+ */
+public class GlBrightnessFilter extends GlFilter {
+
+    private static final String BRIGHTNESS_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " \n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform lowp float brightness;\n" +
+            " \n" +
+            " void main()\n" +
+            " {\n" +
+            " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " \n" +
+            " gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n" +
+            " }";
+
+    // Additive brightness offset; 0 leaves the image unchanged.
+    private float brightness = 0f;
+
+    public GlBrightnessFilter() {
+        super(DEFAULT_VERTEX_SHADER, BRIGHTNESS_FRAGMENT_SHADER);
+    }
+
+    public void setBrightness(float brightness) {
+        this.brightness = brightness;
+    }
+
+    /** Uploads the brightness uniform before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("brightness"), brightness);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBulgeDistortionFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBulgeDistortionFilter.java
new file mode 100644
index 00000000..753de5ff
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlBulgeDistortionFilter.java
@@ -0,0 +1,81 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+/**
+ * Bulge/pinch distortion: warps texture coordinates inside a circular region
+ * around (centerX, centerY); pixels beyond the radius are left untouched.
+ */
+public class GlBulgeDistortionFilter extends GlFilter {
+
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;" +
+
+                    "varying highp vec2 vTextureCoord;" +
+                    "uniform lowp sampler2D sTexture;" +
+
+                    "uniform highp vec2 center;" +
+                    "uniform highp float radius;" +
+                    "uniform highp float scale;" +
+
+                    "void main() {" +
+                    "highp vec2 textureCoordinateToUse = vTextureCoord;" +
+                    "highp float dist = distance(center, vTextureCoord);" +
+                    "textureCoordinateToUse -= center;" +
+                    "if (dist < radius) {" +
+                    "highp float percent = 1.0 - ((radius - dist) / radius) * scale;" +
+                    "percent = percent * percent;" +
+                    "textureCoordinateToUse = textureCoordinateToUse * percent;" +
+                    "}" +
+                    "textureCoordinateToUse += center;" +
+
+                    "gl_FragColor = texture2D(sTexture, textureCoordinateToUse);" +
+                    "}";
+
+    // Distortion parameters, all in normalized texture coordinates.
+    private float centerX = 0.5f;
+    private float centerY = 0.5f;
+    private float radius = 0.25f;
+    private float scale = 0.5f;
+
+    public GlBulgeDistortionFilter() {
+        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+    /** Uploads the current distortion parameters before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform2f(getHandle("center"), centerX, centerY);
+        GLES20.glUniform1f(getHandle("radius"), radius);
+        GLES20.glUniform1f(getHandle("scale"), scale);
+    }
+
+    public float getCenterX() {
+        return centerX;
+    }
+
+    public void setCenterX(final float centerX) {
+        this.centerX = centerX;
+    }
+
+    public float getCenterY() {
+        return centerY;
+    }
+
+    public void setCenterY(final float centerY) {
+        this.centerY = centerY;
+    }
+
+    public float getRadius() {
+        return radius;
+    }
+
+    public void setRadius(final float radius) {
+        this.radius = radius;
+    }
+
+    public float getScale() {
+        return scale;
+    }
+
+    public void setScale(final float scale) {
+        this.scale = scale;
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCGAColorspaceFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCGAColorspaceFilter.java
new file mode 100644
index 00000000..dc8f0038
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCGAColorspaceFilter.java
@@ -0,0 +1,54 @@
+package com.xypower.gpuv.egl.filter;
+
+
+
+/**
+ * Emulates the classic CGA display: pixelates the image into a coarse grid
+ * (200x320 sample cells) and snaps each cell to the nearest color of the
+ * 4-entry CGA palette (black, white, cyan, magenta).
+ */
+public class GlCGAColorspaceFilter extends GlFilter {
+
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;" +
+
+                    "varying vec2 vTextureCoord;" +
+                    "uniform lowp sampler2D sTexture;" +
+
+                    "void main() {" +
+                    // Quantize coordinates to a 200x320 grid for the pixelated look.
+                    "highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);" +
+
+                    "highp vec2 samplePos = vTextureCoord - mod(vTextureCoord, sampleDivisor);" +
+                    "highp vec4 color = texture2D(sTexture, samplePos);" +
+
+                    "mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);" +
+                    "mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);" +
+                    "mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);" +
+                    "mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);" +
+
+                    "mediump vec4 endColor;" +
+                    "highp float blackDistance = distance(color, colorBlack);" +
+                    "highp float whiteDistance = distance(color, colorWhite);" +
+                    "highp float magentaDistance = distance(color, colorMagenta);" +
+                    "highp float cyanDistance = distance(color, colorCyan);" +
+
+                    "mediump vec4 finalColor;" +
+
+                    // Pick the palette entry with the smallest Euclidean distance.
+                    "highp float colorDistance = min(magentaDistance, cyanDistance);" +
+                    "colorDistance = min(colorDistance, whiteDistance);" +
+                    "colorDistance = min(colorDistance, blackDistance);" +
+
+                    "if (colorDistance == blackDistance) {" +
+                    "finalColor = colorBlack;" +
+                    "} else if (colorDistance == whiteDistance) {" +
+                    "finalColor = colorWhite;" +
+                    "} else if (colorDistance == cyanDistance) {" +
+                    "finalColor = colorCyan;" +
+                    "} else {" +
+                    "finalColor = colorMagenta;" +
+                    "}" +
+
+                    "gl_FragColor = finalColor;" +
+                    "}";
+
+    public GlCGAColorspaceFilter() {
+        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlContrastFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlContrastFilter.java
new file mode 100644
index 00000000..f92b65fc
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlContrastFilter.java
@@ -0,0 +1,41 @@
+package com.xypower.gpuv.egl.filter;
+
+
+import android.opengl.GLES20;
+
+/**
+ * Changes the contrast of the image.
+ * contrast value ranges from 0.0 to 4.0, with 1.0 as the normal level
+ */
+public class GlContrastFilter extends GlFilter {
+
+    private static final String CONTRAST_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " \n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform lowp float contrast;\n" +
+            " \n" +
+            " void main()\n" +
+            " {\n" +
+            " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " \n" +
+            " gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n" +
+            " }";
+
+    // Multiplier applied around the mid-grey point; 1.0 leaves the image unchanged.
+    private float contrast = 1.2f;
+
+    public GlContrastFilter() {
+        super(DEFAULT_VERTEX_SHADER, CONTRAST_FRAGMENT_SHADER);
+    }
+
+    public void setContrast(float contrast) {
+        this.contrast = contrast;
+    }
+
+    /** Uploads the contrast uniform before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("contrast"), contrast);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCrosshatchFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCrosshatchFilter.java
new file mode 100644
index 00000000..4af94909
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlCrosshatchFilter.java
@@ -0,0 +1,85 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Sketch-style crosshatch filter: draws diagonal hatch lines whose density
+ * increases as luminance decreases (thresholds at 1.0 / 0.75 / 0.5 / 0.3).
+ */
+public class GlCrosshatchFilter extends GlFilter {
+
+    private static final String CROSSHATCH_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            "uniform highp float crossHatchSpacing;\n" +
+            "uniform highp float lineWidth;\n" +
+            // Rec.709 luma coefficients.
+            "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
+            "void main()\n" +
+            "{\n" +
+            "highp float luminance = dot(texture2D(sTexture, vTextureCoord).rgb, W);\n" +
+            "lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);\n" +
+            "if (luminance < 1.00)\n" +
+            "{\n" +
+            "if (mod(vTextureCoord.x + vTextureCoord.y, crossHatchSpacing) <= lineWidth)\n" +
+            "{\n" +
+            "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
+            "}\n" +
+            "}\n" +
+            "if (luminance < 0.75)\n" +
+            "{\n" +
+            "if (mod(vTextureCoord.x - vTextureCoord.y, crossHatchSpacing) <= lineWidth)\n" +
+            "{\n" +
+            "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
+            "}\n" +
+            "}\n" +
+            "if (luminance < 0.50)\n" +
+            "{\n" +
+            "if (mod(vTextureCoord.x + vTextureCoord.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n" +
+            "{\n" +
+            "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
+            "}\n" +
+            "}\n" +
+            "if (luminance < 0.3)\n" +
+            "{\n" +
+            "if (mod(vTextureCoord.x - vTextureCoord.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n" +
+            "{\n" +
+            "colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n" +
+            "}\n" +
+            "}\n" +
+            "gl_FragColor = colorToDisplay;\n" +
+            "}\n";
+
+    public GlCrosshatchFilter() {
+        super(DEFAULT_VERTEX_SHADER, CROSSHATCH_FRAGMENT_SHADER);
+    }
+
+    // Spacing between hatch lines and line thickness, in normalized texture coords.
+    private float crossHatchSpacing = 0.03f;
+    private float lineWidth = 0.003f;
+    // Smallest useful spacing (one texel) for the current frame width;
+    // 0 until setFrameSize() is first called.
+    private float singlePixelSpacing = 0f;
+
+    /** Uploads the spacing/line-width uniforms before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("crossHatchSpacing"), crossHatchSpacing);
+        GLES20.glUniform1f(getHandle("lineWidth"), lineWidth);
+    }
+
+    public void setCrossHatchSpacing(float crossHatchSpacing) {
+        // Fix: apply the same one-texel lower bound as setFrameSize(); previously a
+        // value set after the frame size was known could fall below one pixel.
+        this.crossHatchSpacing = Math.max(crossHatchSpacing, singlePixelSpacing);
+    }
+
+    public void setLineWidth(float lineWidth) {
+        this.lineWidth = lineWidth;
+    }
+
+    /** Recomputes the one-texel minimum spacing and clamps the current spacing. */
+    @Override
+    public void setFrameSize(int width, int height) {
+        super.setFrameSize(width, height);
+
+        if (width != 0) {
+            singlePixelSpacing = 1.0f / (float) width;
+        } else {
+            // Fallback guess when the width is unknown.
+            singlePixelSpacing = 1.0f / 2048.0f;
+        }
+        if (crossHatchSpacing < singlePixelSpacing) {
+            this.crossHatchSpacing = singlePixelSpacing;
+        }
+
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlExposureFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlExposureFilter.java
new file mode 100644
index 00000000..cb699ed7
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlExposureFilter.java
@@ -0,0 +1,38 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Multiplies colors by 2^exposure (photographic stops).
+ * exposure: The adjusted exposure (-10.0 - 10.0, with 0.0 as the default)
+ */
+public class GlExposureFilter extends GlFilter {
+
+    private static final String EXPOSURE_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " \n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform highp float exposure;\n" +
+            " \n" +
+            " void main()\n" +
+            " {\n" +
+            " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " \n" +
+            " gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);\n" +
+            " } ";
+
+    public GlExposureFilter() {
+        super(DEFAULT_VERTEX_SHADER, EXPOSURE_FRAGMENT_SHADER);
+    }
+
+    // NOTE(review): the javadoc above says 0.0 is the neutral default, but this
+    // field initializes to 1f (i.e. +1 stop) — confirm the intended default.
+    private float exposure = 1f;
+
+    public void setExposure(float exposure) {
+        this.exposure = exposure;
+    }
+
+    /** Uploads the exposure uniform before each draw. */
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("exposure"), exposure);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilter.java
new file mode 100644
index 00000000..2f6035e0
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilter.java
@@ -0,0 +1,160 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+import com.xypower.gpuv.egl.EglUtil;
+import com.xypower.gpuv.egl.GlFramebufferObject;
+
+import java.util.HashMap;
+
+import static android.opengl.GLES20.GL_FLOAT;
+
+
+public class GlFilter { // base filter: compiles a vertex/fragment shader pair and draws a fullscreen textured quad
+ public static final String DEFAULT_UNIFORM_SAMPLER = "sTexture";
+
+ protected static final String DEFAULT_VERTEX_SHADER =
+ "attribute highp vec4 aPosition;\n" +
+ "attribute highp vec4 aTextureCoord;\n" +
+ "varying highp vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ "gl_Position = aPosition;\n" +
+ "vTextureCoord = aTextureCoord.xy;\n" +
+ "}\n";
+
+ protected static final String DEFAULT_FRAGMENT_SHADER =
+ "precision mediump float;\n" +
+ "varying highp vec2 vTextureCoord;\n" +
+ "uniform lowp sampler2D sTexture;\n" +
+ "void main() {\n" +
+ "gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private static final float[] VERTICES_DATA = new float[]{
+ // X, Y, Z, U, V
+ -1.0f, 1.0f, 0.0f, 0.0f, 1.0f,
+ 1.0f, 1.0f, 0.0f, 1.0f, 1.0f,
+ -1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
+ 1.0f, -1.0f, 0.0f, 1.0f, 0.0f
+ };
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ protected static final int VERTICES_DATA_POS_SIZE = 3;
+ protected static final int VERTICES_DATA_UV_SIZE = 2;
+ protected static final int VERTICES_DATA_STRIDE_BYTES = (VERTICES_DATA_POS_SIZE + VERTICES_DATA_UV_SIZE) * FLOAT_SIZE_BYTES;
+ protected static final int VERTICES_DATA_POS_OFFSET = 0 * FLOAT_SIZE_BYTES;
+ protected static final int VERTICES_DATA_UV_OFFSET = VERTICES_DATA_POS_OFFSET + VERTICES_DATA_POS_SIZE * FLOAT_SIZE_BYTES;
+
+ private final String vertexShaderSource;
+ private String fragmentShaderSource;
+
+ private int program;
+
+ private int vertexShader;
+ private int fragmentShader;
+
+ private int vertexBufferName;
+
+ private final HashMap<String, Integer> handleMap = new HashMap<String, Integer>(); // attrib/uniform name -> GL location cache (was a raw HashMap)
+
+ public GlFilter() {
+ this(DEFAULT_VERTEX_SHADER, DEFAULT_FRAGMENT_SHADER);
+ }
+
+ public GlFilter(final Resources res, final int vertexShaderSourceResId, final int fragmentShaderSourceResId) {
+ this(res.getString(vertexShaderSourceResId), res.getString(fragmentShaderSourceResId));
+ }
+
+ public GlFilter(final String vertexShaderSource, final String fragmentShaderSource) {
+ this.vertexShaderSource = vertexShaderSource;
+ this.fragmentShaderSource = fragmentShaderSource;
+ }
+
+ public void setup() { // (re)compiles shaders and uploads the quad; must run on the GL thread
+ release(); // deleting GL name 0 is a no-op, so this is safe on the first call
+ vertexShader = EglUtil.loadShader(vertexShaderSource, GLES20.GL_VERTEX_SHADER);
+ fragmentShader = EglUtil.loadShader(fragmentShaderSource, GLES20.GL_FRAGMENT_SHADER);
+ program = EglUtil.createProgram(vertexShader, fragmentShader);
+ vertexBufferName = EglUtil.createBuffer(VERTICES_DATA);
+
+ getHandle("aPosition"); // pre-warm the location cache (also fails fast on a bad shader)
+ getHandle("aTextureCoord");
+ getHandle("sTexture");
+ }
+
+ public void setFragmentShaderSource(String fragmentShaderSource) { // takes effect on the next setup()
+ this.fragmentShaderSource = fragmentShaderSource;
+ }
+
+
+ public void setFrameSize(final int width, final int height) {
+ // do nothing
+ }
+
+ public void release() {
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ GLES20.glDeleteShader(vertexShader);
+ vertexShader = 0;
+ GLES20.glDeleteShader(fragmentShader);
+ fragmentShader = 0;
+ GLES20.glDeleteBuffers(1, new int[]{vertexBufferName}, 0);
+ vertexBufferName = 0;
+
+ handleMap.clear(); // cached locations belong to the deleted program
+ }
+
+ //
+ public void draw(final int texName, final GlFramebufferObject fbo) { // renders texName through the shader; fbo is unused here (target is bound by callers)
+ useProgram();
+
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferName);
+ GLES20.glEnableVertexAttribArray(getHandle("aPosition"));
+ GLES20.glVertexAttribPointer(getHandle("aPosition"), VERTICES_DATA_POS_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_POS_OFFSET);
+ GLES20.glEnableVertexAttribArray(getHandle("aTextureCoord"));
+ GLES20.glVertexAttribPointer(getHandle("aTextureCoord"), VERTICES_DATA_UV_SIZE, GL_FLOAT, false, VERTICES_DATA_STRIDE_BYTES, VERTICES_DATA_UV_OFFSET);
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texName);
+ GLES20.glUniform1i(getHandle("sTexture"), 0);
+
+ onDraw(); // subclass hook: upload extra uniforms before the draw call
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glDisableVertexAttribArray(getHandle("aPosition"));
+ GLES20.glDisableVertexAttribArray(getHandle("aTextureCoord"));
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
+ }
+
+ protected void onDraw() {
+ }
+
+ protected final void useProgram() {
+ GLES20.glUseProgram(program);
+ }
+
+ protected final int getVertexBufferName() {
+ return vertexBufferName;
+ }
+
+ protected final int getHandle(final String name) { // resolves an attrib first, then a uniform; caches the result
+ final Integer value = handleMap.get(name);
+ if (value != null) {
+ return value;
+ }
+
+ int location = GLES20.glGetAttribLocation(program, name);
+ if (location == -1) {
+ location = GLES20.glGetUniformLocation(program, name);
+ }
+ if (location == -1) {
+ throw new IllegalStateException("Could not get attrib or uniform location for " + name);
+ }
+ handleMap.put(name, Integer.valueOf(location));
+ return location;
+ }
+
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilterGroup.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilterGroup.java
new file mode 100644
index 00000000..76671d6a
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlFilterGroup.java
@@ -0,0 +1,108 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+import android.util.Pair;
+import com.xypower.gpuv.egl.GlFramebufferObject;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
+import static android.opengl.GLES20.GL_FRAMEBUFFER;
+
+
+public class GlFilterGroup extends GlFilter { // chains filters, ping-ponging the frame through intermediate FBOs
+
+ private final Collection<GlFilter> filters; // applied in iteration order
+
+ private final ArrayList<Pair<GlFilter, GlFramebufferObject>> list = new ArrayList<Pair<GlFilter, GlFramebufferObject>>(); // filter + its output FBO (null FBO for the last filter); generics restored — the raw "ArrayList>" did not compile
+
+ public GlFilterGroup(final GlFilter... glFilters) {
+ this(Arrays.asList(glFilters));
+ }
+
+ public GlFilterGroup(final Collection<GlFilter> glFilters) {
+ filters = glFilters;
+ }
+
+ @Override
+ public void setup() {
+ super.setup();
+
+ if (filters != null) {
+ final int max = filters.size();
+ int count = 0;
+
+ for (final GlFilter shader : filters) {
+ shader.setup();
+ final GlFramebufferObject fbo;
+ if ((count + 1) < max) {
+ fbo = new GlFramebufferObject(); // intermediate target; the last filter renders to the caller's target
+ } else {
+ fbo = null;
+ }
+ list.add(Pair.create(shader, fbo));
+ count++;
+ }
+ }
+ }
+
+ @Override
+ public void release() {
+ for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
+ if (pair.first != null) {
+ pair.first.release();
+ }
+ if (pair.second != null) {
+ pair.second.release();
+ }
+ }
+ list.clear();
+ super.release();
+ }
+
+ @Override
+ public void setFrameSize(final int width, final int height) {
+ super.setFrameSize(width, height);
+
+ for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
+ if (pair.first != null) {
+ pair.first.setFrameSize(width, height);
+ }
+ if (pair.second != null) {
+ pair.second.setup(width, height); // (re)allocate the intermediate FBO at the new size
+ }
+ }
+ }
+
+ private int prevTexName;
+
+ @Override
+ public void draw(final int texName, final GlFramebufferObject fbo) {
+ prevTexName = texName;
+ for (final Pair<GlFilter, GlFramebufferObject> pair : list) {
+ if (pair.second != null) {
+ if (pair.first != null) {
+ pair.second.enable();
+ GLES20.glClear(GL_COLOR_BUFFER_BIT);
+
+ pair.first.draw(prevTexName, pair.second);
+ }
+ prevTexName = pair.second.getTexName(); // the next filter samples this FBO's texture
+
+ } else {
+ if (fbo != null) {
+ fbo.enable();
+ } else {
+ GLES20.glBindFramebuffer(GL_FRAMEBUFFER, 0); // default framebuffer (screen)
+ }
+
+ if (pair.first != null) {
+ pair.first.draw(prevTexName, fbo);
+ }
+ }
+ }
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGammaFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGammaFilter.java
new file mode 100644
index 00000000..936d293c
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGammaFilter.java
@@ -0,0 +1,36 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlGammaFilter extends GlFilter { // per-channel power curve: out = in^gamma
+ private static final String GAMMA_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " varying vec2 vTextureCoord;\n" +
+ " \n" +
+ " uniform lowp sampler2D sTexture;\n" +
+ " uniform lowp float gamma;\n" +
+ " \n" +
+ " void main()\n" +
+ " {\n" +
+ " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+ " \n" +
+ " gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n" + // alpha passes through
+ " }";
+
+ public GlGammaFilter() {
+ super(DEFAULT_VERTEX_SHADER, GAMMA_FRAGMENT_SHADER);
+ }
+
+ private float gamma = 1.2f; // 1.0 would be identity; NOTE(review): the 1.2 default is not neutral — confirm intended
+
+ public void setGamma(float gamma) {
+ this.gamma = gamma;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("gamma"), gamma); // uploaded every frame before the quad draw
+ }
+
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGaussianBlurFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGaussianBlurFilter.java
new file mode 100644
index 00000000..febe1d09
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGaussianBlurFilter.java
@@ -0,0 +1,102 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+
+public class GlGaussianBlurFilter extends GlFilter { // single-pass 9-tap Gaussian blur; sample offsets are precomputed in the vertex shader
+
+ private static final String VERTEX_SHADER =
+ "attribute vec4 aPosition;" +
+ "attribute vec4 aTextureCoord;" +
+
+ "const lowp int GAUSSIAN_SAMPLES = 9;" +
+
+ "uniform highp float texelWidthOffset;" +
+ "uniform highp float texelHeightOffset;" +
+ "uniform highp float blurSize;" +
+
+ "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" + // 9 sample coords handed to the fragment shader
+
+ "void main() {" +
+ "gl_Position = aPosition;" +
+ "highp vec2 vTextureCoord = aTextureCoord.xy;" +
+
+ // Calculate the positions for the blur
+ "int multiplier = 0;" +
+ "highp vec2 blurStep;" +
+ "highp vec2 singleStepOffset = vec2(texelHeightOffset, texelWidthOffset) * blurSize;" + // NOTE(review): (height, width) order looks swapped relative to the uniform names — confirm
+
+ "for (lowp int i = 0; i < GAUSSIAN_SAMPLES; i++) {" +
+ "multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));" + // -4 .. 4 around the center tap
+ // Blur in x (horizontal)
+ "blurStep = float(multiplier) * singleStepOffset;" +
+ "blurCoordinates[i] = vTextureCoord.xy + blurStep;" +
+ "}" +
+ "}";
+
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;" +
+
+ "const lowp int GAUSSIAN_SAMPLES = 9;" +
+ "varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];" +
+
+ "uniform lowp sampler2D sTexture;" +
+
+ "void main() {" +
+ "lowp vec4 sum = vec4(0.0);" +
+
+ "sum += texture2D(sTexture, blurCoordinates[0]) * 0.05;" + // fixed symmetric weights; they sum to 1.0
+ "sum += texture2D(sTexture, blurCoordinates[1]) * 0.09;" +
+ "sum += texture2D(sTexture, blurCoordinates[2]) * 0.12;" +
+ "sum += texture2D(sTexture, blurCoordinates[3]) * 0.15;" +
+ "sum += texture2D(sTexture, blurCoordinates[4]) * 0.18;" +
+ "sum += texture2D(sTexture, blurCoordinates[5]) * 0.15;" +
+ "sum += texture2D(sTexture, blurCoordinates[6]) * 0.12;" +
+ "sum += texture2D(sTexture, blurCoordinates[7]) * 0.09;" +
+ "sum += texture2D(sTexture, blurCoordinates[8]) * 0.05;" +
+
+ "gl_FragColor = sum;" +
+ "}";
+
+ private float texelWidthOffset = 0.01f;
+ private float texelHeightOffset = 0.01f;
+ private float blurSize = 0.2f;
+
+ public GlGaussianBlurFilter() {
+ super(VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+ public float getTexelWidthOffset() {
+ return texelWidthOffset;
+ }
+
+ public void setTexelWidthOffset(final float texelWidthOffset) {
+ this.texelWidthOffset = texelWidthOffset;
+ }
+
+ public float getTexelHeightOffset() {
+ return texelHeightOffset;
+ }
+
+ public void setTexelHeightOffset(final float texelHeightOffset) {
+ this.texelHeightOffset = texelHeightOffset;
+ }
+
+ public float getBlurSize() {
+ return blurSize;
+ }
+
+ public void setBlurSize(final float blurSize) {
+ this.blurSize = blurSize;
+ }
+
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("texelWidthOffset"), texelWidthOffset);
+ GLES20.glUniform1f(getHandle("texelHeightOffset"), texelHeightOffset);
+ GLES20.glUniform1f(getHandle("blurSize"), blurSize);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGrayScaleFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGrayScaleFilter.java
new file mode 100644
index 00000000..c8c19220
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlGrayScaleFilter.java
@@ -0,0 +1,21 @@
+package com.xypower.gpuv.egl.filter;
+
+
+
+public class GlGrayScaleFilter extends GlFilter { // renders the frame as grayscale
+
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;" +
+ "varying vec2 vTextureCoord;" +
+ "uniform lowp sampler2D sTexture;" +
+ "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" + // Rec. 709 luma coefficients
+ "void main() {" +
+ "float luminance = dot(texture2D(sTexture, vTextureCoord).rgb, weight);" +
+ "gl_FragColor = vec4(vec3(luminance), 1.0);" + // alpha forced to 1.0
+ "}";
+
+ public GlGrayScaleFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHalftoneFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHalftoneFilter.java
new file mode 100644
index 00000000..c47fe420
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHalftoneFilter.java
@@ -0,0 +1,53 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlHalftoneFilter extends GlFilter { // black-and-white halftone: dot size driven by the cell's luminance
+
+ private static final String HALFTONE_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " varying vec2 vTextureCoord;\n" +
+
+ " uniform lowp sampler2D sTexture;\n" +
+
+ "uniform highp float fractionalWidthOfPixel;\n" + // dot-grid cell size in texture coords
+ "uniform highp float aspectRatio;\n" +
+
+ "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" + // Rec. 709 luma weights
+
+ "void main()\n" +
+ "{\n" +
+ " highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n" +
+ " highp vec2 samplePos = vTextureCoord - mod(vTextureCoord, sampleDivisor) + 0.5 * sampleDivisor;\n" + // center of this grid cell
+ " highp vec2 textureCoordinateToUse = vec2(vTextureCoord.x, (vTextureCoord.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n" +
+ " highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n" +
+ " highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n" +
+ " lowp vec3 sampledColor = texture2D(sTexture, samplePos).rgb;\n" +
+ " highp float dotScaling = 1.0 - dot(sampledColor, W);\n" + // darker cell => larger dot
+ " lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n" +
+ " gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);\n" +
+ "}";
+
+ public GlHalftoneFilter() {
+ super(DEFAULT_VERTEX_SHADER, HALFTONE_FRAGMENT_SHADER);
+ }
+
+ private float fractionalWidthOfPixel = 0.01f;
+ private float aspectRatio = 1f;
+
+ public void setFractionalWidthOfAPixel(float fractionalWidthOfAPixel) {
+ this.fractionalWidthOfPixel = fractionalWidthOfAPixel;
+ }
+
+ @Override
+ public void setFrameSize(int width, int height) {
+ super.setFrameSize(width, height);
+ aspectRatio = (float) height / (float) width;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("fractionalWidthOfPixel"), fractionalWidthOfPixel);
+ GLES20.glUniform1f(getHandle("aspectRatio"), aspectRatio);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHazeFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHazeFilter.java
new file mode 100644
index 00000000..5c555c2e
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHazeFilter.java
@@ -0,0 +1,54 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+
+public class GlHazeFilter extends GlFilter { // removes a vertical haze gradient (distance + slope * y) from the frame
+
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;" +
+ "varying highp vec2 vTextureCoord;" +
+ "uniform lowp sampler2D sTexture;" +
+ "uniform lowp float distance;" + // NOTE(review): "distance" is also a GLSL built-in function name — some compilers may reject it; confirm
+ "uniform highp float slope;" +
+
+ "void main() {" +
+ "highp vec4 color = vec4(1.0);" + // haze color: white
+
+ "highp float d = vTextureCoord.y * slope + distance;" +
+
+ "highp vec4 c = texture2D(sTexture, vTextureCoord);" +
+ "c = (c - d * color) / (1.0 -d);" + // subtract the haze amount, then renormalize
+ "gl_FragColor = c;" + // consider using premultiply(c);
+ "}";
+
+ private float distance = 0.2f;
+ private float slope = 0.0f;
+
+ public GlHazeFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+ public float getDistance() {
+ return distance;
+ }
+
+ public void setDistance(final float distance) {
+ this.distance = distance;
+ }
+
+ public float getSlope() {
+ return slope;
+ }
+
+ public void setSlope(final float slope) {
+ this.slope = slope;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("distance"), distance);
+ GLES20.glUniform1f(getHandle("slope"), slope);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHighlightShadowFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHighlightShadowFilter.java
new file mode 100644
index 00000000..8cfd9be7
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHighlightShadowFilter.java
@@ -0,0 +1,49 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlHighlightShadowFilter extends GlFilter { // lifts shadows and recovers highlights based on per-pixel luminance
+
+ private static final String HIGHLIGHT_SHADOW_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " uniform lowp sampler2D sTexture;\n" +
+ " varying vec2 vTextureCoord;\n" +
+ " \n" +
+ " uniform lowp float shadows;\n" +
+ " uniform lowp float highlights;\n" +
+ " \n" +
+ " const mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);\n" + // equal-weight luminance (not Rec. 709)
+ " \n" +
+ " void main()\n" +
+ " {\n" +
+ " lowp vec4 source = texture2D(sTexture, vTextureCoord);\n" +
+ " mediump float luminance = dot(source.rgb, luminanceWeighting);\n" +
+ " \n" +
+ " mediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);\n" +
+ " mediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);\n" +
+ " lowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));\n" + // NOTE(review): divides by luminance — NaN on pure-black pixels; confirm acceptable
+ " \n" +
+ " gl_FragColor = vec4(result.rgb, source.a);\n" +
+ " }";
+
+ public GlHighlightShadowFilter() {
+ super(DEFAULT_VERTEX_SHADER, HIGHLIGHT_SHADOW_FRAGMENT_SHADER);
+ }
+
+ private float shadows = 1f;
+ private float highlights = 0f;
+
+ public void setShadows(float shadows) {
+ this.shadows = shadows;
+ }
+
+ public void setHighlights(float highlights) {
+ this.highlights = highlights;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("shadows"), shadows);
+ GLES20.glUniform1f(getHandle("highlights"), highlights);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHueFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHueFilter.java
new file mode 100644
index 00000000..864aace8
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlHueFilter.java
@@ -0,0 +1,67 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlHueFilter extends GlFilter { // rotates hue via RGB -> YIQ, rotating the chroma angle, then YIQ -> RGB
+
+ private static final String HUE_FRAGMENT_SHADER = "" +
+ "precision highp float;\n" +
+ " varying vec2 vTextureCoord;\n" +
+ "\n" +
+ " uniform lowp sampler2D sTexture;\n" +
+ "uniform mediump float hueAdjust;\n" + // rotation amount, consumed as radians by sin/cos below
+ "const highp vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);\n" +
+ "const highp vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);\n" +
+ "const highp vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);\n" +
+ "\n" +
+ "const highp vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);\n" +
+ "const highp vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);\n" +
+ "const highp vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);\n" +
+ "\n" +
+ "void main ()\n" +
+ "{\n" +
+ " // Sample the input pixel\n" +
+ " highp vec4 color = texture2D(sTexture, vTextureCoord);\n" +
+ "\n" +
+ " // Convert to YIQ\n" +
+ " highp float YPrime = dot (color, kRGBToYPrime);\n" +
+ " highp float I = dot (color, kRGBToI);\n" +
+ " highp float Q = dot (color, kRGBToQ);\n" +
+ "\n" +
+ " // Calculate the hue and chroma\n" +
+ " highp float hue = atan (Q, I);\n" +
+ " highp float chroma = sqrt (I * I + Q * Q);\n" +
+ "\n" +
+ " // Make the user's adjustments\n" +
+ " hue += (-hueAdjust); //why negative rotation?\n" +
+ "\n" +
+ " // Convert back to YIQ\n" +
+ " Q = chroma * sin (hue);\n" +
+ " I = chroma * cos (hue);\n" +
+ "\n" +
+ " // Convert back to RGB\n" +
+ " highp vec4 yIQ = vec4 (YPrime, I, Q, 0.0);\n" +
+ " color.r = dot (yIQ, kYIQToR);\n" +
+ " color.g = dot (yIQ, kYIQToG);\n" +
+ " color.b = dot (yIQ, kYIQToB);\n" +
+ "\n" +
+ " // Save the result\n" +
+ " gl_FragColor = color;\n" +
+ "}\n";
+
+ public GlHueFilter() {
+ super(DEFAULT_VERTEX_SHADER, HUE_FRAGMENT_SHADER);
+ }
+
+ private float hue = 90f; // NOTE(review): passed straight to the shader, which treats it as radians — confirm whether callers expect degrees (no conversion is done here)
+
+ public void setHue(float hue) {
+ this.hue = hue;
+ }
+
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("hueAdjust"), hue);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlInvertFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlInvertFilter.java
new file mode 100644
index 00000000..0203c4e9
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlInvertFilter.java
@@ -0,0 +1,18 @@
+package com.xypower.gpuv.egl.filter;
+
+
+
+public class GlInvertFilter extends GlFilter { // color negative: inverts RGB, preserves alpha
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;" +
+ "varying vec2 vTextureCoord;" +
+ "uniform lowp sampler2D sTexture;" +
+ "void main() {" +
+ "lowp vec4 color = texture2D(sTexture, vTextureCoord);" +
+ "gl_FragColor = vec4((1.0 - color.rgb), color.w);" +
+ "}";
+
+ public GlInvertFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLookUpTableFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLookUpTableFilter.java
new file mode 100644
index 00000000..2e529059
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLookUpTableFilter.java
@@ -0,0 +1,91 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.content.res.Resources;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.opengl.GLES20;
+
+import com.xypower.gpuv.egl.EglUtil;
+
+
+
+public class GlLookUpTableFilter extends GlFilter { // color grading via a LUT bitmap sampled as a 3D texture
+
+ private final static String FRAGMENT_SHADER =
+ "precision mediump float;" +
+ "uniform mediump sampler2D lutTexture; \n" +
+ "uniform lowp sampler2D sTexture; \n" +
+ "varying highp vec2 vTextureCoord; \n" +
+ "vec4 sampleAs3DTexture(vec3 uv) {\n" +
+ " float width = 16.;\n" + // assumes a 16x16x16 LUT packed as 16 slices side by side — confirm bitmap layout
+ " float sliceSize = 1.0 / width;\n" +
+ " float slicePixelSize = sliceSize / width;\n" +
+ " float sliceInnerSize = slicePixelSize * (width - 1.0);\n" +
+ " float zSlice0 = min(floor(uv.z * width), width - 1.0);\n" +
+ " float zSlice1 = min(zSlice0 + 1.0, width - 1.0);\n" +
+ " float xOffset = slicePixelSize * 0.5 + uv.x * sliceInnerSize;\n" +
+ " float s0 = xOffset + (zSlice0 * sliceSize);\n" +
+ " float s1 = xOffset + (zSlice1 * sliceSize);\n" +
+ " vec4 slice0Color = texture2D(lutTexture, vec2(s0, uv.y));\n" +
+ " vec4 slice1Color = texture2D(lutTexture, vec2(s1, uv.y));\n" +
+ " float zOffset = mod(uv.z * width, 1.0);\n" +
+ " vec4 result = mix(slice0Color, slice1Color, zOffset);\n" + // interpolate between the two nearest z-slices
+ " return result;\n" +
+ "}\n" +
+ "void main() {\n" +
+ " vec4 pixel = texture2D(sTexture, vTextureCoord);\n" +
+ " vec4 gradedPixel = sampleAs3DTexture(pixel.rgb);\n" +
+ " gradedPixel.a = pixel.a;\n" + // keep the source alpha
+ " pixel = gradedPixel;\n" +
+ " gl_FragColor = pixel;\n " +
+ "}";
+
+ public GlLookUpTableFilter(Bitmap bitmap) {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ this.lutTexture = bitmap;
+ hTex = EglUtil.NO_TEXTURE;
+ }
+
+
+ public GlLookUpTableFilter(Resources resources, int fxID) {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ this.lutTexture = BitmapFactory.decodeResource(resources, fxID);
+ hTex = EglUtil.NO_TEXTURE;
+ }
+
+ private int hTex; // GL texture name holding the uploaded LUT
+
+ private Bitmap lutTexture;
+
+ @Override
+ public void onDraw() {
+ int offsetDepthMapTextureUniform = getHandle("lutTexture");
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE3); // LUT on texture unit 3; the frame stays on unit 0
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, hTex);
+ GLES20.glUniform1i(offsetDepthMapTextureUniform, 3);
+ }
+
+ @Override
+ public void setup() {
+ super.setup();
+ loadTexture();
+ }
+
+ private void loadTexture() {
+ if (hTex == EglUtil.NO_TEXTURE) {
+ hTex = EglUtil.loadTexture(lutTexture, EglUtil.NO_TEXTURE, false);
+ }
+ }
+
+ public void releaseLutBitmap() {
+ if (lutTexture != null && !lutTexture.isRecycled()) {
+ lutTexture.recycle();
+ lutTexture = null;
+ }
+ }
+
+ public void reset() { // NOTE(review): fails if releaseLutBitmap() ran first (lutTexture null/recycled) — confirm call order
+ hTex = EglUtil.NO_TEXTURE;
+ hTex = EglUtil.loadTexture(lutTexture, EglUtil.NO_TEXTURE, false);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceFilter.java
new file mode 100644
index 00000000..dda465a1
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceFilter.java
@@ -0,0 +1,26 @@
+package com.xypower.gpuv.egl.filter;
+
+public class GlLuminanceFilter extends GlFilter { // replaces RGB with Rec. 709 luminance; alpha preserved
+
+ private static final String LUMINANCE_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ "\n" +
+ " varying vec2 vTextureCoord;\n" +
+ "\n" +
+ " uniform lowp sampler2D sTexture;\n" +
+ "\n" +
+ "// Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n" +
+ "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
+ "\n" +
+ "void main()\n" +
+ "{\n" +
+ " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+ " float luminance = dot(textureColor.rgb, W);\n" +
+ " \n" +
+ " gl_FragColor = vec4(vec3(luminance), textureColor.a);\n" +
+ "}";
+
+ public GlLuminanceFilter() {
+ super(DEFAULT_VERTEX_SHADER, LUMINANCE_FRAGMENT_SHADER);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceThresholdFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceThresholdFilter.java
new file mode 100644
index 00000000..457fbab1
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlLuminanceThresholdFilter.java
@@ -0,0 +1,39 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlLuminanceThresholdFilter extends GlFilter { // binarizes the frame: white where luminance >= threshold, else black
+
+ private static final String LUMINANCE_THRESHOLD_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ "varying highp vec2 vTextureCoord;\n" +
+ "\n" +
+ "uniform lowp sampler2D sTexture;\n" +
+ "uniform highp float threshold;\n" +
+ "\n" +
+ "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" + // Rec. 709 luma weights
+ "\n" +
+ "void main()\n" +
+ "{\n" +
+ " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+ " highp float luminance = dot(textureColor.rgb, W);\n" +
+ " highp float thresholdResult = step(threshold, luminance);\n" + // step(): 1.0 when luminance >= threshold
+ " \n" +
+ " gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);\n" +
+ "}";
+
+ public GlLuminanceThresholdFilter() {
+ super(DEFAULT_VERTEX_SHADER, LUMINANCE_THRESHOLD_FRAGMENT_SHADER);
+ }
+
+ private float threshold = 0.5f;
+
+ public void setThreshold(float threshold) {
+ this.threshold = threshold;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("threshold"), threshold);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlMonochromeFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlMonochromeFilter.java
new file mode 100644
index 00000000..9b7c5b52
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlMonochromeFilter.java
@@ -0,0 +1,54 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+
+public class GlMonochromeFilter extends GlFilter { // desaturates, overlay-blends toward fixed warm constants, then mixes with the original by intensity
+
+ private static final String FRAGMENT_SHADER =
+ "precision lowp float;" +
+
+ "varying highp vec2 vTextureCoord;" +
+ "uniform lowp sampler2D sTexture;" +
+ "uniform float intensity;" +
+
+ "const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);" + // Rec. 709 luma weights
+
+ "void main() {" +
+
+ "lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);" +
+ "float luminance = dot(textureColor.rgb, luminanceWeighting);" +
+
+ "lowp vec4 desat = vec4(vec3(luminance), 1.0);" +
+
+ "lowp vec4 outputColor = vec4(" + // overlay blend of the desaturated value against constants (0.6, 0.45, 0.3)
+ "(desat.r < 0.5 ? (2.0 * desat.r * 0.6) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - 0.6)))," +
+ "(desat.g < 0.5 ? (2.0 * desat.g * 0.45) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - 0.45)))," +
+ "(desat.b < 0.5 ? (2.0 * desat.b * 0.3) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - 0.3)))," +
+ "1.0" +
+ ");" +
+
+ "gl_FragColor = vec4(mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);" +
+ "}";
+
+ private float intensity = 1.0f; // 0 = original frame, 1 = fully monochrome
+
+ public GlMonochromeFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+ public float getIntensity() {
+ return intensity;
+ }
+
+ public void setIntensity(float intensity) {
+ this.intensity = intensity;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("intensity"), intensity);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOpacityFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOpacityFilter.java
new file mode 100644
index 00000000..6dcc8a43
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOpacityFilter.java
@@ -0,0 +1,40 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Adjusts the alpha channel of the incoming image
+ * opacity: The value to multiply the incoming alpha channel for each pixel by (0.0 - 1.0, with 1.0 as the default)
+ */
+public class GlOpacityFilter extends GlFilter {
+
+ private static final String OPACITY_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " varying highp vec2 vTextureCoord;\n" +
+ " \n" +
+ " uniform lowp sampler2D sTexture;\n" +
+ " uniform lowp float opacity;\n" +
+ " \n" +
+ " void main()\n" +
+ " {\n" +
+ " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+ " \n" +
+ " gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n" + // scales alpha only; RGB passes through
+ " }\n";
+
+ public GlOpacityFilter() {
+ super(DEFAULT_VERTEX_SHADER, OPACITY_FRAGMENT_SHADER);
+ }
+
+ private float opacity = 1f; // 1.0 = unchanged, per the class Javadoc
+
+ public void setOpacity(float opacity) {
+ this.opacity = opacity;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("opacity"), opacity);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOverlayFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOverlayFilter.java
new file mode 100644
index 00000000..f3ca899f
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlOverlayFilter.java
@@ -0,0 +1,99 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.util.Size;
+
+
+
+public abstract class GlOverlayFilter extends GlFilter { // draws a subclass-provided Canvas overlay alpha-blended over the video frame
+
+ private int[] textures = new int[1]; // NOTE(review): this texture is never glDeleteTextures'd (no release() override) — confirm lifecycle
+
+ private Bitmap bitmap = null;
+
+ protected Size inputResolution = new Size(1280, 720);
+
+ public GlOverlayFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+ private final static String FRAGMENT_SHADER =
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform lowp sampler2D sTexture;\n" +
+ "uniform lowp sampler2D oTexture;\n" +
+ "void main() {\n" +
+ " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+ " lowp vec4 textureColor2 = texture2D(oTexture, vTextureCoord);\n" +
+ " \n" +
+ " gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);\n" + // blend the overlay on top by its alpha
+ "}\n";
+
+ public void setResolution(Size resolution) {
+ this.inputResolution = resolution;
+ }
+
+ @Override
+ public void setFrameSize(int width, int height) {
+ super.setFrameSize(width, height);
+ setResolution(new Size(width, height));
+ }
+
+ private void createBitmap() {
+ releaseBitmap(bitmap);
+ bitmap = Bitmap.createBitmap(inputResolution.getWidth(), inputResolution.getHeight(), Bitmap.Config.ARGB_8888);
+ }
+
+ @Override
+ public void setup() {
+ super.setup();// 1
+ GLES20.glGenTextures(1, textures, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+ createBitmap();
+ }
+
+ @Override
+ public void onDraw() {
+ if (bitmap == null) {
+ createBitmap();
+ }
+ if (bitmap.getWidth() != inputResolution.getWidth() || bitmap.getHeight() != inputResolution.getHeight()) {
+ createBitmap(); // resolution changed since the last frame
+ }
+
+ bitmap.eraseColor(Color.argb(0, 0, 0, 0)); // start each frame fully transparent
+ Canvas bitmapCanvas = new Canvas(bitmap);
+ bitmapCanvas.scale(1, -1, bitmapCanvas.getWidth() / 2, bitmapCanvas.getHeight() / 2); // flip vertically: Canvas is y-down, GL is y-up
+ drawCanvas(bitmapCanvas);
+
+ int offsetDepthMapTextureUniform = getHandle("oTexture");// 3
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+ if (bitmap != null && !bitmap.isRecycled()) {
+ GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap, 0);
+ }
+
+ GLES20.glUniform1i(offsetDepthMapTextureUniform, 3); // overlay sampled from texture unit 3
+ }
+
+ protected abstract void drawCanvas(Canvas canvas);
+
+ public static void releaseBitmap(Bitmap bitmap) {
+ if (bitmap != null && !bitmap.isRecycled()) {
+ bitmap.recycle();
+ // (removed no-op "bitmap = null;" — assigning to the parameter had no effect on the caller)
+ }
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPixelationFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPixelationFilter.java
new file mode 100644
index 00000000..74ddf87c
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPixelationFilter.java
@@ -0,0 +1,52 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Mosaic ("pixelation") filter: snaps texture lookups to a coarse grid whose
+ * cell size is controlled by {@link #setPixel(float)}.
+ */
+public class GlPixelationFilter extends GlFilter {
+
+    private static final String PIXELATION_FRAGMENT_SHADER = "" +
+            "precision highp float;\n" +
+
+            "varying highp vec2 vTextureCoord;\n" +
+
+            "uniform float imageWidthFactor;\n" +
+            "uniform float imageHeightFactor;\n" +
+            "uniform lowp sampler2D sTexture;\n" +
+            "uniform float pixel;\n" +
+
+            "void main()\n" +
+            "{\n" +
+            " vec2 uv = vTextureCoord.xy;\n" +
+            " float dx = pixel * imageWidthFactor;\n" +
+            " float dy = pixel * imageHeightFactor;\n" +
+            " vec2 coord = vec2(dx * floor(uv.x / dx), dy * floor(uv.y / dy));\n" +
+            " vec3 tc = texture2D(sTexture, coord).xyz;\n" +
+            " gl_FragColor = vec4(tc, 1.0);\n" +
+            "}";
+
+    // Edge length of a mosaic cell, in source-frame pixels; 1 is near-identity.
+    private float pixel = 1f;
+    // Reciprocal frame size; refreshed by setFrameSize().
+    private float imageWidthFactor = 1f / 720;
+    private float imageHeightFactor = 1f / 720;
+
+    public GlPixelationFilter() {
+        super(DEFAULT_VERTEX_SHADER, PIXELATION_FRAGMENT_SHADER);
+    }
+
+    public void setPixel(final float pixel) {
+        this.pixel = pixel;
+    }
+
+    @Override
+    public void setFrameSize(int width, int height) {
+        super.setFrameSize(width, height);
+        imageWidthFactor = 1f / width;
+        imageHeightFactor = 1f / height;
+    }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("pixel"), pixel);
+        GLES20.glUniform1f(getHandle("imageWidthFactor"), imageWidthFactor);
+        GLES20.glUniform1f(getHandle("imageHeightFactor"), imageHeightFactor);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPosterizeFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPosterizeFilter.java
new file mode 100644
index 00000000..ee144540
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlPosterizeFilter.java
@@ -0,0 +1,41 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Posterize filter: quantizes each color channel to {@code colorLevels}
+ * discrete steps.
+ */
+public class GlPosterizeFilter extends GlFilter {
+
+    private static final String POSTERIZE_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            "\n" +
+            "uniform lowp sampler2D sTexture;\n" +
+            "uniform highp float colorLevels;\n" +
+            "\n" +
+            "void main()\n" +
+            "{\n" +
+            " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " \n" +
+            " gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;\n" +
+            "}";
+
+    public GlPosterizeFilter() {
+        super(DEFAULT_VERTEX_SHADER, POSTERIZE_FRAGMENT_SHADER);
+    }
+
+    // Number of quantization steps per channel, always kept in [1, 256].
+    private int colorLevels = 10;
+
+    /**
+     * Sets the number of levels per channel, clamped to [1, 256].  The previous
+     * lower bound of 0 would have made the shader divide by zero
+     * ("/ colorLevels"); 1 is the smallest meaningful value.
+     */
+    public void setColorLevels(int colorLevels) {
+        if (colorLevels < 1) {
+            this.colorLevels = 1;
+        } else if (colorLevels > 256) {
+            this.colorLevels = 256;
+        } else {
+            this.colorLevels = colorLevels;
+        }
+    }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("colorLevels"), colorLevels);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlRGBFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlRGBFilter.java
new file mode 100644
index 00000000..0c32d685
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlRGBFilter.java
@@ -0,0 +1,55 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Scales the red, green and blue channels independently.  Each factor is a
+ * normalized multiplier from 0.0 upward; 1.0 (the default) leaves the channel
+ * unchanged.  The output alpha is forced to 1.0 by the shader.
+ */
+public class GlRGBFilter extends GlFilter {
+
+    private static final String RGB_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " \n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform highp float red;\n" +
+            " uniform highp float green;\n" +
+            " uniform highp float blue;\n" +
+            " \n" +
+            " void main()\n" +
+            " {\n" +
+            " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " \n" +
+            " gl_FragColor = vec4(textureColor.r * red, textureColor.g * green, textureColor.b * blue, 1.0);\n" +
+            " }\n";
+
+    // Per-channel multipliers; 1.0 keeps the source value.
+    private float red = 1f;
+    private float green = 1f;
+    private float blue = 1f;
+
+    public GlRGBFilter() {
+        super(DEFAULT_VERTEX_SHADER, RGB_FRAGMENT_SHADER);
+    }
+
+    public void setRed(float red) { this.red = red; }
+
+    public void setGreen(float green) { this.green = green; }
+
+    public void setBlue(float blue) { this.blue = blue; }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("red"), red);
+        GLES20.glUniform1f(getHandle("green"), green);
+        GLES20.glUniform1f(getHandle("blue"), blue);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSaturationFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSaturationFilter.java
new file mode 100644
index 00000000..dc4009ec
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSaturationFilter.java
@@ -0,0 +1,41 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Saturation filter: mixes between the luminance (grey-scale) image and the
+ * original.  0.0 fully desaturates, 1.0 (default) is identity, values above
+ * 1.0 over-saturate.  Alpha is passed through untouched.
+ */
+public class GlSaturationFilter extends GlFilter {
+
+    private static final String SATURATION_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            " \n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform lowp float saturation;\n" +
+            " \n" +
+            " const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n" +
+            " \n" +
+            " void main()\n" +
+            " {\n" +
+            " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " lowp float luminance = dot(textureColor.rgb, luminanceWeighting);\n" +
+            " lowp vec3 greyScaleColor = vec3(luminance);\n" +
+            " \n" +
+            " gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);\n" +
+            " \n" +
+            " }";
+
+    // Mix factor between grey-scale and the original color; 1.0 is identity.
+    private float saturation = 1f;
+
+    public GlSaturationFilter() {
+        super(DEFAULT_VERTEX_SHADER, SATURATION_FRAGMENT_SHADER);
+    }
+
+    public void setSaturation(float saturation) { this.saturation = saturation; }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("saturation"), saturation);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSepiaFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSepiaFilter.java
new file mode 100644
index 00000000..fff46805
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSepiaFilter.java
@@ -0,0 +1,20 @@
+package com.xypower.gpuv.egl.filter;
+
+/**
+ * Sepia-tone filter using the classic sepia color matrix.
+ * NOTE(review): the shader writes only gl_FragColor.rgb; the alpha component
+ * is never assigned (and the "weight" constant is unused) — confirm intended.
+ */
+public class GlSepiaFilter extends GlFilter {
+
+    private static final String SEPIA_FRAGMENT_SHADER =
+            "precision mediump float;" +
+            "varying vec2 vTextureCoord;" +
+            "uniform lowp sampler2D sTexture;" +
+            "const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
+            "void main() {" +
+            " vec4 FragColor = texture2D(sTexture, vTextureCoord);\n" +
+            " gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));\n" +
+            " gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));\n" +
+            " gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));\n" +
+            "}";
+
+    public GlSepiaFilter() {
+        super(DEFAULT_VERTEX_SHADER, SEPIA_FRAGMENT_SHADER);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSharpenFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSharpenFilter.java
new file mode 100644
index 00000000..b97c6685
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSharpenFilter.java
@@ -0,0 +1,96 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Unsharp-style sharpen filter: amplifies the center texel and subtracts its
+ * four axis-aligned neighbours, weighted by {@code sharpness}.
+ */
+public class GlSharpenFilter extends GlFilter {
+
+    private static final String VERTEX_SHADER =
+            "attribute vec4 aPosition;" +
+            "attribute vec4 aTextureCoord;" +
+
+            "uniform float imageWidthFactor;" +
+            "uniform float imageHeightFactor;" +
+            "uniform float sharpness;" +
+
+            "varying highp vec2 textureCoordinate;" +
+            "varying highp vec2 leftTextureCoordinate;" +
+            "varying highp vec2 rightTextureCoordinate;" +
+            "varying highp vec2 topTextureCoordinate;" +
+            "varying highp vec2 bottomTextureCoordinate;" +
+
+            "varying float centerMultiplier;" +
+            "varying float edgeMultiplier;" +
+
+            "void main() {" +
+            "gl_Position = aPosition;" +
+
+            "mediump vec2 widthStep = vec2(imageWidthFactor, 0.0);" +
+            "mediump vec2 heightStep = vec2(0.0, imageHeightFactor);" +
+
+            "textureCoordinate = aTextureCoord.xy;" +
+            "leftTextureCoordinate = textureCoordinate - widthStep;" +
+            "rightTextureCoordinate = textureCoordinate + widthStep;" +
+            "topTextureCoordinate = textureCoordinate + heightStep;" +
+            "bottomTextureCoordinate = textureCoordinate - heightStep;" +
+
+            "centerMultiplier = 1.0 + 4.0 * sharpness;" +
+            "edgeMultiplier = sharpness;" +
+            "}";
+
+    private static final String FRAGMENT_SHADER =
+            "precision highp float;" +
+
+            "uniform lowp sampler2D sTexture;" +
+
+            "varying highp vec2 textureCoordinate;" +
+            "varying highp vec2 leftTextureCoordinate;" +
+            "varying highp vec2 rightTextureCoordinate;" +
+            "varying highp vec2 topTextureCoordinate;" +
+            "varying highp vec2 bottomTextureCoordinate;" +
+
+            "varying float centerMultiplier;" +
+            "varying float edgeMultiplier;" +
+
+            "void main() {" +
+            "mediump vec3 textureColor = texture2D(sTexture, textureCoordinate).rgb;" +
+            "mediump vec3 leftTextureColor = texture2D(sTexture, leftTextureCoordinate).rgb;" +
+            "mediump vec3 rightTextureColor = texture2D(sTexture, rightTextureCoordinate).rgb;" +
+            "mediump vec3 topTextureColor = texture2D(sTexture, topTextureCoordinate).rgb;" +
+            "mediump vec3 bottomTextureColor = texture2D(sTexture, bottomTextureCoordinate).rgb;" +
+
+            "gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(sTexture, bottomTextureCoordinate).w);" +
+            "}";
+
+    // One-texel step sizes in texture space; refreshed by setFrameSize().
+    private float imageWidthFactor = 0.004f;
+    private float imageHeightFactor = 0.004f;
+    // Sharpening strength; 0 is identity, larger values sharpen more.
+    private float sharpness = 1.f;
+
+    public GlSharpenFilter() {
+        super(VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+    public float getSharpness() {
+        return sharpness;
+    }
+
+    public void setSharpness(final float sharpness) {
+        this.sharpness = sharpness;
+    }
+
+    @Override
+    public void setFrameSize(final int width, final int height) {
+        // Forward to the base class like GlPixelationFilter does; the original
+        // omitted this, leaving GlFilter unaware of the frame size.
+        super.setFrameSize(width, height);
+        imageWidthFactor = 1f / width;
+        imageHeightFactor = 1f / height;
+    }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("imageWidthFactor"), imageWidthFactor);
+        GLES20.glUniform1f(getHandle("imageHeightFactor"), imageHeightFactor);
+        GLES20.glUniform1f(getHandle("sharpness"), sharpness);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSolarizeFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSolarizeFilter.java
new file mode 100644
index 00000000..5d349c9c
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSolarizeFilter.java
@@ -0,0 +1,40 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Solarize filter: inverts channels depending on whether the pixel luminance
+ * falls below {@code threshold}.  Alpha is passed through unchanged.
+ */
+public class GlSolarizeFilter extends GlFilter {
+
+    // (Constant renamed from the original's misspelled SOLATIZE_...)
+    private static final String SOLARIZE_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            "\n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform highp float threshold;\n" +
+            "\n" +
+            " const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n" +
+            "\n" +
+            "void main()\n" +
+            "{\n" +
+            " highp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " highp float luminance = dot(textureColor.rgb, W);\n" +
+            " highp float thresholdResult = step(luminance, threshold);\n" +
+            " highp vec3 finalColor = abs(thresholdResult - textureColor.rgb);\n" +
+            " \n" +
+            " gl_FragColor = vec4(finalColor, textureColor.w);\n" +
+            "}";
+
+    // Luminance threshold in [0, 1]; pixels below it are inverted.
+    private float threshold = 0.5f;
+
+    public GlSolarizeFilter() {
+        super(DEFAULT_VERTEX_SHADER, SOLARIZE_FRAGMENT_SHADER);
+    }
+
+    public void setThreshold(float threshold) { this.threshold = threshold; }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("threshold"), threshold);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSphereRefractionFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSphereRefractionFilter.java
new file mode 100644
index 00000000..180fa64e
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSphereRefractionFilter.java
@@ -0,0 +1,73 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Simulates a glass sphere refracting the image inside a circle of
+ * {@code radius} around {@code (centerX, centerY)}; everything outside the
+ * sphere is rendered black (the shader multiplies by the inside/outside step).
+ */
+public class GlSphereRefractionFilter extends GlFilter {
+
+    private static final String FRAGMENT_SHADER =
+            "precision mediump float;" +
+
+            "varying vec2 vTextureCoord;" +
+            "uniform lowp sampler2D sTexture;" +
+            "uniform highp vec2 center;" +
+            "uniform highp float radius;" +
+            "uniform highp float aspectRatio;" +
+            "uniform highp float refractiveIndex;" +
+
+            "void main() {" +
+            "highp vec2 textureCoordinateToUse = vec2(vTextureCoord.x, (vTextureCoord.y * aspectRatio + 0.5 - 0.5 * aspectRatio));" +
+            "highp float distanceFromCenter = distance(center, textureCoordinateToUse);" +
+            "lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);" +
+
+            "distanceFromCenter = distanceFromCenter / radius;" +
+
+            "highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);" +
+            "highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));" +
+
+            "highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);" +
+
+            "gl_FragColor = texture2D(sTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;" +
+            "}";
+
+    // Sphere center in normalized texture coordinates.
+    private float centerX = 0.5f;
+    private float centerY = 0.5f;
+    // Sphere radius in normalized texture coordinates.
+    private float radius = 0.5f;
+    // Frame aspect ratio used to keep the sphere circular.
+    private float aspectRatio = 1.0f;
+    // Ratio passed to GLSL refract(); 0.71 approximates glass.
+    private float refractiveIndex = 0.71f;
+
+    public GlSphereRefractionFilter() {
+        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+    }
+
+    public void setCenterX(float centerX) { this.centerX = centerX; }
+
+    public void setCenterY(float centerY) { this.centerY = centerY; }
+
+    public void setRadius(float radius) { this.radius = radius; }
+
+    public void setAspectRatio(float aspectRatio) { this.aspectRatio = aspectRatio; }
+
+    public void setRefractiveIndex(float refractiveIndex) { this.refractiveIndex = refractiveIndex; }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform2f(getHandle("center"), centerX, centerY);
+        GLES20.glUniform1f(getHandle("radius"), radius);
+        GLES20.glUniform1f(getHandle("aspectRatio"), aspectRatio);
+        GLES20.glUniform1f(getHandle("refractiveIndex"), refractiveIndex);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSwirlFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSwirlFilter.java
new file mode 100644
index 00000000..c71ab942
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlSwirlFilter.java
@@ -0,0 +1,65 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.graphics.PointF;
+import android.opengl.GLES20;
+
+/**
+ * Swirl distortion: rotates texture coordinates around {@code center} by an
+ * angle that falls off quadratically from the center to {@code radius}.
+ */
+public class GlSwirlFilter extends GlFilter {
+
+    private static final String SWIRL_FRAGMENT_SHADER = "" +
+            "precision mediump float;" +
+            " varying vec2 vTextureCoord;\n" +
+            "\n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            "\n" +
+            "uniform highp vec2 center;\n" +
+            "uniform highp float radius;\n" +
+            "uniform highp float angle;\n" +
+            "\n" +
+            "void main()\n" +
+            "{\n" +
+            "highp vec2 textureCoordinateToUse = vTextureCoord;\n" +
+            "highp float dist = distance(center, vTextureCoord);\n" +
+            "if (dist < radius)\n" +
+            "{\n" +
+            "textureCoordinateToUse -= center;\n" +
+            "highp float percent = (radius - dist) / radius;\n" +
+            "highp float theta = percent * percent * angle * 8.0;\n" +
+            "highp float s = sin(theta);\n" +
+            "highp float c = cos(theta);\n" +
+            "textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));\n" +
+            "textureCoordinateToUse += center;\n" +
+            "}\n" +
+            "\n" +
+            "gl_FragColor = texture2D(sTexture, textureCoordinateToUse );\n" +
+            "\n" +
+            "}\n";
+
+    // Swirl strength; the shader scales this by 8.0 at the center.
+    private float angle = 1.0f;
+    // Normalized radius outside of which the image is untouched.
+    private float radius = 0.5f;
+    // Swirl center in normalized texture coordinates.
+    private PointF center = new PointF(0.5f, 0.5f);
+
+    public GlSwirlFilter() {
+        super(DEFAULT_VERTEX_SHADER, SWIRL_FRAGMENT_SHADER);
+    }
+
+    public void setAngle(float angle) { this.angle = angle; }
+
+    public void setRadius(float radius) { this.radius = radius; }
+
+    public void setCenter(PointF center) { this.center = center; }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform2f(getHandle("center"), center.x, center.y);
+        GLES20.glUniform1f(getHandle("radius"), radius);
+        GLES20.glUniform1f(getHandle("angle"), angle);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlThreex3TextureSamplingFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlThreex3TextureSamplingFilter.java
new file mode 100644
index 00000000..c9247ae7
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlThreex3TextureSamplingFilter.java
@@ -0,0 +1,83 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+/**
+ * Base class for 3x3 convolution-style filters.  Its vertex shader computes
+ * the 9 sampling coordinates (center plus 8 neighbours) from one-texel steps
+ * {@code texelWidth}/{@code texelHeight}; subclasses supply the fragment
+ * shader that combines the samples.
+ */
+public class GlThreex3TextureSamplingFilter extends GlFilter {
+    private static final String THREE_X_THREE_TEXTURE_SAMPLING_VERTEX_SHADER =
+            "attribute vec4 aPosition;" +
+            "attribute vec4 aTextureCoord;" +
+
+            "uniform highp float texelWidth;" +
+            "uniform highp float texelHeight;" +
+
+            "varying highp vec2 textureCoordinate;" +
+            "varying highp vec2 leftTextureCoordinate;" +
+            "varying highp vec2 rightTextureCoordinate;" +
+
+            "varying highp vec2 topTextureCoordinate;" +
+            "varying highp vec2 topLeftTextureCoordinate;" +
+            "varying highp vec2 topRightTextureCoordinate;" +
+
+            "varying highp vec2 bottomTextureCoordinate;" +
+            "varying highp vec2 bottomLeftTextureCoordinate;" +
+            "varying highp vec2 bottomRightTextureCoordinate;" +
+
+            "void main() {" +
+            "gl_Position = aPosition;" +
+
+            "vec2 widthStep = vec2(texelWidth, 0.0);" +
+            "vec2 heightStep = vec2(0.0, texelHeight);" +
+            "vec2 widthHeightStep = vec2(texelWidth, texelHeight);" +
+            "vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);" +
+
+            "textureCoordinate = aTextureCoord.xy;" +
+            "leftTextureCoordinate = textureCoordinate - widthStep;" +
+            "rightTextureCoordinate = textureCoordinate + widthStep;" +
+
+            "topTextureCoordinate = textureCoordinate - heightStep;" +
+            "topLeftTextureCoordinate = textureCoordinate - widthHeightStep;" +
+            "topRightTextureCoordinate = textureCoordinate + widthNegativeHeightStep;" +
+
+            "bottomTextureCoordinate = textureCoordinate + heightStep;" +
+            "bottomLeftTextureCoordinate = textureCoordinate - widthNegativeHeightStep;" +
+            "bottomRightTextureCoordinate = textureCoordinate + widthHeightStep;" +
+            "}";
+
+    // One-texel step sizes in texture space; refreshed by setFrameSize().
+    private float texelWidth;
+    private float texelHeight;
+
+    public GlThreex3TextureSamplingFilter(String fragmentShaderSource) {
+        super(THREE_X_THREE_TEXTURE_SAMPLING_VERTEX_SHADER, fragmentShaderSource);
+    }
+
+    public float getTexelWidth() {
+        return texelWidth;
+    }
+
+    public void setTexelWidth(float texelWidth) {
+        this.texelWidth = texelWidth;
+    }
+
+    public float getTexelHeight() {
+        return texelHeight;
+    }
+
+    public void setTexelHeight(float texelHeight) {
+        this.texelHeight = texelHeight;
+    }
+
+    @Override
+    public void setFrameSize(final int width, final int height) {
+        // Forward to the base class like GlPixelationFilter does; the original
+        // omitted this, leaving GlFilter unaware of the frame size.
+        super.setFrameSize(width, height);
+        texelWidth = 1f / width;
+        texelHeight = 1f / height;
+    }
+
+    @Override
+    public void onDraw() {
+        GLES20.glUniform1f(getHandle("texelWidth"), texelWidth);
+        GLES20.glUniform1f(getHandle("texelHeight"), texelHeight);
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneCurveFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneCurveFilter.java
new file mode 100644
index 00000000..c8ff6d9a
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneCurveFilter.java
@@ -0,0 +1,371 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.graphics.Point;
+import android.graphics.PointF;
+import android.opengl.GLES20;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.LinkedList;
+
+/**
+ * Applies a Photoshop-style tone curve (.acv stream) to each frame.  The four
+ * curves (RGB composite plus red/green/blue) are spline-interpolated to 256
+ * entries and uploaded as a 256x1 RGBA lookup texture that the fragment
+ * shader indexes per channel.
+ *
+ * Note: the raw collection types in the original could not compile (e.g.
+ * Object arithmetic, Object assigned to PointF[]); the generic parameters
+ * have been restored throughout.
+ */
+public class GlToneCurveFilter extends GlFilter {
+
+    private final static String FRAGMENT_SHADER =
+            "precision mediump float;\n" +
+            " varying highp vec2 vTextureCoord;\n" +
+            " uniform lowp sampler2D sTexture;\n" +
+            " uniform mediump sampler2D toneCurveTexture;\n" +
+            "\n" +
+            " void main()\n" +
+            " {\n" +
+            " lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);\n" +
+            " lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;\n" +
+            " lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;\n" +
+            " lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;\n" +
+            "\n" +
+            " gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\n" +
+            " }";
+
+    // Control points parsed from the .acv stream (normalized to [0, 1]).
+    private PointF[] rgbCompositeControlPoints;
+    private PointF[] redControlPoints;
+    private PointF[] greenControlPoints;
+    private PointF[] blueControlPoints;
+
+    // Per-index signed offsets from the identity curve, 256+ entries each.
+    private ArrayList<Float> rgbCompositeCurve;
+    private ArrayList<Float> redCurve;
+    private ArrayList<Float> greenCurve;
+    private ArrayList<Float> blueCurve;
+
+    // GL work deferred until setup() runs on the GL thread.
+    private final LinkedList<Runnable> runOnDraw;
+
+    private int[] textures = new int[1];
+
+    // 256 RGBA entries uploaded as the lookup texture.
+    private byte[] toneCurveByteArray;
+
+    public GlToneCurveFilter(InputStream input) {
+        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+        // Identity curve used as the fallback when parsing fails.
+        PointF[] defaultCurvePoints = new PointF[]{new PointF(0.0f, 0.0f), new PointF(0.5f, 0.5f), new PointF(1.0f, 1.0f)};
+        rgbCompositeControlPoints = defaultCurvePoints;
+        redControlPoints = defaultCurvePoints;
+        greenControlPoints = defaultCurvePoints;
+        blueControlPoints = defaultCurvePoints;
+
+        runOnDraw = new LinkedList<>();
+
+        setFromCurveFileInputStream(input);
+
+        setRgbCompositeControlPoints(rgbCompositeControlPoints);
+        setRedControlPoints(redControlPoints);
+        setGreenControlPoints(greenControlPoints);
+        setBlueControlPoints(blueControlPoints);
+    }
+
+    @Override
+    public void setup() {
+        super.setup();
+        GLES20.glGenTextures(1, textures, 0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+        // Drain the deferred GL work under the same lock runOnDraw() uses.
+        synchronized (runOnDraw) {
+            while (!runOnDraw.isEmpty()) {
+                runOnDraw.removeFirst().run();
+            }
+        }
+    }
+
+    @Override
+    public void onDraw() {
+        // Bind the lookup texture to unit 3 and expose it to the shader.
+        int toneCurveTextureUniform = getHandle("toneCurveTexture");
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(toneCurveByteArray));
+
+        GLES20.glUniform1i(toneCurveTextureUniform, 3);
+    }
+
+    /**
+     * Parses an Adobe .acv curve stream: version, curve count, then for each
+     * curve a point count followed by (output, input) pairs in 0..255.
+     * The first four curves are RGB composite, red, green and blue.
+     * On a malformed stream the identity defaults set in the constructor are
+     * kept.  The stream is always closed.
+     */
+    private void setFromCurveFileInputStream(InputStream input) {
+        try {
+            int version = readShort(input);   // format version; not interpreted here
+            int totalCurves = readShort(input);
+
+            ArrayList<PointF[]> curves = new ArrayList<>(totalCurves);
+            float pointRate = 1.0f / 255;
+
+            for (int i = 0; i < totalCurves; i++) {
+                // 2 bytes: count of points in the curve (2..19).
+                short pointCount = readShort(input);
+
+                PointF[] points = new PointF[pointCount];
+
+                // Each point is a pair of shorts: output value first, then
+                // input value; both have range 0..255.
+                for (int j = 0; j < pointCount; j++) {
+                    short y = readShort(input);
+                    short x = readShort(input);
+
+                    points[j] = new PointF(x * pointRate, y * pointRate);
+                }
+
+                curves.add(points);
+            }
+
+            rgbCompositeControlPoints = curves.get(0);
+            redControlPoints = curves.get(1);
+            greenControlPoints = curves.get(2);
+            blueControlPoints = curves.get(3);
+        } catch (IOException e) {
+            // Best effort: keep the identity defaults on a bad file.
+            e.printStackTrace();
+        } finally {
+            try {
+                input.close();
+            } catch (IOException ignored) {
+                // Nothing useful to do if closing fails.
+            }
+        }
+    }
+
+    /**
+     * Reads one big-endian unsigned 16-bit value.  Throws {@link EOFException}
+     * if the stream ends early (the original silently produced garbage from
+     * the -1 returned by read() at EOF).
+     */
+    private short readShort(InputStream input) throws IOException {
+        int hi = input.read();
+        int lo = input.read();
+        if (hi < 0 || lo < 0) {
+            throw new EOFException("Unexpected end of curve stream");
+        }
+        return (short) (hi << 8 | lo);
+    }
+
+    private void setRgbCompositeControlPoints(PointF[] points) {
+        rgbCompositeControlPoints = points;
+        rgbCompositeCurve = createSplineCurve(rgbCompositeControlPoints);
+        updateToneCurveTexture();
+    }
+
+    private void setRedControlPoints(PointF[] points) {
+        redControlPoints = points;
+        redCurve = createSplineCurve(redControlPoints);
+        updateToneCurveTexture();
+    }
+
+    private void setGreenControlPoints(PointF[] points) {
+        greenControlPoints = points;
+        greenCurve = createSplineCurve(greenControlPoints);
+        updateToneCurveTexture();
+    }
+
+    private void setBlueControlPoints(PointF[] points) {
+        blueControlPoints = points;
+        blueCurve = createSplineCurve(blueControlPoints);
+        updateToneCurveTexture();
+    }
+
+    private void runOnDraw(final Runnable runnable) {
+        synchronized (runOnDraw) {
+            runOnDraw.addLast(runnable);
+        }
+    }
+
+    /** Rebuilds the 256x1 BGRA lookup table on the GL thread via runOnDraw. */
+    private void updateToneCurveTexture() {
+        runOnDraw(new Runnable() {
+            @Override
+            public void run() {
+                // NOTE(review): this binds texture unit 1 while onDraw() uses
+                // unit 3 — kept as-is; confirm which unit is intended.
+                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
+
+                if ((redCurve.size() >= 256) && (greenCurve.size() >= 256) && (blueCurve.size() >= 256) && (rgbCompositeCurve.size() >= 256)) {
+                    toneCurveByteArray = new byte[256 * 4];
+                    for (int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++) {
+                        // Each entry is identity (the index) plus the channel
+                        // offset plus the composite offset, clamped to 0..255.
+                        // Stored BGRA for upload to the texture.
+                        toneCurveByteArray[currentCurveIndex * 4 + 2] = (byte) ((int) Math.min(Math.max(currentCurveIndex + blueCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
+                        toneCurveByteArray[currentCurveIndex * 4 + 1] = (byte) ((int) Math.min(Math.max(currentCurveIndex + greenCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
+                        toneCurveByteArray[currentCurveIndex * 4] = (byte) ((int) Math.min(Math.max(currentCurveIndex + redCurve.get(currentCurveIndex) + rgbCompositeCurve.get(currentCurveIndex), 0), 255) & 0xff);
+                        toneCurveByteArray[currentCurveIndex * 4 + 3] = (byte) (255 & 0xff);
+                    }
+
+                    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(toneCurveByteArray));
+                }
+            }
+        });
+    }
+
+    /**
+     * Resamples the control points into 256 per-index offsets from the
+     * identity curve (signed, as floats).  Returns null for empty input.
+     */
+    private ArrayList<Float> createSplineCurve(PointF[] points) {
+        if (points == null || points.length <= 0) {
+            return null;
+        }
+
+        // Sort the control points by their input (x) coordinate.
+        PointF[] pointsSorted = points.clone();
+        Arrays.sort(pointsSorted, new Comparator<PointF>() {
+            @Override
+            public int compare(PointF point1, PointF point2) {
+                return Float.compare(point1.x, point2.x);
+            }
+        });
+
+        // Convert from normalized (0, 1) coordinates to (0, 255).
+        Point[] convertedPoints = new Point[pointsSorted.length];
+        for (int i = 0; i < points.length; i++) {
+            PointF point = pointsSorted[i];
+            convertedPoints[i] = new Point((int) (point.x * 255), (int) (point.y * 255));
+        }
+
+        ArrayList<Point> splinePoints = createSplineCurve2(convertedPoints);
+
+        // If the first point is e.g. (0.3, 0) the indices before it would be
+        // missing; pad the start with zeros.
+        Point firstSplinePoint = splinePoints.get(0);
+        if (firstSplinePoint.x > 0) {
+            for (int i = firstSplinePoint.x; i >= 0; i--) {
+                splinePoints.add(0, new Point(i, 0));
+            }
+        }
+
+        // Pad the end with 255 similarly, if necessary.
+        Point lastSplinePoint = splinePoints.get(splinePoints.size() - 1);
+        if (lastSplinePoint.x < 255) {
+            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
+                splinePoints.add(new Point(i, 255));
+            }
+        }
+
+        // Store each entry as the signed distance from the identity curve
+        // (origPoint = (x, x)); negative when the curve dips below identity.
+        ArrayList<Float> preparedSplinePoints = new ArrayList<>(splinePoints.size());
+        for (Point newPoint : splinePoints) {
+            Point origPoint = new Point(newPoint.x, newPoint.x);
+
+            float distance = (float) Math.sqrt(Math.pow((origPoint.x - newPoint.x), 2.0) + Math.pow((origPoint.y - newPoint.y), 2.0));
+
+            if (origPoint.y > newPoint.y) {
+                distance = -distance;
+            }
+
+            preparedSplinePoints.add(distance);
+        }
+
+        return preparedSplinePoints;
+    }
+
+    /**
+     * Evaluates the natural cubic spline through the control points at every
+     * integer x between the first and last point, clamping y to 0..255.
+     */
+    private ArrayList<Point> createSplineCurve2(Point[] points) {
+        ArrayList<Double> sdA = createSecondDerivative(points);
+
+        int n = sdA.size();
+        if (n < 1) {
+            return null;
+        }
+        double[] sd = new double[n];
+
+        for (int i = 0; i < n; i++) {
+            sd[i] = sdA.get(i);
+        }
+
+        ArrayList<Point> output = new ArrayList<>(n + 1);
+
+        for (int i = 0; i < n - 1; i++) {
+            Point cur = points[i];
+            Point next = points[i + 1];
+
+            for (int x = cur.x; x < next.x; x++) {
+                double t = (double) (x - cur.x) / (next.x - cur.x);
+
+                // Standard cubic-spline interpolation between cur and next
+                // using the second derivatives sd[i], sd[i + 1].
+                double a = 1 - t;
+                double b = t;
+                double h = next.x - cur.x;
+
+                double y = a * cur.y + b * next.y + (h * h / 6) * ((a * a * a - a) * sd[i] + (b * b * b - b) * sd[i + 1]);
+
+                if (y > 255.0) {
+                    y = 255.0;
+                } else if (y < 0.0) {
+                    y = 0.0;
+                }
+
+                output.add(new Point(x, (int) Math.round(y)));
+            }
+        }
+
+        // The loop above excludes the final x; append the last control point.
+        if (output.size() == 255) {
+            output.add(points[points.length - 1]);
+        }
+        return output;
+    }
+
+    /**
+     * Solves the tridiagonal system for the natural cubic spline's second
+     * derivatives (boundary derivatives fixed at 0).  Returns null for fewer
+     * than two points.
+     */
+    private ArrayList<Double> createSecondDerivative(Point[] points) {
+        int n = points.length;
+        if (n <= 1) {
+            return null;
+        }
+
+        double[][] matrix = new double[n][3];
+        double[] result = new double[n];
+        matrix[0][1] = 1;
+        // Natural spline boundary: first row is (0, 1, 0).
+        matrix[0][0] = 0;
+        matrix[0][2] = 0;
+
+        for (int i = 1; i < n - 1; i++) {
+            Point P1 = points[i - 1];
+            Point P2 = points[i];
+            Point P3 = points[i + 1];
+
+            matrix[i][0] = (double) (P2.x - P1.x) / 6;
+            matrix[i][1] = (double) (P3.x - P1.x) / 3;
+            matrix[i][2] = (double) (P3.x - P2.x) / 6;
+            result[i] = (double) (P3.y - P2.y) / (P3.x - P2.x) - (double) (P2.y - P1.y) / (P2.x - P1.x);
+        }
+
+        // Natural spline boundary: zero second derivative at both ends.
+        result[0] = 0;
+        result[n - 1] = 0;
+
+        matrix[n - 1][1] = 1;
+        matrix[n - 1][0] = 0;
+        matrix[n - 1][2] = 0;
+
+        // Forward elimination (top-down).
+        for (int i = 1; i < n; i++) {
+            double k = matrix[i][0] / matrix[i - 1][1];
+            matrix[i][1] -= k * matrix[i - 1][2];
+            matrix[i][0] = 0;
+            result[i] -= k * result[i - 1];
+        }
+        // Back substitution (bottom-up).
+        for (int i = n - 2; i >= 0; i--) {
+            double k = matrix[i][2] / matrix[i + 1][1];
+            matrix[i][1] -= k * matrix[i + 1][0];
+            matrix[i][2] = 0;
+            result[i] -= k * result[i + 1];
+        }
+
+        ArrayList<Double> output = new ArrayList<>(n);
+        for (int i = 0; i < n; i++) output.add(result[i] / matrix[i][1]);
+
+        return output;
+    }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneFilter.java
new file mode 100644
index 00000000..52f44bd7
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlToneFilter.java
@@ -0,0 +1,85 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+
+public class GlToneFilter extends GlThreex3TextureSamplingFilter {
+
+ private static final String FRAGMENT_SHADER =
+ "precision highp float;\n" +
+
+ "uniform lowp sampler2D sTexture;\n" +
+
+ "varying vec2 textureCoordinate;\n" +
+ "varying vec2 leftTextureCoordinate;\n" +
+ "varying vec2 rightTextureCoordinate;\n" +
+
+ "varying vec2 topTextureCoordinate;\n" +
+ "varying vec2 topLeftTextureCoordinate;\n" +
+ "varying vec2 topRightTextureCoordinate;\n" +
+
+ "varying vec2 bottomTextureCoordinate;\n" +
+ "varying vec2 bottomLeftTextureCoordinate;\n" +
+ "varying vec2 bottomRightTextureCoordinate;\n" +
+
+// "uniform highp float intensity;" +
+ "uniform highp float threshold;" +
+ "uniform highp float quantizationLevels;" +
+
+ "const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);" +
+
+ "void main() {\n" +
+ "vec4 textureColor = texture2D(sTexture, textureCoordinate);" +
+
+ "float bottomLeftIntensity = texture2D(sTexture, bottomLeftTextureCoordinate).r;" +
+ "float topRightIntensity = texture2D(sTexture, topRightTextureCoordinate).r;" +
+ "float topLeftIntensity = texture2D(sTexture, topLeftTextureCoordinate).r;" +
+ "float bottomRightIntensity = texture2D(sTexture, bottomRightTextureCoordinate).r;" +
+ "float leftIntensity = texture2D(sTexture, leftTextureCoordinate).r;" +
+ "float rightIntensity = texture2D(sTexture, rightTextureCoordinate).r;" +
+ "float bottomIntensity = texture2D(sTexture, bottomTextureCoordinate).r;" +
+ "float topIntensity = texture2D(sTexture, topTextureCoordinate).r;" +
+ "float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;" +
+ "float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;" +
+
+ "float mag = length(vec2(h, v));" +
+ "vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;" +
+ "float thresholdTest = 1.0 - step(threshold, mag);" +
+ "gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);" +
+ "}";
+
+ private float threshold = 0.2f;
+ private float quantizationLevels = 10f;
+
+
+ public GlToneFilter() {
+ super(FRAGMENT_SHADER);
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+
+ public float getThreshold() {
+ return threshold;
+ }
+
+ public void setThreshold(final float threshold) {
+ this.threshold = threshold;
+ }
+
+ public float getQuantizationLevels() {
+ return quantizationLevels;
+ }
+
+ public void setQuantizationLevels(final float quantizationLevels) {
+ this.quantizationLevels = quantizationLevels;
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("threshold"), threshold);
+ GLES20.glUniform1f(getHandle("quantizationLevels"), quantizationLevels);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVibranceFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVibranceFilter.java
new file mode 100644
index 00000000..459c06e0
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVibranceFilter.java
@@ -0,0 +1,37 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlVibranceFilter extends GlFilter {
+
+ private static final String VIBRANCE_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " varying vec2 vTextureCoord;\n" +
+ "\n" +
+ " uniform lowp sampler2D sTexture;\n" +
+ " uniform lowp float vibrance;\n" +
+ "\n" +
+ "void main() {\n" +
+ " lowp vec4 color = texture2D(sTexture, vTextureCoord);\n" +
+ " lowp float average = (color.r + color.g + color.b) / 3.0;\n" +
+ " lowp float mx = max(color.r, max(color.g, color.b));\n" +
+ " lowp float amt = (mx - average) * (-vibrance * 3.0);\n" +
+ " color.rgb = mix(color.rgb, vec3(mx), amt);\n" +
+ " gl_FragColor = color;\n" +
+ "}";
+
+ public GlVibranceFilter() {
+ super(DEFAULT_VERTEX_SHADER, VIBRANCE_FRAGMENT_SHADER);
+ }
+
+ private float vibrance = 0f;
+
+ public void setVibrance(float vibrance) {
+ this.vibrance = vibrance;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("vibrance"), vibrance);
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVignetteFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVignetteFilter.java
new file mode 100644
index 00000000..5cac4c8c
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlVignetteFilter.java
@@ -0,0 +1,61 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+
+
+public class GlVignetteFilter extends GlFilter {
+
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;" +
+
+ "varying vec2 vTextureCoord;" +
+ "uniform lowp sampler2D sTexture;" +
+
+ "uniform lowp vec2 vignetteCenter;" +
+ "uniform highp float vignetteStart;" +
+ "uniform highp float vignetteEnd;" +
+
+ "void main() {" +
+ "lowp vec3 rgb = texture2D(sTexture, vTextureCoord).rgb;" +
+ "lowp float d = distance(vTextureCoord, vec2(vignetteCenter.x, vignetteCenter.y));" +
+ "lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);" +
+ "gl_FragColor = vec4(mix(rgb.x, 0.0, percent), mix(rgb.y, 0.0, percent), mix(rgb.z, 0.0, percent), 1.0);" +
+ "}";
+
+ private float vignetteCenterX = 0.5f;
+ private float vignetteCenterY = 0.5f;
+ private float vignetteStart = 0.2f;
+ private float vignetteEnd = 0.85f;
+
+ public GlVignetteFilter() {
+ super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
+ }
+
+
+ public float getVignetteStart() {
+ return vignetteStart;
+ }
+
+ public void setVignetteStart(final float vignetteStart) {
+ this.vignetteStart = vignetteStart;
+ }
+
+ public float getVignetteEnd() {
+ return vignetteEnd;
+ }
+
+ public void setVignetteEnd(final float vignetteEnd) {
+ this.vignetteEnd = vignetteEnd;
+ }
+
+ //////////////////////////////////////////////////////////////////////////
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform2f(getHandle("vignetteCenter"), vignetteCenterX, vignetteCenterY);
+ GLES20.glUniform1f(getHandle("vignetteStart"), vignetteStart);
+ GLES20.glUniform1f(getHandle("vignetteEnd"), vignetteEnd);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWatermarkFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWatermarkFilter.java
new file mode 100644
index 00000000..26aeeb18
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWatermarkFilter.java
@@ -0,0 +1,56 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Rect;
+
+public class GlWatermarkFilter extends GlOverlayFilter {
+
+ private Bitmap bitmap;
+ private Position position = Position.LEFT_TOP;
+
+ public GlWatermarkFilter(Bitmap bitmap) {
+ this.bitmap = bitmap;
+ }
+
+
+ public GlWatermarkFilter(Bitmap bitmap, Position position) {
+ this.bitmap = bitmap;
+ this.position = position;
+ }
+
+ @Override
+ protected void drawCanvas(Canvas canvas) {
+ synchronized (bitmap) {
+ canvas.drawBitmap(bitmap, null, canvas.getClipBounds(), null);
+ }
+
+ /*
+ if (bitmap != null && !bitmap.isRecycled()) {
+ switch (position) {
+ case LEFT_TOP:
+ // canvas.drawBitmap(bitmap, 0, 0, null);
+ canvas.drawBitmap(bitmap, null, canvas.getClipBounds(), null);
+ break;
+ case LEFT_BOTTOM:
+ canvas.drawBitmap(bitmap, 0, canvas.getHeight() - bitmap.getHeight(), null);
+ break;
+ case RIGHT_TOP:
+ canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), 0, null);
+ break;
+ case RIGHT_BOTTOM:
+ canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), canvas.getHeight() - bitmap.getHeight(), null);
+ break;
+ }
+ }
+
+ */
+ }
+
+ public enum Position {
+ LEFT_TOP,
+ LEFT_BOTTOM,
+ RIGHT_TOP,
+ RIGHT_BOTTOM
+ }
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWeakPixelInclusionFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWeakPixelInclusionFilter.java
new file mode 100644
index 00000000..f9fcc238
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWeakPixelInclusionFilter.java
@@ -0,0 +1,46 @@
+package com.xypower.gpuv.egl.filter;
+
+
+
+public class GlWeakPixelInclusionFilter extends GlThreex3TextureSamplingFilter {
+
+ private static final String FRAGMENT_SHADER =
+ "precision lowp float;\n" +
+
+ "uniform lowp sampler2D sTexture;\n" +
+
+ "varying vec2 textureCoordinate;\n" +
+ "varying vec2 leftTextureCoordinate;\n" +
+ "varying vec2 rightTextureCoordinate;\n" +
+
+ "varying vec2 topTextureCoordinate;\n" +
+ "varying vec2 topLeftTextureCoordinate;\n" +
+ "varying vec2 topRightTextureCoordinate;\n" +
+
+ "varying vec2 bottomTextureCoordinate;\n" +
+ "varying vec2 bottomLeftTextureCoordinate;\n" +
+ "varying vec2 bottomRightTextureCoordinate;\n" +
+
+ "void main() {\n" +
+ "float bottomLeftIntensity = texture2D(sTexture, bottomLeftTextureCoordinate).r;" +
+ "float topRightIntensity = texture2D(sTexture, topRightTextureCoordinate).r;" +
+ "float topLeftIntensity = texture2D(sTexture, topLeftTextureCoordinate).r;" +
+ "float bottomRightIntensity = texture2D(sTexture, bottomRightTextureCoordinate).r;" +
+ "float leftIntensity = texture2D(sTexture, leftTextureCoordinate).r;" +
+ "float rightIntensity = texture2D(sTexture, rightTextureCoordinate).r;" +
+ "float bottomIntensity = texture2D(sTexture, bottomTextureCoordinate).r;" +
+ "float topIntensity = texture2D(sTexture, topTextureCoordinate).r;" +
+ "float centerIntensity = texture2D(sTexture, textureCoordinate).r;" +
+
+ "float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;" +
+ "float sumTest = step(1.5, pixelIntensitySum);" +
+ "float pixelTest = step(0.01, centerIntensity);" +
+
+ "gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);" +
+ "}";
+
+ public GlWeakPixelInclusionFilter() {
+ super(FRAGMENT_SHADER);
+ }
+
+}
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWhiteBalanceFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWhiteBalanceFilter.java
new file mode 100644
index 00000000..c7089239
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlWhiteBalanceFilter.java
@@ -0,0 +1,58 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.opengl.GLES20;
+
+public class GlWhiteBalanceFilter extends GlFilter {
+
+ private static final String WHITE_BALANCE_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " uniform lowp sampler2D sTexture;\n" +
+ " varying vec2 vTextureCoord;\n" +
+
+ " \n" +
+ "uniform lowp float temperature;\n" +
+ "uniform lowp float tint;\n" +
+ "\n" +
+ "const lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0);\n" +
+ "\n" +
+ "const mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);\n" +
+ "const mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);\n" +
+ "\n" +
+ "void main()\n" +
+ "{\n" +
+ " lowp vec4 source = texture2D(sTexture, vTextureCoord);\n" +
+ " \n" +
+ " mediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint\n" +
+ " yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);\n" +
+ " lowp vec3 rgb = YIQtoRGB * yiq;\n" +
+ "\n" +
+ " lowp vec3 processed = vec3(\n" +
+ " (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature\n" +
+ " (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))), \n" +
+ " (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));\n" +
+ "\n" +
+ " gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);\n" +
+ "}";
+
+ public GlWhiteBalanceFilter() {
+ super(DEFAULT_VERTEX_SHADER, WHITE_BALANCE_FRAGMENT_SHADER);
+ }
+
+ private float temperature = 5000f;
+ private float tint = 0f;
+
+ public void setTemperature(final float temperature) {
+ this.temperature = temperature < 5000 ? (float) (0.0004 * (temperature - 5000.0)) : (float) (0.00006 * (temperature - 5000.0));
+ }
+
+ public void setTint(final float tint) {
+ this.tint = (float) (tint / 100.0);
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform1f(getHandle("temperature"), temperature);
+ GLES20.glUniform1f(getHandle("tint"), tint);
+ }
+
+}
\ No newline at end of file
diff --git a/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlZoomBlurFilter.java b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlZoomBlurFilter.java
new file mode 100644
index 00000000..69489407
--- /dev/null
+++ b/gpuv/src/main/java/com/xypower/gpuv/egl/filter/GlZoomBlurFilter.java
@@ -0,0 +1,56 @@
+package com.xypower.gpuv.egl.filter;
+
+import android.graphics.PointF;
+import android.opengl.GLES20;
+
+public class GlZoomBlurFilter extends GlFilter {
+
+ private static final String ZOOM_BLUR_FRAGMENT_SHADER = "" +
+ "precision mediump float;" +
+ " varying vec2 vTextureCoord;\n" +
+ "\n" +
+ "uniform lowp sampler2D sTexture;\n" +
+ "\n" +
+ "uniform highp vec2 blurCenter;\n" +
+ "uniform highp float blurSize;\n" +
+ "\n" +
+ "void main()\n" +
+ "{\n" +
+ " // TODO: Do a more intelligent scaling based on resolution here\n" +
+ " highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - vTextureCoord) * blurSize;\n" +
+ " \n" +
+ " lowp vec4 fragmentColor = texture2D(sTexture, vTextureCoord) * 0.18;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord + samplingOffset) * 0.15;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord + (2.0 * samplingOffset)) * 0.12;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord + (3.0 * samplingOffset)) * 0.09;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord + (4.0 * samplingOffset)) * 0.05;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord - samplingOffset) * 0.15;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord - (2.0 * samplingOffset)) * 0.12;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord - (3.0 * samplingOffset)) * 0.09;\n" +
+ " fragmentColor += texture2D(sTexture, vTextureCoord - (4.0 * samplingOffset)) * 0.05;\n" +
+ " \n" +
+ " gl_FragColor = fragmentColor;\n" +
+ "}\n";
+
+ private PointF blurCenter = new PointF(0.5f, 0.5f);
+ private float blurSize = 1f;
+
+ public GlZoomBlurFilter() {
+ super(DEFAULT_VERTEX_SHADER, ZOOM_BLUR_FRAGMENT_SHADER);
+ }
+
+ public void setBlurCenter(PointF blurCenter) {
+ this.blurCenter = blurCenter;
+ }
+
+ public void setBlurSize(float blurSize) {
+ this.blurSize = blurSize;
+ }
+
+ @Override
+ public void onDraw() {
+ GLES20.glUniform2f(getHandle("blurCenter"), blurCenter.x, blurCenter.y);
+ GLES20.glUniform1f(getHandle("blurSize"), blurSize);
+ }
+
+}
diff --git a/gpuv/src/main/res/values/strings.xml b/gpuv/src/main/res/values/strings.xml
new file mode 100644
index 00000000..50ba2e68
--- /dev/null
+++ b/gpuv/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">gpuv</string>
+</resources>
diff --git a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
index dc7c367e..75965a94 100644
--- a/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
+++ b/mpmaster/src/main/java/com/xypower/mpmaster/MpMasterService.java
@@ -294,8 +294,7 @@ public class MpMasterService extends Service {
}
private void loadIccid() {
- if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP_MR1)
- {
+ if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
try {
SubscriptionManager sm = SubscriptionManager.from(this);
List sis = sm.getActiveSubscriptionInfoList();
diff --git a/settings.gradle b/settings.gradle
index 1e222e1f..001b34d7 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -15,6 +15,7 @@ dependencyResolutionManagement {
rootProject.name = "MicroPhoto"
include ':app'
include ':mpmaster'
+include ':gpuv'
// include ':opencv'
// project(':opencv').projectDir = new File(opencvsdk + '/sdk')