diff --git a/samples/RecordActivity.java b/samples/RecordActivity.java
index 3ebf42a5..9ab70841 100644
--- a/samples/RecordActivity.java
+++ b/samples/RecordActivity.java
@@ -80,7 +80,7 @@
 import android.view.View.OnClickListener;
 import android.view.WindowManager;
 import android.widget.Button;
-import android.widget.LinearLayout;
+import android.widget.LinearLayout;
 import android.widget.RelativeLayout;
 
 import java.io.IOException;
@@ -90,8 +90,11 @@
 import java.util.Comparator;
 import java.util.List;
 
-import org.bytedeco.javacv.Frame;
+import org.bytedeco.javacpp.avutil;
+import org.bytedeco.javacv.FFmpegFrameFilter;
 import org.bytedeco.javacv.FFmpegFrameRecorder;
+import org.bytedeco.javacv.Frame;
+import org.bytedeco.javacv.FrameFilter;
 
 public class RecordActivity extends Activity implements OnClickListener {
 
@@ -107,6 +110,11 @@ public class RecordActivity extends Activity implements OnClickListener {
 
     private boolean isPreviewOn = false;
 
+    /* Filter information, change boolean to true if adding a filter */
+    private boolean addFilter = false;
+    private String filterString = "";
+    FFmpegFrameFilter filter;
+
     private int sampleAudioRateInHz = 44100;
     private int imageWidth = 320;
    private int imageHeight = 240;
@@ -114,8 +122,7 @@
 
     /* audio data getting thread */
     private AudioRecord audioRecord;
-    private AudioRecordRunnable audioRecordRunnable;
-    private Thread audioThread;
+    private AudioRecordRunnable audioRecordRunnable; private Thread audioThread;
     volatile boolean runAudioThread = true;
 
     /* video data getting thread */
@@ -177,8 +184,8 @@ private void initLayout() {
         Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
         screenWidth = display.getWidth();
         screenHeight = display.getHeight();
-        RelativeLayout.LayoutParams layoutParam = null;
-        LayoutInflater myInflate = null;
+        RelativeLayout.LayoutParams layoutParam = null;
+        LayoutInflater myInflate = null;
         myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
         RelativeLayout topLayout = new RelativeLayout(this);
         setContentView(topLayout);
@@ -363,9 +370,9 @@ public void run() {
            ShortBuffer audioData;
            int bufferReadResult;
 
-            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
+            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
-            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
+            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
 
            if (RECORD_LENGTH > 0) {
@@ -472,7 +479,7 @@ public int compare(final Camera.Size a, final Camera.Size b) {
            camParams.setPreviewSize(imageWidth, imageHeight);
 
            Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
-
+
            camParams.setPreviewFrameRate(frameRate);
            Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
 
@@ -523,6 +530,8 @@ public void onPreviewFrame(byte[] data, Camera camera) {
                yuvImage = images[i];
                timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
            }
+
+
            /* get video data */
            if (yuvImage != null && recording) {
                ((ByteBuffer)yuvImage.image[0].position(0)).put(data);
@@ -533,8 +542,30 @@ public void onPreviewFrame(byte[] data, Camera camera) {
                    if (t > recorder.getTimestamp()) {
                        recorder.setTimestamp(t);
                    }
-                    recorder.record(yuvImage);
-                } catch (FFmpegFrameRecorder.Exception e) {
+
+                    // Add a filter for each of your frames. The filterString is any
+                    // FFmpeg filter; see https://ffmpeg.org/ffmpeg-filters.html for the full list.
+                    if (addFilter) {
+
+                        filterString = "transpose=0";
+                        filter = new FFmpegFrameFilter(filterString, imageWidth, imageHeight);
+
+                        // NV21 is the default camera preview format on Android
+                        filter.setPixelFormat(avutil.AV_PIX_FMT_NV21);
+
+                        // Must be initialized (started) before frames are pushed through it
+                        filter.start();
+
+
+                        filter.push(yuvImage);
+                        Frame frame2;
+                        while ((frame2 = filter.pull()) != null) {
+                            recorder.record(frame2);
+                        }
+                    } else {
+                        recorder.record(yuvImage);
+                    }
+                } catch (FFmpegFrameRecorder.Exception | FrameFilter.Exception e) {
                    Log.v(LOG_TAG,e.getMessage());
                    e.printStackTrace();
                }
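Review note: as written, the patch constructs and start()s a new FFmpegFrameFilter inside onPreviewFrame() for every preview frame and never stop()s or release()s it, which is costly and leaks native resources over a long recording. Below is a minimal sketch, not part of the patch, of how the same JavaCV calls (FFmpegFrameFilter start/push/pull/stop/release) could be wired so the filter is created once per recording session; the class and method names (PreviewFrameFilter, open, filterAndRecord, close) are hypothetical.

```java
import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacv.FFmpegFrameFilter;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameFilter;

// Hypothetical helper: owns one FFmpegFrameFilter for the whole recording session
// instead of building a new one for every preview frame.
public class PreviewFrameFilter {

    private FFmpegFrameFilter filter;

    // Call once when recording starts (e.g. from initRecorder()),
    // after imageWidth/imageHeight are known.
    public void open(String filterString, int imageWidth, int imageHeight) throws FrameFilter.Exception {
        filter = new FFmpegFrameFilter(filterString, imageWidth, imageHeight);
        filter.setPixelFormat(avutil.AV_PIX_FMT_NV21); // matches the NV21 preview frames in the sample
        filter.start();
    }

    // Call for every preview frame in place of recorder.record(yuvImage);
    // records every frame the filter graph produces.
    public void filterAndRecord(Frame yuvImage, FFmpegFrameRecorder recorder)
            throws FrameFilter.Exception, FFmpegFrameRecorder.Exception {
        filter.push(yuvImage);
        Frame filtered;
        while ((filtered = filter.pull()) != null) {
            recorder.record(filtered);
        }
    }

    // Call once when recording stops (e.g. from stopRecording()) to free native resources.
    public void close() throws FrameFilter.Exception {
        if (filter != null) {
            filter.stop();
            filter.release();
            filter = null;
        }
    }
}
```

With this shape, onPreviewFrame() only pushes and pulls frames, and the example's "transpose=0" string (rotate 90 degrees counter-clockwise with a vertical flip) can be swapped for any other filter string from the list linked in the patch.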