diff --git a/build.gradle b/build.gradle
index 1a53806b..83aad61a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -69,7 +69,7 @@ android {
defaultConfig {
applicationId "org.havenapp.main"
- versionCode 113
+ versionCode 116
versionName getVersionName()
archivesBaseName = "Haven-$versionName"
minSdkVersion 16
@@ -80,8 +80,15 @@ android {
}
multiDexEnabled true
vectorDrawables.useSupportLibrary = true
+ renderscriptTargetApi 16
+ renderscriptSupportModeEnabled true
+
+ ndk {
+ abiFilters "armeabi", "armeabi-v7a", "x86"
+ }
}
+
buildTypes {
release {
minifyEnabled false
@@ -100,20 +107,20 @@ android {
}
dependencies {
+ implementation 'com.android.support:support-v4:27.1.1'
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support:design:27.1.1'
implementation 'com.android.support:cardview-v7:27.1.1'
- implementation 'com.android.support.constraint:constraint-layout:1.1.0'
- implementation 'com.github.guardianproject:signal-cli-android:-SNAPSHOT'
+ implementation 'com.android.support.constraint:constraint-layout:1.1.2'
+ implementation 'com.github.guardianproject:signal-cli-android:v0.6.0-android-beta-1'
implementation 'com.github.satyan:sugar:1.5'
- implementation 'com.squareup.picasso:picasso:2.71828'
implementation 'net.the4thdimension:audio-wife:1.0.3'
implementation 'com.github.apl-devs:appintro:v4.2.3'
implementation 'info.guardianproject.netcipher:netcipher:2.0.0-alpha1'
implementation 'com.nanohttpd:nanohttpd-webserver:2.2.0'
implementation 'me.angrybyte.picker:picker:1.3.1'
implementation 'com.github.stfalcon:frescoimageviewer:0.5.0'
- implementation 'com.facebook.fresco:fresco:1.8.1'
+ implementation 'com.facebook.fresco:fresco:1.9.0'
implementation 'com.github.derlio.waveform:library:1.0.1'
implementation 'org.firezenk:audiowaves:1.1@aar'
implementation 'com.maxproj.simplewaveform:app:1.0.0'
@@ -123,5 +130,9 @@ dependencies {
implementation('com.mikepenz:aboutlibraries:6.0.2@aar') {
transitive = true
}
+ implementation 'com.asksira.android:cameraviewplus:0.9.4'
implementation 'com.github.halilozercan:BetterVideoPlayer:1.1.0'
+ implementation 'io.github.silvaren:easyrs:0.5.3'
+ implementation 'org.jcodec:jcodec:0.2.3'
+ implementation 'org.jcodec:jcodec-android:0.2.3'
}
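Note: the jcodec artifacts added above provide AndroidSequenceEncoder, which the new CameraViewHolder further down uses to turn matched preview frames into an MP4. A minimal sketch of that API, with the file name chosen here for illustration and the frame rate mirroring the createSequenceEncoder(file, 5) call below:

    import android.graphics.Bitmap;
    import org.jcodec.api.android.AndroidSequenceEncoder;
    import java.io.File;
    import java.io.IOException;

    // Encode a sequence of Bitmaps into an MP4 at 5 fps.
    static void encodeFrames(File out, Iterable<Bitmap> frames) throws IOException {
        AndroidSequenceEncoder enc = AndroidSequenceEncoder.createSequenceEncoder(out, 5);
        for (Bitmap frame : frames)
            enc.encodeImage(frame); // append one frame to the video
        enc.finish();               // finalize headers and close the file
    }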
diff --git a/src/main/AndroidManifest.xml b/src/main/AndroidManifest.xml
index 58f6ee31..ecab15ba 100644
--- a/src/main/AndroidManifest.xml
+++ b/src/main/AndroidManifest.xml
@@ -64,9 +64,14 @@
- android:screenOrientation="portrait" />
+ android:screenOrientation="portrait"
+ android:launchMode="singleTop"
+ android:resizeableActivity="true"
+ android:supportsPictureInPicture="true"
+ android:configChanges=
+ "screenSize|smallestScreenSize|screenLayout|orientation"
+ />
@@ -111,6 +117,17 @@
+
+
+
+
+
\ No newline at end of file
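Note: the attributes added above (android:supportsPictureInPicture, android:resizeableActivity, android:launchMode="singleTop", and the configChanges set) are the manifest side of the picture-in-picture support introduced in MonitorActivity below: supportsPictureInPicture opts the activity into PiP on Android N and later, and declaring screenSize|smallestScreenSize|screenLayout|orientation in configChanges keeps the activity from being destroyed and recreated when its window shrinks into the PiP frame.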
diff --git a/src/main/java/org/havenapp/main/HavenApp.java b/src/main/java/org/havenapp/main/HavenApp.java
index 75dea58d..69ae819d 100644
--- a/src/main/java/org/havenapp/main/HavenApp.java
+++ b/src/main/java/org/havenapp/main/HavenApp.java
@@ -23,6 +23,8 @@
import android.util.Log;
import com.facebook.drawee.backends.pipeline.Fresco;
+import com.facebook.imagepipeline.core.ImagePipelineConfig;
+import com.facebook.imagepipeline.decoder.SimpleProgressiveJpegConfig;
import com.orm.SugarContext;
import java.io.IOException;
@@ -44,15 +46,22 @@ public class HavenApp extends MultiDexApplication {
public void onCreate() {
super.onCreate();
+ SugarContext.init(this);
+
mPrefs = new PreferenceManager(this);
- Fresco.initialize(this);
- SugarContext.init(this);
+ ImagePipelineConfig config = ImagePipelineConfig.newBuilder(this)
+ .setProgressiveJpegConfig(new SimpleProgressiveJpegConfig())
+ .setResizeAndRotateEnabledForNetwork(true)
+ .setDownsampleEnabled(true)
+ .build();
+
+ Fresco.initialize(this, config);
+
AppCompatDelegate.setCompatVectorFromResourcesEnabled(true);
if (mPrefs.getRemoteAccessActive())
startServer();
-
}
@@ -60,10 +69,12 @@ public void startServer ()
{
if (mOnionServer == null || (!mOnionServer.isAlive()))
{
- try {
- mOnionServer = new WebServer(this, mPrefs.getRemoteAccessCredential());
- } catch (IOException ioe) {
- Log.e("OnioNServer", "unable to start onion server", ioe);
+ if ( mPrefs.getRemoteAccessCredential() != null) {
+ try {
+ mOnionServer = new WebServer(this, mPrefs.getRemoteAccessCredential());
+ } catch (IOException ioe) {
+ Log.e("OnioNServer", "unable to start onion server", ioe);
+ }
}
}
}
diff --git a/src/main/java/org/havenapp/main/ListActivity.java b/src/main/java/org/havenapp/main/ListActivity.java
index 0a136d09..bc9bb873 100644
--- a/src/main/java/org/havenapp/main/ListActivity.java
+++ b/src/main/java/org/havenapp/main/ListActivity.java
@@ -178,30 +178,19 @@ public void onItemClick(View view, int position) {
private void deleteEvent (final Event event, final int position)
{
- final Runnable runnableDelete = new Runnable ()
- {
- public void run ()
- {
- event.delete();
- }
- };
-
- handler.postDelayed(runnableDelete,3000);
+ final Runnable runnableDelete = event::delete;
+ handler.postDelayed(runnableDelete, 5000);
events.remove(position);
adapter.notifyItemRemoved(position);
- event.delete();
-
Snackbar.make(recyclerView, getString(R.string.event_deleted), Snackbar.LENGTH_SHORT)
- .setAction(getString(R.string.undo), new View.OnClickListener() {
- @Override
- public void onClick(View v) {
- handler.removeCallbacks(runnableDelete);
- event.save();
- events.add(position, event);
- adapter.notifyItemInserted(position);
- }
+ .setAction(getString(R.string.undo), v -> {
+ handler.removeCallbacks(runnableDelete);
+ event.save();
+ events.add(position, event);
+ adapter.notifyItemInserted(position);
+ recyclerView.scrollToPosition(position);
})
.show();
}
@@ -343,16 +332,13 @@ public void run ()
handler.postDelayed(runnableDelete, 3000);
Snackbar.make(recyclerView, getString(R.string.events_deleted), Snackbar.LENGTH_SHORT)
- .setAction(getString(R.string.undo), new View.OnClickListener() {
- @Override
- public void onClick(View v) {
- handler.removeCallbacks(runnableDelete);
-
- for (Event event : removedEvents) {
- event.save();
- events.add(event);
- adapter.notifyItemInserted(events.size() - 1);
- }
+ .setAction(getString(R.string.undo), v -> {
+ handler.removeCallbacks(runnableDelete);
+
+ for (Event event : removedEvents) {
+ event.save();
+ events.add(event);
+ adapter.notifyItemInserted(events.size() - 1);
}
})
.show();
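Note: both deletion paths above now follow the same contract: the row is removed from the adapter immediately, the actual event.delete() is deferred on the handler, and undo simply cancels the pending runnable and re-inserts the row. One timing wrinkle worth knowing: Snackbar.LENGTH_SHORT dismisses after roughly 1.5 seconds, so the 5000 ms (and 3000 ms) grace periods outlive the visible undo button; the delete only commits when the delayed runnable actually fires.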
diff --git a/src/main/java/org/havenapp/main/MonitorActivity.java b/src/main/java/org/havenapp/main/MonitorActivity.java
index a454169d..39a359b0 100644
--- a/src/main/java/org/havenapp/main/MonitorActivity.java
+++ b/src/main/java/org/havenapp/main/MonitorActivity.java
@@ -19,6 +19,8 @@
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
+import android.content.res.Configuration;
+import android.os.Build;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.os.Environment;
@@ -186,10 +188,13 @@ private void updateTimerValue(int val) {
private void doCancel() {
+ boolean wasTimer = false;
+
if (cTimer != null) {
cTimer.cancel();
cTimer = null;
mOnTimerTicking = false;
+ wasTimer = true;
}
if (mIsMonitoring) {
@@ -205,10 +210,25 @@ private void doCancel() {
int timeM = preferences.getTimerDelay() * 1000;
txtTimer.setText(getTimerText(timeM));
+
+ if (!wasTimer)
+ finish();
}
}
+ @Override
+ public void onPictureInPictureModeChanged (boolean isInPictureInPictureMode, Configuration newConfig) {
+ if (isInPictureInPictureMode) {
+ // Hide the full-screen UI (controls, etc.) while in picture-in-picture mode.
+ findViewById(R.id.buttonBar).setVisibility(View.GONE);
+ } else {
+ // Restore the full-screen UI.
+ findViewById(R.id.buttonBar).setVisibility(View.VISIBLE);
+ }
+ }
+
private void showSettings() {
Intent i = new Intent(this, SettingsActivity.class);
@@ -233,7 +253,7 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
}
else if (requestCode == REQUEST_CAMERA)
{
- mFragmentCamera.resetCamera();
+ mFragmentCamera.initCamera();
}
}
@@ -288,21 +308,36 @@ private void initMonitor() {
}
- /**
- * Closes the monitor activity and unset session properties
- */
- private void close() {
-
- finish();
+ @Override
+ public void onUserLeaveHint () {
+ if (mIsMonitoring) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+ enterPictureInPictureMode();
+ }
+ }
}
-
/**
* When user closes the activity
*/
@Override
public void onBackPressed() {
- close();
+ if (mIsMonitoring && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+     enterPictureInPictureMode();
+ } else {
+     finish();
+ }
}
private void showTimeDelayDialog() {
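Note: enterPictureInPictureMode() with no arguments is the Android N form used above; it is deprecated from API 26 on, where a parameterized overload lets the activity suggest an aspect ratio. A hedged sketch of what an O-aware branch could look like (the PictureInPictureParams usage is illustrative, not part of this change):

    import android.app.PictureInPictureParams;
    import android.util.Rational;

    if (mIsMonitoring && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        PictureInPictureParams params = new PictureInPictureParams.Builder()
                .setAspectRatio(new Rational(3, 4)) // portrait preview
                .build();
        enterPictureInPictureMode(params);
    } else if (mIsMonitoring && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
        enterPictureInPictureMode();
    } else {
        finish();
    }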
diff --git a/src/main/java/org/havenapp/main/sensors/media/MotionAsyncTask.java b/src/main/java/org/havenapp/main/sensors/media/MotionAsyncTask.java
deleted file mode 100644
index c8773244..00000000
--- a/src/main/java/org/havenapp/main/sensors/media/MotionAsyncTask.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato
- * Licensed under the MIT license.
- */
-
-
-package org.havenapp.main.sensors.media;
-
-
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.Color;
-import android.graphics.ImageFormat;
-import android.graphics.Matrix;
-import android.graphics.Rect;
-import android.graphics.YuvImage;
-import android.os.Handler;
-import android.util.Log;
-
-import org.havenapp.main.sensors.motion.IMotionDetector;
-import org.havenapp.main.sensors.motion.LuminanceMotionDetector;
-
-import java.io.ByteArrayOutputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Task doing all image processing in backgrounds,
- * has a collection of listeners to notify in after having processed
- * the image
- * @author marco
- *
- */
-public class MotionAsyncTask extends Thread {
-
- // Input data
-
- private List<MotionListener> listeners = new ArrayList<>();
- private byte[] rawOldPic;
- private byte[] rawNewPic;
- private int width;
- private int height;
- private Handler handler;
- private int motionSensitivity;
-
- // Output data
-
- private Bitmap lastBitmap;
- private Bitmap newBitmap;
- private Bitmap rawBitmap;
- private boolean hasChanged;
-
- private IMotionDetector detector;
-
- public interface MotionListener {
- public void onProcess(Bitmap oldBitmap,
- Bitmap newBitmap,
- Bitmap rawBitmap,
- boolean motionDetected);
- }
-
- public void addListener(MotionListener listener) {
- listeners.add(listener);
- }
-
- public MotionAsyncTask(
- byte[] rawOldPic,
- byte[] rawNewPic,
- int width,
- int height,
- Handler updateHandler,
- int motionSensitivity) {
- this.rawOldPic = rawOldPic;
- this.rawNewPic = rawNewPic;
- this.width = width;
- this.height = height;
- this.handler = updateHandler;
- this.motionSensitivity = motionSensitivity;
-
- }
-
- public void setMotionSensitivity (int motionSensitivity)
- {
- this.motionSensitivity = motionSensitivity;
- detector.setThreshold(motionSensitivity);
- }
-
- @Override
- public void run() {
- int[] newPicLuma = ImageCodec.N21toLuma(rawNewPic, width, height);
- if (rawOldPic == null) {
- newBitmap = ImageCodec.lumaToBitmapGreyscale(newPicLuma, width, height);
- lastBitmap = newBitmap;
- } else {
- int[] oldPicLuma = ImageCodec.N21toLuma(rawOldPic, width, height);
- detector = new LuminanceMotionDetector();
- detector.setThreshold(motionSensitivity);
- List<Integer> changedPixels =
- detector.detectMotion(oldPicLuma, newPicLuma, width, height);
- hasChanged = false;
-
- int[] newPic = ImageCodec.lumaToGreyscale(newPicLuma, width, height);
- if (changedPixels != null) {
- hasChanged = true;
- for (int changedPixel : changedPixels) {
- newPic[changedPixel] = Color.YELLOW;
- }
- }
-
- lastBitmap = ImageCodec.lumaToBitmapGreyscale(oldPicLuma, width, height);
- newBitmap = Bitmap.createBitmap(newPic, width, height, Bitmap.Config.RGB_565);
-
- if (hasChanged) {
- YuvImage image = new YuvImage(rawNewPic, ImageFormat.NV21, width, height, null);
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- image.compressToJpeg(
- new Rect(0, 0, image.getWidth(), image.getHeight()), 90,
- baos);
-
- byte[] imageBytes = baos.toByteArray();
- rawBitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
- // Setting post rotate to 90
- Matrix mtx = new Matrix();
- mtx.postRotate(-90);
- // Rotating Bitmap
- rawBitmap = Bitmap.createBitmap(rawBitmap, 0, 0, width, height, mtx, true);
- }
- else
- {
- rawBitmap = null;
- }
- }
-
- Log.i("MotionAsyncTask", "Finished processing, sending results");
- handler.post(new Runnable() {
-
- public void run() {
- for (MotionListener listener : listeners) {
- Log.i("MotionAsyncTask", "Updating back view");
- listener.onProcess(
- lastBitmap,
- newBitmap,
- rawBitmap,
- hasChanged);
- }
-
- }
- });
- }
-
-
-}
diff --git a/src/main/java/org/havenapp/main/sensors/media/VideoEncoder.java b/src/main/java/org/havenapp/main/sensors/media/VideoEncoder.java
new file mode 100644
index 00000000..b88854cc
--- /dev/null
+++ b/src/main/java/org/havenapp/main/sensors/media/VideoEncoder.java
@@ -0,0 +1,488 @@
+package org.havenapp.main.sensors.media;
+
+
+import android.annotation.SuppressLint;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.support.annotation.RequiresApi;
+import android.util.Log;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * Encodes a series of video frames into an MP4 file using MediaCodec and
+ * MediaMuxer. Adapted from the Android CTS encode/decode test harness.
+ */
+public class VideoEncoder {
+
+ private static final String TAG = "VideoEncoder";
+ private static final boolean VERBOSE = false; // lots of logging
+ // parameters for the encoder
+ private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
+ private static final int FRAME_RATE = 10; // 10fps
+ private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
+ // size of a frame, in pixels
+ private int mWidth = -1;
+ private int mHeight = -1;
+ // bit rate, in bits per second
+ private int mBitRate = -1;
+ // largest color component delta seen (leftover from the CTS test; only
+ // initialized and logged here)
+
+ private File outputFile = null;
+ private MediaCodec mEncoder;
+ private MediaMuxer mMuxer;
+ private int mTrackIndex;
+ private boolean mMuxerStarted;
+ private ArrayList<File> frames;
+
+ public VideoEncoder(ArrayList<File> frames, File outputFile)
+ {
+ this.frames = frames;
+ this.outputFile = outputFile;
+ }
+
+ /**
+ * Encodes the frame list to the output file as AVC video at the given
+ * frame size and bit rate. Despite the legacy name, no decoding is done.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
+ public boolean encodeDecodeVideoFromBufferToSurface(int width, int height,
+ int bitRate) throws Throwable
+ {
+ setParameters(width, height, bitRate);
+ return encodeDecodeVideoFromBuffer();
+ }
+
+ /**
+ * Sets the desired frame size and bit rate.
+ */
+ private void setParameters(int width, int height, int bitRate)
+ {
+ if ((width % 16) != 0 || (height % 16) != 0)
+ {
+ Log.w(TAG, "WARNING: width or height not multiple of 16");
+ }
+ mWidth = width;
+ mHeight = height;
+ mBitRate = bitRate;
+ }
+
+ /**
+ * Encodes the frames from byte[] buffers and muxes the result into an
+ * MP4 file.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
+ @SuppressLint("InlinedApi")
+ public boolean encodeDecodeVideoFromBuffer()
+ throws Exception
+ {
+ mLargestColorDelta = -1;
+ boolean result = true;
+ try
+ {
+ MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
+ if (codecInfo == null)
+ {
+ // Bail out gracefully if the device has no AVC encoder
+ Log.e(TAG, "Unable to find an appropriate codec for "
+ + MIME_TYPE);
+ return false;
+ }
+ if (VERBOSE)
+ Log.d(TAG, "found codec: " + codecInfo.getName());
+ int colorFormat;
+ try
+ {
+ colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
+ } catch (Exception e)
+ {
+ colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
+ }
+ if (VERBOSE)
+ Log.d(TAG, "found colorFormat: " + colorFormat);
+ // We avoid the device-specific limitations on width and height by using
+ // values that are multiples of 16, which all tested devices seem to handle.
+ MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE,
+ mWidth, mHeight);
+ // Set some properties. Failing to specify some of these can cause the
+ // MediaCodec configure() call to throw an unhelpful exception.
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+ if (VERBOSE)
+ Log.d(TAG, "format: " + format);
+ // Create a MediaCodec for the desired codec, then configure it as an
+ // encoder with our desired properties.
+ mEncoder = MediaCodec.createByCodecName(codecInfo.getName());
+ mEncoder.configure(format, null, null,
+ MediaCodec.CONFIGURE_FLAG_ENCODE);
+ mEncoder.start();
+ // Create the muxer that will write the encoded frames into the MP4
+ // output file.
+ String outputPath = outputFile.getAbsolutePath();
+ try
+ {
+ mMuxer = new MediaMuxer(outputPath,
+ MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+ } catch (IOException ioe)
+ {
+ Log.e(TAG, "MediaMuxer creation failed", ioe);
+ }
+ result = doEncodeDecodeVideoFromBuffer(mEncoder, colorFormat);
+ } finally
+ {
+ if (mEncoder != null)
+ {
+ mEncoder.stop();
+ mEncoder.release();
+ }
+ if (mMuxer != null)
+ {
+ mMuxer.stop();
+ mMuxer.release();
+ }
+ if (VERBOSE)
+ Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
+ }
+ return result;
+ }
+
+ /**
+ * Returns the first codec capable of encoding the specified MIME type, or
+ * null if no match was found.
+ */
+ private static MediaCodecInfo selectCodec(String mimeType)
+ {
+ int numCodecs = MediaCodecList.getCodecCount();
+ for (int i = 0; i < numCodecs; i++)
+ {
+ MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
+ if (!codecInfo.isEncoder())
+ {
+ continue;
+ }
+ String[] types = codecInfo.getSupportedTypes();
+ for (int j = 0; j < types.length; j++)
+ {
+ if (types[j].equalsIgnoreCase(mimeType))
+ {
+ return codecInfo;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Returns a color format that is supported by the codec and understood by
+ * this code, or 0 if no recognized format is found.
+ */
+ private static int selectColorFormat(MediaCodecInfo codecInfo,
+ String mimeType)
+ {
+ MediaCodecInfo.CodecCapabilities capabilities = codecInfo
+ .getCapabilitiesForType(mimeType);
+ for (int i = 0; i < capabilities.colorFormats.length; i++)
+ {
+ int colorFormat = capabilities.colorFormats[i];
+ if (isRecognizedFormat(colorFormat))
+ {
+ return colorFormat;
+ }
+ }
+ return 0; // no recognized format found
+ }
+
+ /**
+ * Returns true if this is a color format that this code understands
+ * (i.e. we know how to read and generate frames in this format).
+ */
+ private static boolean isRecognizedFormat(int colorFormat)
+ {
+ switch (colorFormat)
+ {
+ // these are the formats we know how to handle
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Does the actual work for encoding frames from buffers of byte[].
+ */
+ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
+ @SuppressLint("InlinedApi")
+ private boolean doEncodeDecodeVideoFromBuffer(MediaCodec encoder,
+ int encoderColorFormat)
+ {
+ final int TIMEOUT_USEC = 10000;
+ ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int generateIndex = 0;
+ // yuv format
+ byte[] frameData = new byte[mWidth * mHeight * 3 / 2];
+ // Loop until the input side is done.
+ boolean inputDone = false;
+ // If we're not done submitting frames, generate a new one and submit it.
+ // By doing this on every loop we ensure that the encoder always has work
+ // to do.
+ while (!inputDone)
+ {
+ int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
+ if (inputBufIndex >= 0)
+ {
+ long ptsUsec = computePresentationTime(generateIndex);
+ if (generateIndex >= frames.size())
+ {
+ // Send an empty frame with the end-of-stream flag set. If we set EOS
+ // on a frame with data, that frame data will be ignored, and the
+ // output will be short one frame.
+ encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
+ MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ inputDone = true;
+ drainEncoder(true, info);
+ } else
+ {
+ try
+ {
+ generateFrame(generateIndex, encoderColorFormat,
+ frameData);
+ } catch (Exception e)
+ {
+ Log.i(TAG, "meet a different type of image");
+ Arrays.fill(frameData, (byte) 0);
+ }
+ if (VERBOSE)
+ Log.i(TAG, "generateIndex: " + generateIndex
+ + ", size: " + frames.size());
+ ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
+ // the buffer should be sized to hold one full frame
+ inputBuf.clear();
+ inputBuf.put(frameData);
+ encoder.queueInputBuffer(inputBufIndex, 0,
+ frameData.length, ptsUsec, 0);
+ drainEncoder(false, info);
+ }
+ generateIndex++;
+ } else
+ {
+ // either all in use, or we timed out during initial setup
+ if (VERBOSE)
+ Log.i(TAG, "input buffer not available");
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Uses the muxer to write encoder output into the MP4 file.
+ *
+ * @param endOfStream
+ * true if this is the last frame
+ * @param mBufferInfo
+ * the BufferInfo reused for data from the encoder
+ */
+ @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
+ private void drainEncoder(boolean endOfStream, MediaCodec.BufferInfo mBufferInfo)
+ {
+ final int TIMEOUT_USEC = 10000;
+
+ if (endOfStream)
+ {
+ try
+ {
+ // Only meaningful for Surface input; with byte[] input (as here) EOS
+ // is already signaled via BUFFER_FLAG_END_OF_STREAM, and this call
+ // throws IllegalStateException, which we deliberately swallow.
+ mEncoder.signalEndOfInputStream();
+ } catch (Exception ignored)
+ {
+ }
+ }
+
+ ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
+ while (true)
+ {
+ int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo,
+ TIMEOUT_USEC);
+ if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER)
+ {
+ // no output available yet
+ if (!endOfStream)
+ {
+ break; // out of while
+ } else
+ {
+ if (VERBOSE)
+ Log.i(TAG, "no output available, spinning to await EOS");
+ }
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
+ {
+ // not expected for an encoder
+ encoderOutputBuffers = mEncoder.getOutputBuffers();
+ } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
+ {
+ // should happen before receiving buffers, and should only
+ // happen once
+ if (mMuxerStarted)
+ {
+ throw new RuntimeException("format changed twice");
+ }
+ MediaFormat newFormat = mEncoder.getOutputFormat();
+ if (VERBOSE)
+ Log.i(TAG, "encoder output format changed: " + newFormat);
+
+ // now that we have the Magic Goodies, start the muxer
+ mTrackIndex = mMuxer.addTrack(newFormat);
+ mMuxer.start();
+ mMuxerStarted = true;
+ } else if (encoderStatus < 0)
+ {
+ if (VERBOSE)
+ Log.i(TAG,
+ "unexpected result from encoder.dequeueOutputBuffer: "
+ + encoderStatus);
+ } else
+ {
+ ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
+ if (encodedData == null)
+ {
+ throw new RuntimeException("encoderOutputBuffer "
+ + encoderStatus + " was null");
+ }
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)
+ {
+ // The codec config data was pulled out and fed to the muxer
+ // when we got
+ // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
+ if (VERBOSE)
+ Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
+ mBufferInfo.size = 0;
+ }
+
+ if (mBufferInfo.size != 0)
+ {
+ if (!mMuxerStarted)
+ {
+ throw new RuntimeException("muxer hasn't started");
+ }
+
+ // adjust the ByteBuffer values to match BufferInfo
+ encodedData.position(mBufferInfo.offset);
+ encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
+
+ if (VERBOSE)
+ Log.d(TAG, "BufferInfo: " + mBufferInfo.offset + ","
+ + mBufferInfo.size + ","
+ + mBufferInfo.presentationTimeUs);
+
+ try
+ {
+ mMuxer.writeSampleData(mTrackIndex, encodedData,
+ mBufferInfo);
+ } catch (Exception e)
+ {
+ Log.i(TAG, "Too many frames");
+ }
+ }
+
+ mEncoder.releaseOutputBuffer(encoderStatus, false);
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0)
+ {
+ if (!endOfStream)
+ {
+ if (VERBOSE)
+ Log.i(TAG, "reached end of stream unexpectedly");
+ } else
+ {
+ if (VERBOSE)
+ Log.i(TAG, "end of stream reached");
+ }
+ break; // out of while
+ }
+ }
+ }
+ }
+
+ /**
+ * Generates data for frame N into the supplied buffer. The OpenCV-based
+ * YUV conversion below is commented out, so frames are currently blank.
+ */
+ private void generateFrame(int frameIndex, int colorFormat, byte[] frameData)
+ {
+ // Set to zero. In YUV this is a dull green.
+ Arrays.fill(frameData, (byte) 0);
+
+ /**
+ Mat mat = Highgui.imread(frames.get(frameIndex).getAbsolutePath());
+
+// Mat dst = new Mat(mWidth, mHeight * 3 / 2, CvType.CV_8UC1);
+ Mat dst = new Mat();
+ Imgproc.cvtColor(mat, dst, Imgproc.COLOR_RGBA2YUV_I420);
+
+ // use array instead of mat to improve the speed
+ dst.get(0, 0, frameData);
+
+ byte[] temp = frameData.clone();
+ int margin = mHeight / 4;
+ int location = mHeight;
+ int step = 0;
+ for (int i = mHeight; i < mHeight + margin; i++)
+ {
+ for (int j = 0; j < mWidth; j++)
+ {
+ byte uValue = temp[i * mWidth + j];
+ byte vValue = temp[(i + margin) * mWidth + j];
+
+ frameData[location * mWidth + step] = uValue;
+ frameData[location * mWidth + step + 1] = vValue;
+ step += 2;
+ if (step >= mWidth)
+ {
+ location++;
+ step = 0;
+ }
+ }
+ }
+ **/
+ }
+
+ /**
+ * Generates the presentation time for frame N, in microseconds:
+ * 132 + N * 1,000,000 / FRAME_RATE, i.e. 100 ms per frame at 10 fps.
+ */
+ private static long computePresentationTime(int frameIndex)
+ {
+ long value = frameIndex;
+ return 132 + value * 1000000 / FRAME_RATE;
+ }
+}
\ No newline at end of file
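Note: a minimal usage sketch of the class above, based only on the public surface shown (the frame source, output location, dimensions, and bit rate here are illustrative):

    ArrayList<File> frames = collectFrameFiles();    // hypothetical helper
    File output = new File(framesDir, "motion.mp4"); // hypothetical location
    VideoEncoder videoEncoder = new VideoEncoder(frames, output);
    try {
        // Dimensions should be multiples of 16 (see setParameters()).
        videoEncoder.encodeDecodeVideoFromBufferToSurface(640, 480, 1000000);
    } catch (Throwable t) {
        Log.e("VideoEncoder", "encoding failed", t);
    }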
diff --git a/src/main/java/org/havenapp/main/sensors/motion/CameraViewHolder.java b/src/main/java/org/havenapp/main/sensors/motion/CameraViewHolder.java
new file mode 100644
index 00000000..c9d24b47
--- /dev/null
+++ b/src/main/java/org/havenapp/main/sensors/motion/CameraViewHolder.java
@@ -0,0 +1,425 @@
+
+/*
+ * Copyright (c) 2017 Nathanial Freitas / Guardian Project
+ * * Licensed under the GPLv3 license.
+ *
+ * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato
+ * Licensed under the MIT license.
+ */
+
+package org.havenapp.main.sensors.motion;
+
+import android.app.Activity;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.hardware.Camera;
+import android.os.Environment;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Message;
+import android.os.Messenger;
+import android.os.RemoteException;
+import android.support.v8.renderscript.RenderScript;
+import android.util.Log;
+import android.view.Surface;
+
+import com.google.android.cameraview.CameraView;
+
+import org.havenapp.main.PreferenceManager;
+import org.havenapp.main.model.EventTrigger;
+import org.havenapp.main.service.MonitorService;
+import org.jcodec.api.android.AndroidSequenceEncoder;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import io.github.silvaren.easyrs.tools.Nv21Image;
+
+public class CameraViewHolder {
+
+ /**
+ * Object to retrieve and set shared preferences
+ */
+ private PreferenceManager prefs;
+
+ private final static int PREVIEW_INTERVAL = 200;
+
+ private List<MotionDetector.MotionListener> listeners = new ArrayList<>();
+
+ /**
+ * Timestamp of the last picture processed
+ */
+ private long lastTimestamp;
+ /**
+ * Last picture processed
+ */
+ private byte[] lastPic;
+ /**
+ * True if frames are currently being encoded into a video
+ */
+ private boolean doingVideoProcessing = false;
+
+ /**
+ * Handler used to update back the UI after motion detection
+ */
+ private final Handler updateHandler = new Handler();
+
+ /**
+ * Sensitivity of motion detection
+ */
+ private int motionSensitivity = LuminanceMotionDetector.MOTION_MEDIUM;
+
+ /**
+ * holder of the CameraView and state of running
+ */
+ private CameraView cameraView = null;
+ private boolean isCameraStarted = false;
+
+ /**
+ * Messenger used to signal motion to the alert service
+ */
+ private Messenger serviceMessenger = null;
+ //private Camera camera;
+ private Activity context;
+ private MotionDetector task;
+
+ AndroidSequenceEncoder encoder;
+ private String videoFile;
+
+ //for managing bitmap processing
+ private RenderScript renderScript;
+
+ private ServiceConnection mConnection = new ServiceConnection() {
+
+ public void onServiceConnected(ComponentName className,
+ IBinder service) {
+ Log.i("CameraFragment", "SERVICE CONNECTED");
+ // We've bound to LocalService, cast the IBinder and get LocalService instance
+ serviceMessenger = new Messenger(service);
+ }
+
+ public void onServiceDisconnected(ComponentName arg0) {
+ Log.i("CameraFragment", "SERVICE DISCONNECTED");
+ serviceMessenger = null;
+ }
+ };
+
+ public CameraViewHolder(Activity context, CameraView cameraView) {
+ //super(context);
+ this.context = context;
+ this.cameraView = cameraView;
+ this.renderScript = RenderScript.create(context); // where context can be your activity, application, etc.
+
+ prefs = new PreferenceManager(context);
+
+ task = new MotionDetector(
+ renderScript,
+ updateHandler,
+ motionSensitivity);
+
+ task.addListener((sourceImage, detectedImage, rawBitmap, motionDetected) -> {
+
+ for (MotionDetector.MotionListener listener : listeners)
+ listener.onProcess(sourceImage,detectedImage,rawBitmap,motionDetected);
+
+ if (motionDetected) {
+
+ if (serviceMessenger != null) {
+ Message message = new Message();
+ message.what = EventTrigger.CAMERA;
+
+ try {
+
+ File fileImageDir = new File(Environment.getExternalStorageDirectory(), prefs.getImagePath());
+ fileImageDir.mkdirs();
+
+ String ts = new Date().getTime() + ".jpg";
+
+ File fileImage = new File(fileImageDir, "detected.original." + ts);
+ FileOutputStream stream = new FileOutputStream(fileImage);
+ rawBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
+
+ stream.flush();
+ stream.close();
+ message.getData().putString("path", fileImage.getAbsolutePath());
+
+ //store the still match frame, even if doing video
+ serviceMessenger.send(message);
+
+ if (prefs.getVideoMonitoringActive() && (!doingVideoProcessing)) {
+ recordVideo();
+
+ }
+
+ } catch (Exception e) {
+ Log.e("CameraViewHolder", "error creating image", e);
+ }
+ }
+ }
+
+
+ });
+ /*
+ * We bind to the alert service
+ */
+ this.context.bindService(new Intent(context,
+ MonitorService.class), mConnection, Context.BIND_ABOVE_CLIENT);
+ }
+
+ public void setMotionSensitivity (int motionSensitivity)
+ {
+ this.motionSensitivity = motionSensitivity;
+ task.setMotionSensitivity(motionSensitivity);
+ }
+
+ public void addListener(MotionDetector.MotionListener listener) {
+ listeners.add(listener);
+ }
+
+
+ /**
+ * Starts the CameraView and registers a frame listener that routes
+ * preview frames either to motion detection or, while recording, to
+ * the video encoder.
+ */
+ public synchronized void startCamera() {
+
+
+ updateCamera();
+
+ cameraView.start();
+
+ cameraView.setOnFrameListener((data, width, height, rotationDegrees) -> {
+
+ long now = System.currentTimeMillis();
+ if (now < CameraViewHolder.this.lastTimestamp + PREVIEW_INTERVAL)
+ return;
+
+ CameraViewHolder.this.lastTimestamp = now;
+
+ if (!doingVideoProcessing) {
+
+ Log.i("CameraViewHolder", "Processing new image");
+
+ mDecodeThreadPool.execute(() -> processNewFrame(data, width, height, rotationDegrees));
+ } else {
+ mEncodeVideoThreadPool.execute(() -> recordNewFrame(data, width, height, rotationDegrees));
+ }
+ });
+
+
+ }
+
+ public void updateCamera ()
+ {
+ switch (prefs.getCamera()) {
+ case PreferenceManager.FRONT:
+ if (cameraView.getFacing() != CameraView.FACING_FRONT)
+ cameraView.setFacing(CameraView.FACING_FRONT);
+ break;
+ case PreferenceManager.BACK:
+ if (cameraView.getFacing() != CameraView.FACING_BACK)
+ cameraView.setFacing(CameraView.FACING_BACK);
+ break;
+ default:
+ // camera = null;
+ break;
+ }
+ }
+
+ // A queue of Runnables
+ private final BlockingQueue<Runnable> mDecodeWorkQueue = new LinkedBlockingQueue<>();
+
+ // Creates a thread pool manager
+ private ThreadPoolExecutor mDecodeThreadPool = new ThreadPoolExecutor(
+ 1, // Initial pool size
+ 1, // Max pool size
+ 10,
+ TimeUnit.SECONDS,
+ mDecodeWorkQueue);
+
+ // A queue of Runnables
+ private final BlockingQueue<Runnable> mEncodeVideoWorkQueue = new LinkedBlockingQueue<>();
+
+ // Creates a thread pool manager
+ private ThreadPoolExecutor mEncodeVideoThreadPool = new ThreadPoolExecutor(
+ 1, // Initial pool size
+ 1, // Max pool size
+ 10,
+ TimeUnit.SECONDS,
+ mEncodeVideoWorkQueue);
+
+
+ private Matrix mtxVideoRotate;
+
+ private void recordNewFrame (byte[] data, int width, int height, int rotationDegrees)
+ {
+
+ Bitmap bitmap = Nv21Image.nv21ToBitmap(renderScript, data, width, height);
+
+ bitmap = Bitmap.createBitmap(bitmap,0,0,width,height,mtxVideoRotate,true);
+
+ try {
+ if (encoder != null)
+ encoder.encodeImage(bitmap);
+
+ bitmap.recycle();
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void finishVideoEncoding ()
+ {
+ try {
+ encoder.finish();
+
+ if (serviceMessenger != null) {
+ Message message = new Message();
+ message.what = EventTrigger.CAMERA_VIDEO;
+ message.getData().putString(MonitorService.KEY_PATH, videoFile);
+ try {
+ serviceMessenger.send(message);
+ } catch (RemoteException e) {
+ e.printStackTrace();
+ }
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private synchronized void processNewFrame (byte[] data, int width, int height, int rotationDegrees)
+ {
+ task.detect(
+ lastPic,
+ data,
+ width,
+ height,
+ cameraView.getDefaultOrientation(),
+ cameraView.getFacing());
+
+ lastPic = data;
+
+ }
+
+
+ private synchronized boolean recordVideo() {
+
+ if (doingVideoProcessing)
+ return false;
+ String ts1 = String.valueOf(new Date().getTime());
+ videoFile = Environment.getExternalStorageDirectory() + File.separator + prefs.getImagePath() + File.separator + ts1 + ".mp4";
+ try {
+ encoder = AndroidSequenceEncoder.createSequenceEncoder(new File(videoFile),5);
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ mtxVideoRotate = new Matrix();
+
+ if (cameraView.getFacing() == CameraView.FACING_FRONT) {
+ mtxVideoRotate.postRotate(-cameraView.getDefaultOrientation());
+ mtxVideoRotate.postScale(-1, 1, cameraView.getWidth() / 2, cameraView.getHeight() / 2);
+ }
+ else
+ mtxVideoRotate.postRotate(cameraView.getDefaultOrientation());
+
+ doingVideoProcessing = true;
+
+ int seconds = prefs.getMonitoringTime() * 1000;
+ updateHandler.postDelayed(() -> {
+ doingVideoProcessing = false;
+ finishVideoEncoding();
+ }, seconds);
+
+ for (MotionDetector.MotionListener listener : listeners)
+ listener.onProcess(null, null, null, false);
+
+ return true;
+ }
+
+
+ public synchronized void stopCamera ()
+ {
+ if (cameraView != null) {
+ cameraView.stop();
+ }
+ }
+
+ public int getCameraFacing() {
+ return cameraView.getFacing();
+ }
+
+ public void destroy ()
+ {
+ if (mConnection != null) {
+ this.context.unbindService(mConnection);
+ mConnection = null;
+ }
+ stopCamera();
+ }
+
+ public int getCorrectCameraOrientation(int facing, int orientation) {
+
+ int rotation = context.getWindowManager().getDefaultDisplay().getRotation();
+ int degrees = 0;
+
+ switch(rotation){
+ case Surface.ROTATION_0:
+ degrees = 0;
+ break;
+
+ case Surface.ROTATION_90:
+ degrees = 90;
+ break;
+
+ case Surface.ROTATION_180:
+ degrees = 180;
+ break;
+
+ case Surface.ROTATION_270:
+ degrees = 270;
+ break;
+
+ }
+
+ int result;
+ if(facing == CameraView.FACING_FRONT){
+ result = (orientation + degrees) % 360;
+ result = (360 - result) % 360;
+ }else{
+ result = (orientation - degrees + 360) % 360;
+ }
+
+ return result;
+ }
+
+ public boolean doingVideoProcessing ()
+ {
+ return doingVideoProcessing;
+ }
+
+}
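Note: a minimal sketch of how a host screen drives CameraViewHolder, using only the public methods defined above (the view lookup id is illustrative; the real wiring lives in CameraFragment below):

    CameraView cameraView = findViewById(R.id.camera_view); // hypothetical id
    CameraViewHolder holder = new CameraViewHolder(this, cameraView);

    // Listener runs on the UI thread via the update handler.
    holder.addListener((oldBitmap, newBitmap, rawBitmap, motionDetected) -> {
        if (motionDetected) {
            // update the preview overlay / status display
        }
    });

    holder.startCamera();  // begin preview and frame analysis
    // ... later, e.g. in onPause()/onDestroy():
    holder.stopCamera();
    holder.destroy();      // also unbinds the MonitorService connection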
diff --git a/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java b/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java
new file mode 100644
index 00000000..d0477519
--- /dev/null
+++ b/src/main/java/org/havenapp/main/sensors/motion/MotionDetector.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato
+ * Licensed under the MIT license.
+ */
+
+
+package org.havenapp.main.sensors.motion;
+
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Color;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.os.Handler;
+import android.support.v8.renderscript.RenderScript;
+import android.util.Log;
+
+import com.google.android.cameraview.CameraView;
+
+import org.havenapp.main.sensors.media.ImageCodec;
+
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import io.github.silvaren.easyrs.tools.Nv21Image;
+
+/**
+ * Performs all image processing in the background and notifies a
+ * collection of listeners after each frame has been processed.
+ * @author marco
+ *
+ */
+public class MotionDetector {
+
+ // Input data
+
+ private List<MotionListener> listeners = new ArrayList<>();
+ private Handler handler;
+ private int motionSensitivity;
+ // Output data
+
+ private boolean hasChanged;
+
+ private IMotionDetector detector;
+
+ private RenderScript renderScript;
+
+ private int detectColor = Color.YELLOW;
+
+ public interface MotionListener {
+ public void onProcess(Bitmap oldBitmap,
+ Bitmap newBitmap,
+ Bitmap rawBitmap,
+ boolean motionDetected);
+ }
+
+ public void addListener(MotionListener listener) {
+ listeners.add(listener);
+ }
+
+ public MotionDetector(
+ RenderScript renderScript,
+ Handler updateHandler,
+ int motionSensitivity) {
+ this.renderScript = renderScript;
+ this.handler = updateHandler;
+ this.motionSensitivity = motionSensitivity;
+ detector = new LuminanceMotionDetector();
+
+ }
+
+ public void setDetectColor (int detectColor)
+ {
+ this.detectColor = detectColor;
+ }
+
+ public void setMotionSensitivity (int motionSensitivity)
+ {
+ this.motionSensitivity = motionSensitivity;
+ detector.setThreshold(motionSensitivity);
+ }
+
+ public void detect(byte[] rawOldPic,
+ byte[] rawNewPic,
+ int width,
+ int height,
+ int rotationDegrees,
+ int cameraFacing) {
+
+ int[] newPicLuma = ImageCodec.N21toLuma(rawNewPic, width, height);
+ if (rawOldPic != null) {
+
+ int[] oldPicLuma = ImageCodec.N21toLuma(rawOldPic, width, height);
+ detector.setThreshold(motionSensitivity);
+ List<Integer> changedPixels =
+ detector.detectMotion(oldPicLuma, newPicLuma, width, height);
+ hasChanged = false;
+
+ int[] newPic = ImageCodec.lumaToGreyscale(newPicLuma, width, height);
+
+ if (changedPixels != null) {
+ hasChanged = true;
+
+ }
+
+
+ if (hasChanged) {
+
+
+ Bitmap lastBitmap = ImageCodec.lumaToBitmapGreyscale(oldPicLuma, width, height);
+
+ for (int i = 0; i < newPic.length; i++)
+ newPic[i] = Color.TRANSPARENT;
+
+ for (int changedPixel : changedPixels) {
+ newPic[changedPixel] = detectColor;
+ }
+
+
+ Matrix mtx = new Matrix();
+
+ if (cameraFacing == CameraView.FACING_FRONT) {
+ mtx.postRotate(-rotationDegrees);
+ mtx.postScale(-1, 1, width / 2, height / 2);
+ }
+ else
+ mtx.postRotate(rotationDegrees);
+
+
+ Bitmap newBitmap
+ = Bitmap.createBitmap(Bitmap.createBitmap(newPic, width, height, Bitmap.Config.ARGB_4444), 0, 0, width, height, mtx, true);
+
+ Bitmap rawBitmap = Bitmap.createBitmap(Nv21Image.nv21ToBitmap(renderScript, rawNewPic, width, height),0,0,width,height,mtx,true);
+
+ handler.post(() -> {
+ for (MotionListener listener : listeners) {
+ listener.onProcess(
+ lastBitmap,
+ newBitmap,
+ rawBitmap,
+ hasChanged);
+ }
+
+ });
+ }
+ else
+ {
+ //nothing changed
+ handler.post(() -> {
+ for (MotionListener listener : listeners) {
+ listener.onProcess(
+ null,
+ null,
+ null,
+ hasChanged);
+ }
+
+ });
+ }
+
+ }
+
+
+ }
+
+
+}
diff --git a/src/main/java/org/havenapp/main/sensors/motion/Preview.java b/src/main/java/org/havenapp/main/sensors/motion/Preview.java
deleted file mode 100644
index 4cae11e3..00000000
--- a/src/main/java/org/havenapp/main/sensors/motion/Preview.java
+++ /dev/null
@@ -1,473 +0,0 @@
-
-/*
- * Copyright (c) 2017 Nathanial Freitas / Guardian Project
- * * Licensed under the GPLv3 license.
- *
- * Copyright (c) 2013-2015 Marco Ziccardi, Luca Bonato
- * Licensed under the MIT license.
- */
-
-package org.havenapp.main.sensors.motion;
-
-import android.content.ComponentName;
-import android.content.Context;
-import android.content.Intent;
-import android.content.ServiceConnection;
-import android.graphics.Bitmap;
-import android.hardware.Camera;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.PreviewCallback;
-import android.hardware.Camera.Size;
-import android.media.AudioManager;
-import android.media.MediaRecorder;
-import android.os.Environment;
-import android.os.Handler;
-import android.os.IBinder;
-import android.os.Message;
-import android.os.Messenger;
-import android.os.RemoteException;
-import android.util.Log;
-import android.view.Surface;
-import android.view.SurfaceHolder;
-import android.view.SurfaceView;
-import android.view.WindowManager;
-
-import org.havenapp.main.PreferenceManager;
-import org.havenapp.main.model.EventTrigger;
-import org.havenapp.main.sensors.media.ImageCodec;
-import org.havenapp.main.sensors.media.MediaRecorderTask;
-import org.havenapp.main.sensors.media.MotionAsyncTask;
-import org.havenapp.main.service.MonitorService;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-public class Preview extends SurfaceView implements SurfaceHolder.Callback {
-
- /**
- * Object to retrieve and set shared preferences
- */
- private PreferenceManager prefs;
- private int cameraFacing = 0;
-
- private final static int PREVIEW_INTERVAL = 500;
-
- private List<MotionAsyncTask.MotionListener> listeners = new ArrayList<>();
-
- /**
- * Timestamp of the last picture processed
- */
- private long lastTimestamp;
- /**
- * Last picture processed
- */
- private byte[] lastPic;
- /**
- * True IFF there's an async task processing images
- */
- private boolean doingProcessing, doingVideoProcessing = false;
-
- /**
- * Handler used to update back the UI after motion detection
- */
- private final Handler updateHandler = new Handler();
-
- /**
- * Last frame captured
- */
- private int imageCount = 0;
-
- /**
- * Sensitivity of motion detection
- */
- private int motionSensitivity = LuminanceMotionDetector.MOTION_MEDIUM;
-
- /**
- * Messenger used to signal motion to the alert service
- */
- private Messenger serviceMessenger = null;
- private MediaRecorder mediaRecorder = null;
- private SurfaceHolder mHolder;
- private Camera camera;
- private Context context;
- private MotionAsyncTask task;
- private String videoFile;
- private ServiceConnection mConnection = new ServiceConnection() {
-
- public void onServiceConnected(ComponentName className,
- IBinder service) {
- Log.i("CameraFragment", "SERVICE CONNECTED");
- // We've bound to LocalService, cast the IBinder and get LocalService instance
- serviceMessenger = new Messenger(service);
- }
-
- public void onServiceDisconnected(ComponentName arg0) {
- Log.i("CameraFragment", "SERVICE DISCONNECTED");
- serviceMessenger = null;
- }
- };
-
- public Preview (Context context) {
- super(context);
- this.context = context;
- // Install a SurfaceHolder.Callback so we get notified when the
- // underlying surface is created and destroyed.
- mHolder = getHolder();
- mHolder.addCallback(this);
- prefs = new PreferenceManager(context);
-
- motionSensitivity = prefs.getCameraSensitivity();
- /*
- * We bind to the alert service
- */
- this.context.bindService(new Intent(context,
- MonitorService.class), mConnection, Context.BIND_ABOVE_CLIENT);
- }
-
- public void setMotionSensitivity (int
- motionSensitivity )
- {
- this.
- motionSensitivity = motionSensitivity;
- }
-
- public void addListener(MotionAsyncTask.MotionListener listener) {
- listeners.add(listener);
- }
-
-
- /**
- * Called on the creation of the surface:
- * setting camera parameters to lower possible resolution
- * (preferred is 640x480)
- * in order to minimize CPU usage
- */
- public void surfaceCreated(SurfaceHolder holder) {
-
-
- if (camera != null)
- stopCamera();
- /*
- * The Surface has been created, acquire the camera and tell it where
- * to draw.
- * If the selected camera is the front one we open it
- */
- switch (prefs.getCamera()) {
- case PreferenceManager.FRONT:
- Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
- int cameraCount = Camera.getNumberOfCameras();
- for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
- Camera.getCameraInfo(camIdx, cameraInfo);
- if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
- try {
- camera = Camera.open(camIdx);
- cameraFacing = Camera.CameraInfo.CAMERA_FACING_FRONT;
- } catch (RuntimeException e) {
- Log.e("Preview", "Camera failed to open: " + e.getLocalizedMessage());
- }
- }
- }
- break;
- case PreferenceManager.BACK:
-
- camera = Camera.open();
- cameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
- break;
- default:
- camera = null;
- break;
- }
-
- if (camera != null) {
-
- final Camera.Parameters parameters = camera.getParameters();
-
- try {
- List<Size> sizesPreviews = parameters.getSupportedPreviewSizes();
-
- Size bestSize = sizesPreviews.get(0);
-
- for (int i = 1; i < sizesPreviews.size(); i++) {
- if ((sizesPreviews.get(i).width * sizesPreviews.get(i).height) >
- (bestSize.width * bestSize.height)) {
- bestSize = sizesPreviews.get(i);
- }
- }
-
- parameters.setPreviewSize(bestSize.width, bestSize.height);
-
- } catch (Exception e) {
- Log.w("Camera", "Error setting camera preview size", e);
- }
-
- try {
- List<int[]> ranges = parameters.getSupportedPreviewFpsRange();
- int[] bestRange = ranges.get(0);
- for (int i = 1; i < ranges.size(); i++) {
- if (ranges.get(i)[1] >
- bestRange[1]) {
- bestRange[0] = ranges.get(i)[0];
- bestRange[1] = ranges.get(i)[1];
-
- }
- }
- parameters.setPreviewFpsRange(bestRange[0], bestRange[1]);
- } catch (Exception e) {
- Log.w("Camera", "Error setting frames per second", e);
- }
-
- try {
- parameters.setAutoExposureLock(false);
- parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
- } catch (Exception e) {
- }
- /*
- * If the flash is needed
- */
- if (prefs.getFlashActivation()) {
- Log.i("Preview", "Flash activated");
- parameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
- }
-
- camera.setParameters(parameters);
-
- try {
-
- camera.setPreviewDisplay(mHolder);
-
- camera.setPreviewCallback(new PreviewCallback() {
-
- public void onPreviewFrame(byte[] data, final Camera cam) {
-
- Camera.Size size;
- try {
- size = cam.getParameters().getPreviewSize();
- } catch(RuntimeException e) {
- return;
- }
- if (size == null) return;
- long now = System.currentTimeMillis();
- if (now < Preview.this.lastTimestamp + PREVIEW_INTERVAL)
- return;
- if (!doingProcessing) {
-
-
- Log.i("Preview", "Processing new image");
- Preview.this.lastTimestamp = now;
- task = new MotionAsyncTask(
- lastPic,
- data,
- size.width,
- size.height,
- updateHandler,
- motionSensitivity);
- for (MotionAsyncTask.MotionListener listener : listeners) {
- Log.i("Preview", "Added listener");
- task.addListener(listener);
- }
- doingProcessing = true;
- task.addListener(new MotionAsyncTask.MotionListener() {
-
- public void onProcess(Bitmap oldBitmap, Bitmap newBitmap,
- Bitmap rawBitmap,
- boolean motionDetected) {
-
- if (motionDetected) {
- Log.i("MotionListener", "Motion detected");
- if (serviceMessenger != null) {
- Message message = new Message();
- message.what = EventTrigger.CAMERA;
-
-
- try {
-
- File fileImageDir = new File(Environment.getExternalStorageDirectory(), prefs.getImagePath());
- fileImageDir.mkdirs();
-
- String ts = new Date().getTime() + ".jpg";
-
- File fileImage = new File(fileImageDir, "detected.original." + ts);
- FileOutputStream stream = new FileOutputStream(fileImage);
- if (prefs.getCamera().equalsIgnoreCase(PreferenceManager.BACK)) {
- Bitmap bmps = ImageCodec.rotate(rawBitmap, 180, false);
- bmps.compress(Bitmap.CompressFormat.JPEG, 100, stream);} else {
- rawBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
- }
- stream.flush();
- stream.close();
- message.getData().putString("path", fileImage.getAbsolutePath());
-
- //store the still match frame, even if doing video
- serviceMessenger.send(message);
-
- if (prefs.getVideoMonitoringActive() && (!doingVideoProcessing)) {
- new Thread ()
- {
- public void run ()
- {
- camera.stopPreview();
- record(camera, serviceMessenger);
-
- }
- }.start();
- }
-
- } catch (Exception e) {
- // Cannot happen
- Log.e("Preview", "error creating image", e);
- }
- }
- }
- Log.i("MotionListener", "Allowing further processing");
- doingProcessing = false;
- }
- });
- task.start();
- lastPic = data;
- try {
-
- Camera.Parameters parameters = cam.getParameters();
- parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
- cam.setParameters(parameters);
-
- } catch (Exception e) {
- }
- }
- }
- });
-
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- }
-
- private synchronized boolean record(Camera cam, Messenger messenger) {
-
- if (mediaRecorder != null && doingVideoProcessing)
- return false;
-
- String ts1 = String.valueOf(new Date().getTime());
- videoFile = Environment.getExternalStorageDirectory() + File.separator + prefs.getImagePath() + File.separator + ts1 + ".mp4";
- int seconds = prefs.getMonitoringTime() * 1000;
- MediaRecorderTask mediaRecorderTask = new MediaRecorderTask(cam, videoFile, seconds, mHolder);
- mediaRecorder = mediaRecorderTask.getPreparedMediaRecorder();
-
- /**
- AudioManager audioManager = (AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
- if (audioManager != null) {
- audioManager.setStreamMute(AudioManager.STREAM_SYSTEM, true);
- audioManager.setStreamMute(AudioManager.STREAM_MUSIC,true);
- }**/
-
- doingVideoProcessing = true;
- mediaRecorder.start();
- updateHandler.postDelayed(() -> {
- if (messenger != null) {
- Message message = new Message();
- message.what = EventTrigger.CAMERA_VIDEO;
- message.getData().putString("path", videoFile);
- try {
- messenger.send(message);
- } catch (RemoteException e) {
- e.printStackTrace();
- }
- /**
- if (audioManager != null) {
- audioManager.setStreamMute(AudioManager.STREAM_SYSTEM, false);
- audioManager.setStreamMute(AudioManager.STREAM_MUSIC, false);
- }**/
-
- mediaRecorder.stop();
- mediaRecorder.reset();
- mediaRecorder.release();
- doingVideoProcessing = false;
- }
- }, seconds);
-
- return true;
- }
-
- public void surfaceDestroyed(SurfaceHolder holder) {
-
- if (doingVideoProcessing && serviceMessenger != null && prefs.getVideoMonitoringActive()) {
- Message message = new Message();
- message.what = EventTrigger.CAMERA_VIDEO;
- message.getData().putString("path", videoFile);
- try {
- serviceMessenger.send(message);
- } catch (RemoteException e) {
- e.printStackTrace();
- }
- }
- }
-
- public void stopCamera ()
- {
- if (camera != null) {
- // Surface will be destroyed when we return, so stop the preview.
- // Because the CameraDevice object is not a shared resource, it's very
- // important to release it when the activity is paused.
- this.context.unbindService(mConnection);
-
- camera.setPreviewCallback(null);
- camera.stopPreview();
- camera.release();
- }
- }
-
- public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
- if (camera != null) {
-
- int degree = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay().getRotation();
- int displayOrientation = 0;
-
- if (prefs.getCamera().equals(PreferenceManager.FRONT)) {
-
- switch (degree) {
- case Surface.ROTATION_0:
- displayOrientation = 90;
- break;
- case Surface.ROTATION_90:
- displayOrientation = 0;
- break;
- case Surface.ROTATION_180:
- displayOrientation = 0;
- break;
- case Surface.ROTATION_270:
- displayOrientation = 180;
- break;
- }
- } else {
- boolean isLandscape = false;// degree == Configuration.ORIENTATION_LANDSCAPE;
-
- switch (degree) {
- case Surface.ROTATION_0:
- displayOrientation = isLandscape ? 0 : 90;
- break;
- case Surface.ROTATION_90:
- displayOrientation = isLandscape ? 0 : 270;
- break;
- case Surface.ROTATION_180:
- displayOrientation = isLandscape ? 180 : 270;
- break;
- case Surface.ROTATION_270:
- displayOrientation = isLandscape ? 180 : 90;
- break;
- }
- }
-
- camera.setDisplayOrientation(displayOrientation);
-
- camera.startPreview();
- }
- }
-
- public int getCameraFacing() {
- return this.cameraFacing;
- }
-}
diff --git a/src/main/java/org/havenapp/main/service/BackgroundCamera.java b/src/main/java/org/havenapp/main/service/BackgroundCamera.java
new file mode 100644
index 00000000..3638bcb4
--- /dev/null
+++ b/src/main/java/org/havenapp/main/service/BackgroundCamera.java
@@ -0,0 +1,43 @@
+package org.havenapp.main.service;
+
+import android.content.Context;
+import android.graphics.PixelFormat;
+import android.view.Gravity;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.WindowManager;
+
+public class BackgroundCamera {
+
+ private void startCamera (Context context)
+ {
+
+ // Create new SurfaceView, set its size to 1x1, move it to the top left corner and set this service as a callback
+ WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+ SurfaceView surfaceView = new SurfaceView(context);
+ WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(
+ 1, 1,
+ WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
+ WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
+ PixelFormat.TRANSLUCENT
+ );
+ layoutParams.gravity = Gravity.LEFT | Gravity.TOP;
+ windowManager.addView(surfaceView, layoutParams);
+ surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
+ @Override
+ public void surfaceCreated(SurfaceHolder surfaceHolder) {
+
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
+
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
+
+ }
+ });
+ }
+}
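Note: the 1x1 overlay above depends on TYPE_SYSTEM_OVERLAY, which requires the SYSTEM_ALERT_WINDOW permission (user-granted via Settings.canDrawOverlays() from Android M) and is rejected for ordinary apps on Android O and later, where TYPE_APPLICATION_OVERLAY is the sanctioned type. A version-aware sketch:

    int overlayType = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O
            ? WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY
            : WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY;
    WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(
            1, 1, overlayType,
            WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
            PixelFormat.TRANSLUCENT);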
diff --git a/src/main/java/org/havenapp/main/service/MonitorService.java b/src/main/java/org/havenapp/main/service/MonitorService.java
index 43a6285e..2a6d0066 100644
--- a/src/main/java/org/havenapp/main/service/MonitorService.java
+++ b/src/main/java/org/havenapp/main/service/MonitorService.java
@@ -74,7 +74,7 @@ public class MonitorService extends Service {
private BarometerMonitor mBaroMonitor = null;
private AmbientLightMonitor mLightMonitor = null;
- private boolean mIsRunning = false;
+ private boolean mIsMonitoringActive = false;
/**
* Last Event instances
@@ -92,9 +92,14 @@ public class MonitorService extends Service {
private class MessageHandler extends Handler {
@Override
public void handleMessage(Message msg) {
- alert(msg.what,msg.getData().getString("path"));
+
+ //only accept alert if monitor is running
+ if (mIsMonitoringActive)
+ alert(msg.what,msg.getData().getString(KEY_PATH));
}
}
+
+ public final static String KEY_PATH = "path";
/**
* Messenger interface used by clients to interact
@@ -137,8 +142,10 @@ public void onCreate() {
showNotification();
+ // startCamera();
+
PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE);
- wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK,
+ wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK,
"MyWakelockTag");
wakeLock.acquire();
}
@@ -211,13 +218,13 @@ private void showNotification() {
public boolean isRunning ()
{
- return mIsRunning;
+ return mIsMonitoringActive;
}
private void startSensors ()
{
- mIsRunning = true;
+ mIsMonitoringActive = true;
if (!mPrefs.getAccelerometerSensitivity().equals(PreferenceManager.OFF)) {
mAccelManager = new AccelerometerMonitor(this);
@@ -247,7 +254,7 @@ private void startSensors ()
private void stopSensors ()
{
- mIsRunning = false;
+ mIsMonitoringActive = false;
// this check can never be false:
// - string comparisons need equals(), not == or !=
// - the value is never set to OFF in the first place
@@ -298,9 +305,10 @@ else if (mPrefs.getNotificationTimeMs() > 0 && mLastNotification != null)
//check if time window is within configured notification time window
doNotification = ((now.getTime()-mLastNotification.getTime())>mPrefs.getNotificationTimeMs());
}
- else
+
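+ // when video monitoring is active the camera pipeline raises its own alerts,
+ // so skip duplicate still-image notifications for camera events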
+ if (doNotification)
{
- doNotification = true;
+ doNotification = !(mPrefs.getVideoMonitoringActive() && alertType == EventTrigger.CAMERA);
}
EventTrigger eventTrigger = new EventTrigger();
@@ -359,4 +367,6 @@ else if (eventTrigger.getType() == EventTrigger.CAMERA_VIDEO) {
}
+
+
}
diff --git a/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java b/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java
index dd60ff43..15cacfbe 100644
--- a/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java
+++ b/src/main/java/org/havenapp/main/ui/CameraConfigureActivity.java
@@ -96,7 +96,7 @@ private void switchCamera() {
else if (camera.equals(PreferenceManager.BACK))
mPrefManager.setCamera(PreferenceManager.FRONT);
- ((CameraFragment) getSupportFragmentManager().findFragmentById(R.id.fragment_camera)).resetCamera();
+ ((CameraFragment) getSupportFragmentManager().findFragmentById(R.id.fragment_camera)).updateCamera();
setResult(RESULT_OK);
}
diff --git a/src/main/java/org/havenapp/main/ui/CameraFragment.java b/src/main/java/org/havenapp/main/ui/CameraFragment.java
index e379a02d..7d98350e 100644
--- a/src/main/java/org/havenapp/main/ui/CameraFragment.java
+++ b/src/main/java/org/havenapp/main/ui/CameraFragment.java
@@ -9,43 +9,50 @@
package org.havenapp.main.ui;
import android.os.Bundle;
-import android.graphics.Bitmap;
import android.support.v4.app.Fragment;
-import android.hardware.Camera;
import android.hardware.SensorEvent;
+import android.support.v7.preference.PreferenceFragmentCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
-import android.widget.FrameLayout;
+import android.widget.TextView;
+
+import com.google.android.cameraview.CameraView;
import org.havenapp.main.PreferenceManager;
import org.havenapp.main.R;
-import org.havenapp.main.sensors.media.MotionAsyncTask;
-import org.havenapp.main.sensors.media.ImageCodec;
-import org.havenapp.main.sensors.motion.Preview;
+import org.havenapp.main.sensors.motion.CameraViewHolder;
public final class CameraFragment extends Fragment {
- private Preview preview;
+ private CameraViewHolder cameraViewHolder;
private ImageView newImage;
+ private PreferenceManager prefs;
+ private TextView txtCameraStatus;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
- return inflater.inflate(R.layout.camera_fragment, container, false);
+ View view = inflater.inflate(R.layout.camera_fragment, container, false);
+
+ newImage = view.findViewById(R.id.new_image);
+ txtCameraStatus = view.findViewById(R.id.camera_status_display);
+
+ return view;
}
public void setMotionSensitivity (int threshold)
{
- preview.setMotionSensitivity(threshold);
+ cameraViewHolder.setMotionSensitivity(threshold);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
+ prefs = new PreferenceManager(getContext());
}
@Override
@@ -56,70 +63,79 @@ public void onPause() {
@Override
public void onResume() {
super.onResume();
- if (preview == null)
+ // if (cameraViewHolder == null)
initCamera();
- else
- resetCamera();
+
+
+ if (cameraViewHolder != null)
+ cameraViewHolder.setMotionSensitivity(prefs.getCameraSensitivity());
+ }
+
+ public void updateCamera ()
+ {
+ if (cameraViewHolder != null) {
+ cameraViewHolder.updateCamera();
+ }
}
public void stopCamera ()
{
- if (preview != null) {
- preview.stopCamera();
- preview = null;
+ if (cameraViewHolder != null) {
+ cameraViewHolder.stopCamera();
}
}
+ /**
public void resetCamera ()
{
stopCamera();
- ((FrameLayout) getActivity().findViewById(R.id.preview)).removeAllViews();
initCamera();
- }
+ }**/
- private void initCamera ()
+ public void initCamera ()
{
- if (preview == null) {
- PreferenceManager prefs = new PreferenceManager(getActivity());
- if (prefs.getCameraActivation()) {
- //Uncomment to see the camera
- preview = new Preview(getActivity());
+ PreferenceManager prefs = new PreferenceManager(getActivity());
- ((FrameLayout) getActivity().findViewById(R.id.preview)).addView(preview);
+ if (prefs.getCameraActivation()) {
+ //Uncomment to see the camera
- // oldImage = (ImageView) getActivity().findViewById(R.id.old_image);
- newImage = getActivity().findViewById(R.id.new_image);
+ CameraView cameraView = getActivity().findViewById(R.id.camera_view);
- preview.addListener(new MotionAsyncTask.MotionListener() {
+ if (cameraViewHolder == null) {
+ cameraViewHolder = new CameraViewHolder(getActivity(), cameraView);
- public void onProcess(Bitmap oldBitmap, Bitmap newBitmap, Bitmap rawBitmap,
- boolean motionDetected) {
- int rotation = 0;
- boolean reflex = false;
+ cameraViewHolder.addListener((oldBitmap, newBitmap, rawBitmap, motionDetected) -> {
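+ // show the frame that triggered detection, or reset to a blank placeholder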
+ if (motionDetected)
+ newImage.setImageBitmap(newBitmap);
+ else
+ newImage.setImageResource(R.drawable.blankimage);
- if (preview == null)
- return;
-
- if (preview.getCameraFacing() == Camera.CameraInfo.CAMERA_FACING_BACK) {
- rotation = 90;
+ if (txtCameraStatus != null) {
+ if (cameraViewHolder.doingVideoProcessing()) {
+ txtCameraStatus.setText("Recording...");
} else {
- rotation = 270;
- reflex = true;
+ txtCameraStatus.setText("");
}
-
- // oldImage.setImageBitmap(ImageCodec.rotate(oldBitmap, rotation, reflex));
- newImage.setImageBitmap(ImageCodec.rotate(newBitmap, rotation, reflex));
}
+
});
}
+
}
+
+
+ if (cameraViewHolder != null)
+ cameraViewHolder.startCamera();
+
}
@Override
public void onDestroy() {
super.onDestroy();
+
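+ // release the camera and any frame-processing resources held by the view holder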
+ if (cameraViewHolder != null)
+ cameraViewHolder.destroy();
+
}
public void onSensorChanged(SensorEvent event) {
diff --git a/src/main/java/org/havenapp/main/ui/EventTriggerAdapter.java b/src/main/java/org/havenapp/main/ui/EventTriggerAdapter.java
index 9118feda..32ac3b6d 100644
--- a/src/main/java/org/havenapp/main/ui/EventTriggerAdapter.java
+++ b/src/main/java/org/havenapp/main/ui/EventTriggerAdapter.java
@@ -6,7 +6,9 @@
import android.media.ThumbnailUtils;
import android.net.Uri;
import android.provider.MediaStore;
+import android.support.v4.content.FileProvider;
import android.support.v7.widget.RecyclerView;
+import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
@@ -16,11 +18,11 @@
import com.github.derlio.waveform.SimpleWaveformView;
import com.github.derlio.waveform.soundfile.SoundFile;
-import com.squareup.picasso.Picasso;
import com.stfalcon.frescoimageviewer.ImageViewer;
import org.havenapp.main.R;
import org.havenapp.main.model.EventTrigger;
import java.io.File;
import java.util.ArrayList;
@@ -36,10 +38,12 @@ public class EventTriggerAdapter extends RecyclerView.Adapter<EventTriggerAdapter.EventTriggerVH> {
private List<EventTrigger> eventTriggers;
- private ArrayList<String> eventTriggerImagePaths;
+ private ArrayList<Uri> eventTriggerImagePaths;
private OnItemClickListener clickListener;
+ private final static String AUTHORITY = "org.havenapp.main.fileprovider";
+
public EventTriggerAdapter(Context context, List<EventTrigger> eventTriggers) {
this.context = context;
this.eventTriggers = eventTriggers;
@@ -47,9 +51,15 @@ public EventTriggerAdapter(Context context, List<EventTrigger> eventTriggers) {
this.eventTriggerImagePaths = new ArrayList<>();
for (EventTrigger trigger : eventTriggers)
{
- if (trigger.getType() == EventTrigger.CAMERA)
+ if (trigger.getType() == EventTrigger.CAMERA
+ && (!TextUtils.isEmpty(trigger.getPath())))
{
- eventTriggerImagePaths.add("file:///" + trigger.getPath());
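+ // use a content:// URI from FileProvider; raw file:// URIs throw
+ // FileUriExposedException on API 24 and above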
+ Uri fileUri = FileProvider.getUriForFile(
+ context,
+ AUTHORITY,
+ new File(trigger.getPath()));
+
+ eventTriggerImagePaths.add(fileUri);
}
}
}
@@ -75,7 +85,6 @@ public void onBindViewHolder(EventTriggerVH holder, int position) {
holder.extra.setVisibility(View.GONE);
holder.sound.setVisibility(View.GONE);
-
if (eventTrigger.getPath() != null)
{
switch (eventTrigger.getType()) {
@@ -103,37 +112,39 @@ public boolean onLongClick(View view) {
break;
case EventTrigger.CAMERA:
holder.image.setVisibility(View.VISIBLE);
- Picasso.get().load(new File(eventTrigger.getPath())).into(holder.image);
- holder.image.setOnClickListener(new View.OnClickListener() {
- @Override
- public void onClick(View view) {
- int startPosition = 0;
- for (int i = 0; i < eventTriggerImagePaths.size(); i++) {
- if (eventTriggerImagePaths.get(i).contains(eventTrigger.getPath())) {
- startPosition = i;
- break;
- }
- }
+ Uri fileUri = FileProvider.getUriForFile(
+ context,
+ AUTHORITY,
+ new File(eventTrigger.getPath()));
+ //Picasso.get().load(fileUri).into(holder.image);
+ holder.image.setImageURI(fileUri);
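+ // setImageURI decodes on the UI thread; fine for small thumbnails, though an
+ // asynchronous image loader would avoid jank with large files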
+
+ holder.image.setOnClickListener(view -> {
+ int startPosition = 0;
- ShareOverlayView overlayView = new ShareOverlayView(context);
- ImageViewer viewer = new ImageViewer.Builder(context, eventTriggerImagePaths)
- .setStartPosition(startPosition)
- .setOverlayView(overlayView)
- .show();
- overlayView.setImageViewer(viewer);
+ // find this trigger's image among the content URIs by file name,
+ // since Uri has no contains() like the old String paths did
+ for (int i = 0; i < eventTriggerImagePaths.size(); i++) {
+ String segment = eventTriggerImagePaths.get(i).getLastPathSegment();
+ if (segment != null && eventTrigger.getPath().endsWith(segment)) {
+ startPosition = i;
+ break;
+ }
+ }
+
+ ShareOverlayView overlayView = new ShareOverlayView(context);
+ ImageViewer viewer = new ImageViewer.Builder(context, eventTriggerImagePaths)
+ .setStartPosition(startPosition)
+ .setOverlayView(overlayView)
+ .show();
+ overlayView.setImageViewer(viewer);
- }
});
- holder.image.setOnLongClickListener(new View.OnLongClickListener() {
- @Override
- public boolean onLongClick(View view) {
- shareMedia(eventTrigger);
- return false;
- }
+ holder.image.setOnLongClickListener(view -> {
+ shareMedia(eventTrigger);
+ return false;
});
break;
case EventTrigger.MICROPHONE:
diff --git a/src/main/res/drawable/blankimage.png b/src/main/res/drawable/blankimage.png
new file mode 100644
index 00000000..177e7b67
Binary files /dev/null and b/src/main/res/drawable/blankimage.png differ
diff --git a/src/main/res/drawable/fill_drawable_accent.xml b/src/main/res/drawable/fill_drawable_accent.xml
new file mode 100644
index 00000000..415ac387
--- /dev/null
+++ b/src/main/res/drawable/fill_drawable_accent.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android"
+ android:shape="rectangle">
+
+ <corners
+ android:radius="1000dp" />
+
+ <solid
+ android:color="@color/colorAccent" />
+
+ <stroke
+ android:width="2dp"
+ android:color="@color/colorAccent" />
+
+</shape>
\ No newline at end of file
diff --git a/src/main/res/layout/activity_monitor.xml b/src/main/res/layout/activity_monitor.xml
index d4de9697..a02dd54c 100644
--- a/src/main/res/layout/activity_monitor.xml
+++ b/src/main/res/layout/activity_monitor.xml
@@ -50,10 +50,10 @@
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_margin="10dp"
- android:background="@drawable/round_drawable_accent"
+ android:background="@drawable/fill_drawable_accent"
android:padding="6dp"
android:text="@string/start_now"
- android:textColor="@color/colorAccent"
+ android:textColor="@color/colorPrimaryDark"
android:textSize="25sp"
android:textStyle="bold" />
@@ -72,6 +72,7 @@
android:layout_height="match_parent"
android:gravity="center_horizontal|bottom"
android:orientation="horizontal"
+ android:id="@+id/buttonBar"
android:padding="10dp">
-
+ <ImageView
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:layout_gravity="center_horizontal"
+ android:background="@color/transparent"
+ android:scaleType="fitXY"
+ />
+
+
diff --git a/src/main/res/layout/item_event.xml b/src/main/res/layout/item_event.xml
index b1b74385..86b467fd 100644
--- a/src/main/res/layout/item_event.xml
+++ b/src/main/res/layout/item_event.xml
@@ -22,7 +22,7 @@
android:layout_height="wrap_content"
tools:text="Title" />
-
+
+
+