android + ui: update target sdk, add camera support, camera control from native code, camera view, qr scan modal

ardocrat 2024-05-03 19:51:57 +03:00
parent 6a24c90de9
commit ef5fd29612
18 changed files with 679 additions and 84 deletions

49 Cargo.lock generated
View file

@@ -3202,6 +3202,34 @@ dependencies = [
"slab",
]
[[package]]
name = "g2gen"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2c7625b2fc250dd90b63f7887a6bb0f7ec1d714c8278415bea2669ef20820e"
dependencies = [
"g2poly",
"proc-macro2 1.0.81",
"quote 1.0.36",
"syn 1.0.109",
]
[[package]]
name = "g2p"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc36d9bdc3d2da057775a9f4fa7d7b09edab3e0eda7a92cc353358fa63b8519e"
dependencies = [
"g2gen",
"g2poly",
]
[[package]]
name = "g2poly"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af6a86e750338603ea2c14b1c0bfe58cd61f87ca67a0021d9334996024608e12"
[[package]]
name = "gcc"
version = "0.3.55"
@@ -3510,6 +3538,7 @@ dependencies = [
"log",
"openssl-sys",
"rand 0.8.5",
"rqrr",
"rust-i18n",
"serde",
"serde_derive",
@@ -5114,6 +5143,15 @@ dependencies = [
"imgref",
]
[[package]]
name = "lru"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc"
dependencies = [
"hashbrown 0.14.3",
]
[[package]]
name = "lru-cache"
version = "0.1.2"
@@ -7041,6 +7079,17 @@ dependencies = [
"libc",
]
[[package]]
name = "rqrr"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0cd0432e6beb2f86aa4c8af1bb5edcf3c9bcb9d4836facc048664205458575"
dependencies = [
"g2p",
"image 0.25.1",
"lru",
]
[[package]]
name = "rsa"
version = "0.9.6"

View file

@@ -53,6 +53,8 @@ rand = "0.8.5"
serde_derive = "1.0.197"
serde_json = "1.0.115"
tokio = { version = "1.37.0", features = ["full"] }
image = { version = "0.25.1" }
rqrr = "0.7.1"
## tor
arti = { version = "1.2.0", features = ["pt-client", "static"] }
@@ -84,7 +86,6 @@ env_logger = "0.10.0"
winit = { version = "0.29.15" }
eframe = { version = "0.27.2", features = [ "wgpu" ] }
arboard = "3.2.0"
image = "0.25.1"
[target.'cfg(target_os = "android")'.dependencies]
android_logger = "0.13.1"

View file

@@ -7,13 +7,13 @@ def keystoreProperties = new Properties()
keystoreProperties.load(new FileInputStream(keystorePropertiesFile))
android {
compileSdk 33
compileSdk 34
ndkVersion '26.0.10792818'
defaultConfig {
applicationId "mw.gri.android"
minSdk 24
targetSdk 33
targetSdk 34
versionCode 1
versionName "0.1.0"
}
@@ -48,19 +48,12 @@ android {
dependencies {
implementation 'androidx.appcompat:appcompat:1.6.1'
// To use the Android Frame Pacing library
//implementation "androidx.games:games-frame-pacing:1.9.1"
// To use the Android Performance Tuner
//implementation "androidx.games:games-performance-tuner:1.5.0"
// To use the Games Activity library
implementation "androidx.games:games-activity:2.0.2"
// To use the Games Controller Library
//implementation "androidx.games:games-controller:1.1.0"
// To use the Games Text Input Library
//implementation "androidx.games:games-text-input:1.1.0"
// Android Camera
implementation 'androidx.camera:camera-core:1.2.3'
implementation 'androidx.camera:camera-camera2:1.2.3'
implementation 'androidx.camera:camera-lifecycle:1.2.3'
}

View file

@@ -2,10 +2,12 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-permission android:name="android.permission.POST_NOTIFICATIONS"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.CAMERA"/>
<application
android:hardwareAccelerated="true"
@@ -20,7 +22,7 @@
<activity
android:launchMode="singleTask"
android:name=".MainActivity"
android:configChanges="orientation|screenSize|screenLayout|keyboardHidden|uiMode|keyboard"
android:configChanges="orientation|screenSize"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

View file

@@ -3,21 +3,29 @@ package mw.gri.android;
import android.Manifest;
import android.content.*;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.os.Build;
import android.os.Bundle;
import android.os.Process;
import android.system.ErrnoException;
import android.system.Os;
import android.util.Size;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
import androidx.annotation.NonNull;
import androidx.camera.core.*;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.content.ContextCompat;
import androidx.core.graphics.Insets;
import androidx.core.view.DisplayCutoutCompat;
import androidx.core.view.ViewCompat;
import androidx.core.view.WindowInsetsCompat;
import com.google.androidgamesdk.GameActivity;
import org.jetbrains.annotations.NotNull;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static android.content.ClipDescription.MIMETYPE_TEXT_HTML;
import static android.content.ClipDescription.MIMETYPE_TEXT_PLAIN;
@@ -26,6 +34,7 @@ public class MainActivity extends GameActivity {
public static String STOP_APP_ACTION = "STOP_APP";
private static final int NOTIFICATIONS_PERMISSION_CODE = 1;
private static final int CAMERA_PERMISSION_CODE = 2;
static {
System.loadLibrary("grim");
@@ -41,9 +50,19 @@ public class MainActivity extends GameActivity {
}
};
private final ImageAnalysis mImageAnalysis = new ImageAnalysis.Builder()
.setTargetResolution(new Size(640, 480))
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
private ListenableFuture<ProcessCameraProvider> mCameraProviderFuture = null;
private ProcessCameraProvider mCameraProvider = null;
private ExecutorService mCameraExecutor = null;
private boolean mUseBackCamera = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
// Setup HOME environment variable for native code configurations.
// Setup environment variables for native code.
try {
Os.setenv("HOME", getExternalFilesDir("").getPath(), true);
Os.setenv("XDG_CACHE_HOME", getExternalCacheDir().getPath(), true);
@@ -87,7 +106,7 @@ public class MainActivity extends GameActivity {
});
findViewById(android.R.id.content).post(() -> {
// Request notifications permissions.
// Request notifications permissions if needed.
if (Build.VERSION.SDK_INT >= 33) {
String notificationsPermission = Manifest.permission.POST_NOTIFICATIONS;
if (checkSelfPermission(notificationsPermission) != PackageManager.PERMISSION_GRANTED) {
@@ -104,12 +123,30 @@ public class MainActivity extends GameActivity {
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull @NotNull String[] permissions, @NonNull @NotNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == NOTIFICATIONS_PERMISSION_CODE && grantResults.length != 0 &&
grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// Start notification service.
BackgroundService.start(this);
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
// Called on screen orientation change to restart camera.
if (mCameraProvider != null) {
stopCamera();
startCamera();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] results) {
super.onRequestPermissionsResult(requestCode, permissions, results);
if (results.length != 0 && results[0] == PackageManager.PERMISSION_GRANTED) {
switch (requestCode) {
case NOTIFICATIONS_PERMISSION_CODE: {
// Start notification service.
BackgroundService.start(this);
return;
}
case CAMERA_PERMISSION_CODE: {
// Start camera.
startCamera();
}
}
}
}
@@ -121,7 +158,7 @@ public class MainActivity extends GameActivity {
onInput(event.getCharacters());
return false;
}
// Pass any other input values into native code.
// Pass any other input values into native code.
} else if (event.getAction() == KeyEvent.ACTION_UP &&
event.getKeyCode() != KeyEvent.KEYCODE_ENTER &&
event.getKeyCode() != KeyEvent.KEYCODE_BACK) {
@@ -201,13 +238,98 @@ public class MainActivity extends GameActivity {
return text;
}
// Called from native code to show keyboard.
public void showKeyboard() {
InputMethodManager imm = (InputMethodManager )getSystemService(Context.INPUT_METHOD_SERVICE);
InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
imm.showSoftInput(getWindow().getDecorView(), InputMethodManager.SHOW_IMPLICIT);
}
// Called from native code to hide keyboard.
public void hideKeyboard() {
InputMethodManager imm = (InputMethodManager )getSystemService(Context.INPUT_METHOD_SERVICE);
InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(getWindow().getDecorView().getWindowToken(), 0);
}
// Called from native code to start camera.
public void startCamera() {
// Check permissions.
String cameraPermission = Manifest.permission.CAMERA;
if (checkSelfPermission(cameraPermission) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[] { cameraPermission }, CAMERA_PERMISSION_CODE);
} else {
// Initialize camera provider at first start.
if (mCameraProviderFuture == null) {
mCameraProviderFuture = ProcessCameraProvider.getInstance(this);
mCameraProviderFuture.addListener(() -> {
try {
mCameraProvider = mCameraProviderFuture.get();
// Launch camera.
openCamera();
} catch (Exception e) {
View content = findViewById(android.R.id.content);
if (content != null) {
content.post(this::stopCamera);
}
}
}, ContextCompat.getMainExecutor(this));
} else {
View content = findViewById(android.R.id.content);
if (content != null) {
content.post(this::openCamera);
}
}
}
}
// Open camera after initialization, or restart it after a stop.
private void openCamera() {
// Set up the image analysis use case which will process frames in real time.
if (mCameraExecutor == null) {
mCameraExecutor = Executors.newSingleThreadExecutor();
mImageAnalysis.setAnalyzer(mCameraExecutor, image -> {
// Convert image to JPEG.
byte[] data = Utils.convertCameraImage(image);
// Send image to native code.
onCameraImage(data, image.getImageInfo().getRotationDegrees());
image.close();
});
}
// Select back or front camera based on the current flag.
CameraSelector cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
if (!mUseBackCamera) {
cameraSelector = CameraSelector.DEFAULT_FRONT_CAMERA;
}
// Apply declared configs to CameraX using the same lifecycle owner
mCameraProvider.unbindAll();
mCameraProvider.bindToLifecycle(this, cameraSelector, mImageAnalysis);
}
// Called from native code to stop camera.
public void stopCamera() {
View content = findViewById(android.R.id.content);
if (content != null && mCameraProvider != null) {
content.post(() -> {
mCameraProvider.unbindAll();
});
}
}
// Called from native code to get number of cameras.
public int camerasAmount() {
if (mCameraProvider == null) {
return 0;
}
return mCameraProvider.getAvailableCameraInfos().size();
}
// Called from native code to switch camera.
public void switchCamera() {
mUseBackCamera = !mUseBackCamera;
stopCamera();
startCamera();
}
// Pass image from camera into native code.
public native void onCameraImage(byte[] buff, int rotation);
}

View file

@@ -1,10 +1,138 @@
package mw.gri.android;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import androidx.camera.core.ImageProxy;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
public class Utils {
// Convert pixels to density-independent pixels (dp).
public static int pxToDp(int px, Context context) {
return (int) (px / context.getResources().getDisplayMetrics().density);
}
/** Converts a YUV_420_888 image from CameraX API to JPEG bytes. */
public static byte[] convertCameraImage(ImageProxy image) {
// Convert image to nv21 and get buffer.
ByteBuffer nv21Buffer =
yuv420ThreePlanesToNV21(image.getPlanes(), image.getWidth(), image.getHeight());
nv21Buffer.rewind();
byte[] nv21 = new byte[nv21Buffer.limit()];
nv21Buffer.get(nv21);
// Convert to JPEG.
YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 100, out);
return out.toByteArray();
}
/**
* Converts YUV_420_888 to NV21 bytebuffer.
*
* <p>The NV21 format consists of a single byte array containing the Y, U and V values. For an
* image of size S, the first S positions of the array contain all the Y values. The remaining
* positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both
* dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain
* S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU
*
* <p>YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled
* by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and
* V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into
* the first part of the NV21 array. The U and V planes may already have the representation in the
* NV21 format. This happens if the planes share the same buffer, the V buffer is one position
* before the U buffer and the planes have a pixelStride of 2. If this is case, we can just copy
* them to the NV21 array.
*/
private static ByteBuffer yuv420ThreePlanesToNV21(ImageProxy.PlaneProxy[] yuv420888planes, int width, int height) {
int imageSize = width * height;
byte[] out = new byte[imageSize + 2 * (imageSize / 4)];
if (areUVPlanesNV21(yuv420888planes, width, height)) {
// Copy the Y values.
yuv420888planes[0].getBuffer().get(out, 0, imageSize);
ByteBuffer uBuffer = yuv420888planes[1].getBuffer();
ByteBuffer vBuffer = yuv420888planes[2].getBuffer();
// Get the first V value from the V buffer, since the U buffer does not contain it.
vBuffer.get(out, imageSize, 1);
// Copy the first U value and the remaining VU values from the U buffer.
uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1);
} else {
// Fallback to copying the UV values one by one, which is slower but also works.
// Unpack Y.
unpackPlane(yuv420888planes[0], width, height, out, 0, 1);
// Unpack U.
unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2);
// Unpack V.
unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2);
}
return ByteBuffer.wrap(out);
}
/** Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format. */
private static boolean areUVPlanesNV21(ImageProxy.PlaneProxy[] planes, int width, int height) {
int imageSize = width * height;
ByteBuffer uBuffer = planes[1].getBuffer();
ByteBuffer vBuffer = planes[2].getBuffer();
// Backup buffer properties.
int vBufferPosition = vBuffer.position();
int uBufferLimit = uBuffer.limit();
// Advance the V buffer by 1 byte, since the U buffer will not contain the first V value.
vBuffer.position(vBufferPosition + 1);
// Chop off the last byte of the U buffer, since the V buffer will not contain the last U value.
uBuffer.limit(uBufferLimit - 1);
// Check that the buffers are equal and have the expected number of elements.
boolean areNV21 =
(vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0);
// Restore buffers to their initial state.
vBuffer.position(vBufferPosition);
uBuffer.limit(uBufferLimit);
return areNV21;
}
/**
* Unpack an image plane into a byte array.
*
* <p>The input plane data will be copied in 'out', starting at 'offset' and every pixel will be
* spaced by 'pixelStride'. Note that there is no row padding on the output.
*/
private static void unpackPlane(
ImageProxy.PlaneProxy plane, int width, int height, byte[] out, int offset, int pixelStride) {
ByteBuffer buffer = plane.getBuffer();
buffer.rewind();
// Compute the size of the current plane.
// We assume that it has the same aspect ratio as the original image.
int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride();
if (numRow == 0) {
return;
}
int scaleFactor = height / numRow;
int numCol = width / scaleFactor;
// Extract the data in the output buffer.
int outputPos = offset;
int rowStart = 0;
for (int row = 0; row < numRow; row++) {
int inputPos = rowStart;
for (int col = 0; col < numCol; col++) {
out[outputPos] = buffer.get(inputPos);
outputPos += pixelStride;
inputPos += plane.getPixelStride();
}
rowStart += plane.getRowStride();
}
}
}
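For reference, the interleaving performed by the Java helpers above reduces to a few lines when the planes are tightly packed (pixel stride 1, no row padding), an assumption the Java code deliberately does not make. A hedged Rust sketch of the same NV21 layout:

// Hypothetical helper, illustrative only: pack tightly packed Y, U and V
// planes into NV21 (all Y values first, then interleaved V/U pairs).
fn planes_to_nv21(y: &[u8], u: &[u8], v: &[u8], width: usize, height: usize) -> Vec<u8> {
    let size = width * height;
    let mut out = vec![0u8; size + size / 2];
    // The Y plane is copied as-is into the first `size` bytes.
    out[..size].copy_from_slice(&y[..size]);
    // U and V are subsampled by 2 in both dimensions: size / 4 values each.
    for i in 0..size / 4 {
        out[size + 2 * i] = v[i];     // V comes first in NV21...
        out[size + 2 * i + 1] = u[i]; // ...followed by U.
    }
    out
}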

View file

@@ -1,7 +1,7 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
id 'com.android.application' version '8.1.0' apply false
id 'com.android.library' version '8.1.0' apply false
id 'com.android.application' version '8.1.1' apply false
id 'com.android.library' version '8.1.1' apply false
}
task clean(type: Delete) {

View file

@@ -17,6 +17,7 @@ settings: Settings
language: Language
scan: Scan
qr_code: QR code
scan_qr: Scan QR code
repeat: Repeat
wallets:
await_conf_amount: Awaiting confirmation

View file

@@ -17,6 +17,7 @@ settings: Настройки
language: Язык
scan: Сканировать
qr_code: QR-код
scan_qr: Сканировать QR-код
repeat: Повторить
wallets:
await_conf_amount: Ожидает подтверждения

View file

@@ -12,7 +12,13 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use lazy_static::lazy_static;
use std::sync::{Arc, RwLock};
use jni::JNIEnv;
use jni::objects::{JByteArray, JObject, JString, JValue};
use winit::platform::android::activity::AndroidApp;
use crate::gui::platform::PlatformCallbacks;
#[derive(Clone)]
@@ -26,6 +32,19 @@ impl Android {
android_app: app,
}
}
fn call_java_method(&self, name: &str, sig: &str, args: &[JValue]) -> Option<jni::sys::jvalue> {
let vm = unsafe { jni::JavaVM::from_raw(self.android_app.vm_as_ptr() as _) }.unwrap();
let mut env = vm.attach_current_thread().unwrap();
let activity = unsafe {
JObject::from_raw(self.android_app.activity_as_ptr() as jni::sys::jobject)
};
if let Ok(result) = env.call_method(activity, name, sig, args) {
return Some(result.as_jni().clone());
}
None
}
}
impl PlatformCallbacks for Android {
@@ -33,75 +52,89 @@ impl PlatformCallbacks for Android {
// Disable NDK soft input show call before fix for egui.
// self.android_app.show_soft_input(false);
use jni::objects::{JObject};
let vm = unsafe { jni::JavaVM::from_raw(self.android_app.vm_as_ptr() as _) }.unwrap();
let mut env = vm.attach_current_thread().unwrap();
let activity = unsafe {
JObject::from_raw(self.android_app.activity_as_ptr() as jni::sys::jobject)
};
let _ = env.call_method(
activity,
"showKeyboard",
"()V",
&[]
).unwrap();
self.call_java_method("showKeyboard", "()V", &[]).unwrap();
}
fn hide_keyboard(&self) {
// Disable NDK soft input hide call before fix for egui.
// self.android_app.hide_soft_input(false);
use jni::objects::{JObject};
let vm = unsafe { jni::JavaVM::from_raw(self.android_app.vm_as_ptr() as _) }.unwrap();
let mut env = vm.attach_current_thread().unwrap();
let activity = unsafe {
JObject::from_raw(self.android_app.activity_as_ptr() as jni::sys::jobject)
};
let _ = env.call_method(
activity,
"hideKeyboard",
"()V",
&[]
).unwrap();
self.call_java_method("hideKeyboard", "()V", &[]).unwrap();
}
fn copy_string_to_buffer(&self, data: String) {
use jni::objects::{JObject, JValue};
let vm = unsafe { jni::JavaVM::from_raw(self.android_app.vm_as_ptr() as _) }.unwrap();
let mut env = vm.attach_current_thread().unwrap();
let activity = unsafe {
JObject::from_raw(self.android_app.activity_as_ptr() as jni::sys::jobject)
};
let arg_value = env.new_string(data).unwrap();
let _ = env.call_method(
activity,
"copyText",
"(Ljava/lang/String;)V",
&[JValue::Object(&JObject::from(arg_value))]
).unwrap();
self.call_java_method("copyText",
"(Ljava/lang/String;)V",
&[JValue::Object(&JObject::from(arg_value))]).unwrap();
}
fn get_string_from_buffer(&self) -> String {
use jni::objects::{JObject, JString};
let result = self.call_java_method("pasteText", "()Ljava/lang/String;", &[]).unwrap();
let vm = unsafe { jni::JavaVM::from_raw(self.android_app.vm_as_ptr() as _) }.unwrap();
let mut env = vm.attach_current_thread().unwrap();
let activity = unsafe {
JObject::from_raw(self.android_app.activity_as_ptr() as jni::sys::jobject)
};
let result = env.call_method(
activity,
"pasteText",
"()Ljava/lang/String;",
&[]
).unwrap();
let j_object: jni::sys::jobject = unsafe { result.as_jni().l };
let j_object: jni::sys::jobject = unsafe { result.l };
let paste_data: String = unsafe {
env.get_string(JString::from(JObject::from_raw(j_object)).as_ref()).unwrap().into()
};
paste_data
}
fn cameras_amount(&self) -> u32 {
let result = self.call_java_method("camerasAmount", "()I", &[]).unwrap();
let amount = unsafe { result.i };
amount as u32
}
fn switch_camera(&self) {
self.call_java_method("switchCamera", "()V", &[]).unwrap();
}
fn start_camera(&self) {
// Clear image.
let mut w_image = LAST_CAMERA_IMAGE.write().unwrap();
*w_image = None;
// Start camera.
self.call_java_method("startCamera", "()V", &[]).unwrap();
}
fn stop_camera(&self) {
// Stop camera.
self.call_java_method("stopCamera", "()V", &[]).unwrap();
// Clear image.
let mut w_image = LAST_CAMERA_IMAGE.write().unwrap();
*w_image = None;
}
fn camera_image(&self) -> Option<(Vec<u8>, u32)> {
let r_image = LAST_CAMERA_IMAGE.read().unwrap();
r_image.clone()
}
}
lazy_static! {
static ref LAST_CAMERA_IMAGE: Arc<RwLock<Option<(Vec<u8>, u32)>>> = Arc::new(RwLock::new(None));
}
#[allow(dead_code)]
#[cfg(target_os = "android")]
#[allow(non_snake_case)]
#[no_mangle]
/// Callback from Java code with JPEG image data and rotation from camera.
pub extern "C" fn Java_mw_gri_android_MainActivity_onCameraImage(
env: JNIEnv,
_class: JObject,
buff: jni::sys::jbyteArray,
rotation: jni::sys::jint,
) {
let arr = unsafe { JByteArray::from_raw(buff) };
let image: Vec<u8> = env.convert_byte_array(arr).unwrap();
if let Ok(mut w_image) = LAST_CAMERA_IMAGE.write() {
*w_image = Some((image, rotation as u32));
}
}
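The call_java_method helper above centralizes the attach-and-call JNI boilerplate, so a further callback only needs a method name and a JNI type signature. A hedged sketch of how another callback could reuse it (the torchState method is hypothetical and not part of this commit):

// Hypothetical extension of the Android impl; assumes a matching
// `public int torchState()` method exists on MainActivity.
// The signature "()I" means: no arguments, returns a Java int.
fn torch_state(&self) -> i32 {
    let result = self.call_java_method("torchState", "()I", &[]).unwrap();
    unsafe { result.i }
}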

View file

@@ -31,4 +31,21 @@ impl PlatformCallbacks for Desktop {
let mut clipboard = arboard::Clipboard::new().unwrap();
clipboard.get_text().unwrap_or("".to_string())
}
fn camera_image(&self) -> Option<(Vec<u8>, u32)> {
// Camera is not supported at desktop yet.
None
}
fn cameras_amount(&self) -> u32 {
0
}
fn start_camera(&self) {
}
fn stop_camera(&self) {
}
fn switch_camera(&self) {
}
}

View file

@@ -26,4 +26,9 @@ pub trait PlatformCallbacks {
fn hide_keyboard(&self);
fn copy_string_to_buffer(&self, data: String);
fn get_string_from_buffer(&self) -> String;
fn cameras_amount(&self) -> u32;
fn switch_camera(&self);
fn start_camera(&self);
fn stop_camera(&self);
fn camera_image(&self) -> Option<(Vec<u8>, u32)>;
}
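UI code sees only this trait, so camera access stays platform-agnostic. A simplified sketch of the per-frame polling pattern the camera view relies on (hypothetical function, assuming the trait as declared above):

// Illustrative only: one poll of the camera between start and stop.
// The real UI calls camera_image() on every repaint instead.
fn poll_camera_frame(cb: &dyn PlatformCallbacks) -> Option<(Vec<u8>, u32)> {
    if cb.cameras_amount() == 0 {
        // No camera available, e.g. the desktop stub.
        return None;
    }
    // Last JPEG frame from the camera and its rotation in degrees, if any.
    cb.camera_image()
}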

168 src/gui/views/camera.rs Normal file
View file

@@ -0,0 +1,168 @@
// Copyright 2024 The Grim Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::{Arc, RwLock};
use egui::load::SizedTexture;
use egui::{Pos2, Rect, TextureOptions, Widget};
use image::{DynamicImage, EncodableLayout, ImageFormat};
use crate::gui::platform::PlatformCallbacks;
use crate::gui::views::types::QrScanState;
use crate::gui::views::View;
/// Camera scanner content.
pub struct CameraContent {
// QR code scanning progress and result.
qr_scan_state: Arc<RwLock<QrScanState>>
}
impl Default for CameraContent {
fn default() -> Self {
Self {
qr_scan_state: Arc::new(RwLock::new(QrScanState::default())),
}
}
}
impl CameraContent {
pub fn ui(&mut self, ui: &mut egui::Ui, cb: &dyn PlatformCallbacks) {
// Draw last image from camera or loader.
if let Some(img_data) = cb.camera_image() {
// Load image to draw.
if let Ok(mut img) =
image::load_from_memory_with_format(&*img_data.0, ImageFormat::Jpeg) {
// Process image to find QR code.
self.scan_qr(&img);
// Setup image rotation.
img = match img_data.1 {
90 => img.rotate90(),
180 => img.rotate180(),
270 => img.rotate270(),
_ => img
};
// Convert to egui ColorImage to create texture.
let color_image = match &img {
DynamicImage::ImageRgb8(image) => {
egui::ColorImage::from_rgb(
[image.width() as usize, image.height() as usize],
image.as_bytes(),
)
},
other => {
let image = other.to_rgba8();
egui::ColorImage::from_rgba_unmultiplied(
[image.width() as usize, image.height() as usize],
image.as_bytes(),
)
},
};
// Create image texture.
let texture = ui.ctx().load_texture("camera_image",
color_image.clone(),
TextureOptions::default());
let image_size = egui::emath::vec2(color_image.width() as f32,
color_image.height() as f32);
let sized_image = SizedTexture::new(texture.id(), image_size);
// Add image to content.
ui.vertical_centered(|ui| {
egui::Image::from_texture(sized_image)
// Setup uv to crop image at center to fit square view.
.uv(Rect::from([Pos2::new(0.125, 0.0), Pos2::new(1.125, 1.0)]))
.max_height(ui.available_width())
.maintain_aspect_ratio(false)
.shrink_to_fit()
.ui(ui);
});
} else {
self.loading_content_ui(ui);
}
} else {
self.loading_content_ui(ui);
}
// Request redraw.
ui.ctx().request_repaint();
}
/// Draw camera loading progress content.
fn loading_content_ui(&self, ui: &mut egui::Ui) {
let space = (ui.available_width() - View::BIG_SPINNER_SIZE) / 2.0;
ui.vertical_centered(|ui| {
ui.add_space(space);
View::big_loading_spinner(ui);
ui.add_space(space);
});
}
/// Check if image is processing to find QR code.
fn image_processing(&self) -> bool {
let r_scan = self.qr_scan_state.read().unwrap();
r_scan.image_processing
}
/// Parse QR code from provided image data.
fn scan_qr(&self, data: &DynamicImage) {
// Do not scan when another image is processing.
if self.image_processing() {
return;
}
// Setup scanning flag.
{
let mut w_scan = self.qr_scan_state.write().unwrap();
w_scan.image_processing = true;
}
// Launch scanner at separate thread to keep UI responsive.
let luma = data.to_luma8();
let qr_scan_state = self.qr_scan_state.clone();
std::thread::spawn(move || {
// Prepare image data.
let mut img: rqrr::PreparedImage<image::GrayImage>
= rqrr::PreparedImage::prepare(luma);
// Scan and save results.
let grids = img.detect_grids();
for g in grids {
if let Ok((_, text)) = g.decode() {
if !text.is_empty() {
let mut w_scan = qr_scan_state.write().unwrap();
w_scan.qr_scan_result = Some(text);
}
}
}
// Reset scanning flag.
{
let mut w_scan = qr_scan_state.write().unwrap();
w_scan.image_processing = false;
}
});
}
/// Get QR code scan result.
pub fn qr_scan_result(&self) -> Option<String> {
let r_scan = self.qr_scan_state.read().unwrap();
r_scan.qr_scan_result.clone()
}
/// Reset camera content state to default.
pub fn clear_state(&mut self) {
let mut w_scan = self.qr_scan_state.write().unwrap();
*w_scan = QrScanState::default();
}
}
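A note on the hard-coded uv rect in ui() above: for the 640x480 analysis frames requested in MainActivity, cropping a centered square leaves (1 - 480/640) / 2 = 0.125 of the width on each side, which is where the 0.125 comes from. A generalized sketch of that center-square math (hypothetical helper; the commit itself hard-codes the rect):

// Illustrative only: uv rect selecting a centered square from a w x h
// frame; uv coordinates are normalized to the [0, 1] range.
fn center_square_uv(w: f32, h: f32) -> egui::Rect {
    if w > h {
        let margin = (1.0 - h / w) / 2.0; // 0.125 for a 640x480 frame.
        egui::Rect::from_min_max(egui::Pos2::new(margin, 0.0), egui::Pos2::new(1.0 - margin, 1.0))
    } else {
        let margin = (1.0 - w / h) / 2.0;
        egui::Rect::from_min_max(egui::Pos2::new(0.0, margin), egui::Pos2::new(1.0, 1.0 - margin))
    }
}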

View file

@@ -30,4 +30,7 @@ mod network;
pub use network::*;
mod wallets;
pub use wallets::*;
pub use wallets::*;
mod camera;
pub use camera::*;

View file

@@ -136,4 +136,21 @@ impl TextEditOptions {
self.scan_qr = true;
self
}
}
/// QR code scanning state.
pub struct QrScanState {
/// Flag to check if image is processing to find QR code.
pub(crate) image_processing: bool,
/// Found QR code content.
pub qr_scan_result: Option<String>
}
impl Default for QrScanState {
fn default() -> Self {
Self {
image_processing: false,
qr_scan_result: None,
}
}
}

View file

@@ -521,9 +521,12 @@ impl View {
});
}
/// Size of big loading spinner.
pub const BIG_SPINNER_SIZE: f32 = 104.0;
/// Draw big gold loading spinner.
pub fn big_loading_spinner(ui: &mut egui::Ui) {
Spinner::new().size(104.0).color(Colors::GOLD).ui(ui);
Spinner::new().size(Self::BIG_SPINNER_SIZE).color(Colors::GOLD).ui(ui);
}
/// Draw small gold loading spinner.

View file

@@ -21,7 +21,7 @@ use crate::AppConfig;
use crate::gui::Colors;
use crate::gui::icons::{BRIDGE, CHAT_CIRCLE_TEXT, CHECK, CHECK_FAT, FOLDER_USER, GEAR_FINE, GRAPH, PACKAGE, PATH, POWER, REPEAT, SCAN, USERS_THREE};
use crate::gui::platform::PlatformCallbacks;
use crate::gui::views::{Modal, Root, View};
use crate::gui::views::{CameraContent, Modal, Root, View};
use crate::gui::views::types::{ModalPosition, TextEditOptions};
use crate::gui::views::wallets::{WalletTransactions, WalletMessages, WalletTransport, WalletSettings};
use crate::gui::views::wallets::types::{GRIN, WalletTab, WalletTabType};
@@ -41,8 +41,13 @@ pub struct WalletContent {
/// Flag to check if error occurred during account creation at [`Modal`].
account_creation_error: bool,
/// Camera content for QR scan [`Modal`].
camera_content: CameraContent,
/// QR code scan result.
qr_scan_result: Option<String>,
/// Current tab content to show.
pub current_tab: Box<dyn WalletTab>,
pub current_tab: Box<dyn WalletTab>
}
impl Default for WalletContent {
@@ -52,6 +57,8 @@ impl Default for WalletContent {
account_creating: false,
account_label_edit: "".to_string(),
account_creation_error: false,
camera_content: CameraContent::default(),
qr_scan_result: None,
current_tab: Box::new(WalletTransactions::default())
}
}
@@ -60,6 +67,9 @@ impl Default for WalletContent {
/// Identifier for account list [`Modal`].
const ACCOUNT_LIST_MODAL: &'static str = "account_list_modal";
/// Identifier for QR code scan [`Modal`].
const QR_CODE_SCAN_MODAL: &'static str = "qr_code_scan_modal";
impl WalletContent {
pub fn ui(&mut self,
ui: &mut egui::Ui,
@@ -156,6 +166,11 @@ impl WalletContent {
self.account_list_modal_ui(ui, wallet, modal, cb);
});
}
QR_CODE_SCAN_MODAL => {
Modal::ui(ui.ctx(), |ui, modal| {
self.scan_qr_modal_ui(ui, wallet, modal, cb);
});
}
_ => {}
}
}
@@ -177,7 +192,15 @@ impl WalletContent {
ui.allocate_ui_with_layout(rect.size(), Layout::right_to_left(Align::Center), |ui| {
// Draw button to scan QR code.
View::item_button(ui, View::item_rounding(0, 2, true), SCAN, None, || {
//TODO: Scan with QR code.
// Reset previous QR code scan result.
self.qr_scan_result = None;
// Show QR code scan modal.
Modal::new(QR_CODE_SCAN_MODAL)
.position(ModalPosition::CenterTop)
.title(t!("scan_qr"))
.closeable(false)
.show();
cb.start_camera();
});
// Draw button to show list of accounts.
@@ -336,6 +359,36 @@ impl WalletContent {
}
}
/// Draw QR scanner [`Modal`] content.
fn scan_qr_modal_ui(&mut self,
ui: &mut egui::Ui,
wallet: &mut Wallet,
modal: &Modal,
cb: &dyn PlatformCallbacks) {
ui.add_space(6.0);
if let Some(result) = &self.qr_scan_result {
ui.vertical_centered(|ui| {
ui.label(RichText::new(result).size(16.0).color(Colors::INACTIVE_TEXT));
});
} else if let Some(result) = self.camera_content.qr_scan_result() {
// TODO: Parse result to show formatted value.
cb.stop_camera();
self.camera_content.clear_state();
self.qr_scan_result = Some(result);
} else {
self.camera_content.ui(ui, cb);
}
ui.add_space(6.0);
ui.vertical_centered_justified(|ui| {
View::button(ui, t!("modal.cancel"), Colors::WHITE, || {
cb.stop_camera();
modal.close();
});
});
ui.add_space(6.0);
}
/// Draw tab buttons in the bottom of the screen.
fn tabs_ui(&mut self, ui: &mut egui::Ui, wallet: &Wallet) {
ui.scope(|ui| {

View file

@@ -400,7 +400,6 @@ impl WalletTransport {
r_success.clone()
}
/// Draw amount input [`Modal`] content to send over Tor.
/// Draw amount input [`Modal`] content to send over Tor.
fn send_tor_modal_ui(&mut self,
ui: &mut egui::Ui,