
I'm new to Xamarin and .NET MAUI. I've been working on an app, and now I need a way to access the camera feed.

I want to be able to preview the camera within the app (Android first) and capture an image when a button is pressed. I have it working with the native camera app (media picker), but that isn't what fulfils my requirements.

I've been trying to use handlers with Android CameraX to get a preview view into the layout, but I just can't crack it; there's hardly any guidance on building one from the ground up. Does anyone have experience with this who might be able to point me in the right direction?

1 Comment
    Please provide enough code so others can better understand or reproduce the problem. Commented Jul 8, 2022 at 13:37

5 Answers


Here is the iOS implementation:

#nullable enable using System.Diagnostics; using System.Runtime.InteropServices; using AVFoundation; using CoreGraphics; using CoreVideo; using Foundation; using Microsoft.Maui.Controls; using UIKit; using VideoDemos.Controls; using WoundMatrixSmall.DependencyServices.MultiTargeting; using WoundMatrixSmall.Utilities; using WoundMatrixSmall.ViewModels; using WoundMatrixSmall.Views; namespace VideoDemos.Platforms.MaciOS { public class CameraPersonViewer : UIView, IAVCapturePhotoCaptureDelegate { private AVCaptureSession captureSession; private AVCaptureDeviceInput captureDeviceInput; private AVCapturePhotoOutput capturePhotoOutput; private UITextView textViewDelayCountdown; private UIView liveCameraStream; private UIButton takePhotoButton, cancelCameraButton, takeDelayedPhotoButton, toggleCameraButton, toggleFlashButton; private UIImageView autoFocusImage; private UIImageView orientationImage; private CameraPerson currentCameraPerson; int timeSpan = 0; private bool flashOn = false; double rectangleHeight = 0; double rectangleWidth = 0; double rectangleMargin = 0; public CameraPersonViewer(CameraPerson cameraPerson) { currentCameraPerson = cameraPerson; try { SetupUserInterface(); SetupEventHandlers(); SetupLiveCameraStream(); AuthorizeCameraUse(); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(@" ERROR: ", ex.Message); } } public void SetupUserInterface() { try { NFloat centerButtonX = UIScreen.MainScreen.Bounds.GetMidX(); NFloat centerButtonY = UIScreen.MainScreen.Bounds.GetMidY(); NFloat topLeftX = UIScreen.MainScreen.Bounds.X + 10; NFloat topRightX = UIScreen.MainScreen.Bounds.Right - 10; NFloat bottomButtonY = UIScreen.MainScreen.Bounds.Bottom - 10; NFloat topButtonY = UIScreen.MainScreen.Bounds.Top + 10; int buttonWidth = 0; int buttonHeight = 0; NFloat centerX = UIScreen.MainScreen.Bounds.GetMidX(); NFloat centerY = UIScreen.MainScreen.Bounds.GetMidY(); if (Device.Idiom == TargetIdiom.Tablet) { rectangleHeight = 325; rectangleWidth = 325; rectangleMargin = 162; buttonWidth = 70; buttonHeight = 70; } else if (Device.Idiom == TargetIdiom.Phone) { rectangleHeight = 225; rectangleWidth = 225; rectangleMargin = 112; buttonWidth = 60; buttonHeight = 60; } liveCameraStream = new UIView() { Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Height) }; if (liveCameraStream != null) { // Rotate image to the correct display orientation liveCameraStream.Transform = CGAffineTransform.MakeRotation((float)Math.PI * 1.5F); liveCameraStream.Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Height); } textViewDelayCountdown = new UITextView() { Frame = new CGRect(centerButtonX - 30, centerButtonY - 75, 110, 110) }; textViewDelayCountdown.TextColor = UIColor.FromRGB(96, 166, 67); textViewDelayCountdown.BackgroundColor = UIColor.FromRGBA(255, 255, 255, 0); textViewDelayCountdown.Font = UIFont.SystemFontOfSize(100f); UIImage uiImage = new UIImage("icon_focus_off.png"); autoFocusImage = new UIImageView(uiImage) { Frame = new CGRect(centerX - rectangleMargin, centerY - rectangleMargin, rectangleHeight, rectangleWidth) }; if (currentCameraPerson.ImageType != CameraPerson.IMAGE_TYPE.Profile) { UIImage orientImage = new UIImage("orientationimage.png"); orientationImage = new UIImageView(orientImage) { Frame = new CGRect(5, (centerY - 185), 200, 375) }; } cancelCameraButton = new UIButton() { Frame = new CGRect(topRightX - 100, centerButtonY + 120, buttonWidth, buttonHeight) }; 
cancelCameraButton.SetBackgroundImage(UIImage.FromFile("icon_camera_close.png"), UIControlState.Normal); toggleFlashButton = new UIButton() { Frame = new CGRect(topRightX - 100, centerButtonY + 45, buttonWidth, buttonHeight) }; toggleFlashButton.SetBackgroundImage(UIImage.FromFile("icon_camera_flash_off.png"), UIControlState.Normal); takePhotoButton = new UIButton() { Frame = new CGRect(topRightX - 100, centerButtonY - 30, buttonWidth, buttonHeight) }; takePhotoButton.SetBackgroundImage(UIImage.FromFile("icon_camera_take.png"), UIControlState.Normal); takeDelayedPhotoButton = new UIButton() { Frame = new CGRect(topRightX - 100, centerButtonY - 105, buttonWidth, buttonHeight) }; takeDelayedPhotoButton.SetBackgroundImage(UIImage.FromFile("icon_camera_take_timer.png"), UIControlState.Normal); toggleCameraButton = new UIButton() { Frame = new CGRect(topRightX - 100, centerButtonY - 180, buttonWidth, buttonHeight) }; toggleCameraButton.SetBackgroundImage(UIImage.FromFile("icon_camera_switch.png"), UIControlState.Normal); UIApplication.SharedApplication.StatusBarHidden = true; this.Add(liveCameraStream); this.Add(textViewDelayCountdown); this.Add(takePhotoButton); this.Add(takeDelayedPhotoButton); this.Add(toggleCameraButton); this.Add(toggleFlashButton); this.Add(cancelCameraButton); this.Add(autoFocusImage); if (currentCameraPerson.ImageType == CameraPerson.IMAGE_TYPE.Wound) { this.Add(orientationImage); } } catch (Exception ex) { Debug.WriteLine(ex); } } public void SetupEventHandlers() { takePhotoButton.TouchUpInside += (object? sender, EventArgs e) => { CapturePhoto(); }; toggleCameraButton.TouchUpInside += (object? sender, EventArgs e) => { ToggleFrontBackCamera(); }; cancelCameraButton.TouchUpInside += async (object? sender, EventArgs e) => { await CancelCamera(false); }; toggleFlashButton.TouchUpInside += (object? sender, EventArgs e) => { ToggleFlash(); }; takeDelayedPhotoButton.TouchUpInside += (object? sender, EventArgs e) => { DelayCaptureTimerStart(); }; } public async Task CancelCamera(bool isAccepted) { try { captureSession.StopRunning(); UIApplication.SharedApplication.StatusBarHidden = false; if (!isAccepted) { Device.BeginInvokeOnMainThread(async () => { //INavigation mainNav = Application.Current.MainPage.Navigation; //mainNav.RemovePage(mainNav.NavigationStack[mainNav.NavigationStack.Count - 2]); await Application.Current.MainPage.Navigation.PopAsync(); }); OrientationService.ForcePortrait(); // Give the phone a chance to rotate the screen before opening the view up. await Task.Delay(250); } } catch (Exception ex) { //throw ex; } } [Export("captureOutput:didFinishProcessingPhoto:error:")] public virtual async void DidFinishProcessingPhoto(AVCapturePhotoOutput captureOutput, AVCapturePhoto photo, NSError error) { if (error != null) { Console.WriteLine($"Error capturing photo: {error}", error); return; } NSData? jpegImage = photo.FileDataRepresentation; UIImage? 
photoX = new UIImage(jpegImage); byte[] myByteArray = new byte[jpegImage.Length]; System.Runtime.InteropServices.Marshal.Copy(jpegImage.Bytes, myByteArray, 0, Convert.ToInt32(jpegImage.Length)); string captureDate = CommonUtils.ConvertDateToLong(DateTime.Now).ToString(); if (currentCameraPerson.CameraType == CameraPerson.CAMERA_TYPE.Forward) { myByteArray = RotateImage(photoX, 180); } await CancelCamera(true); if (currentCameraPerson.ImageType == CameraPerson.IMAGE_TYPE.Wound) { await ShowConfirmWoundImage(myByteArray, captureDate); } else { await ShowProfileImage(myByteArray, captureDate); } } public async void CapturePhoto() { try { SetFlashOnStart(); // Camera facing Type AVCaptureDevicePosition devicePosition = captureDeviceInput.Device.Position; AVCaptureDevice device = GetCameraForOrientation(devicePosition); ConfigureCameraForDevice(device); await Task.Delay(500); AVCapturePhotoSettings settings = AVCapturePhotoSettings.Create(); AVCaptureConnection? videoConnection = capturePhotoOutput.ConnectionFromMediaType(new NSString(AVMediaTypes.Video.ToString())); capturePhotoOutput.CapturePhoto(settings, this); // Above line calls DidFinishProcessingPhoto Called } catch (Exception ex) { Console.Write(ex.Message); } } private async Task ShowConfirmWoundImage(byte[] myByteArray, string captureDate) { await Task.Run(() => { ConfirmWoundImageViewModel vm = new ConfirmWoundImageViewModel(myByteArray, currentCameraPerson.PatientId, currentCameraPerson.CameraType, currentCameraPerson.LocalId, captureDate); ConfirmWoundImagePage vw = new ConfirmWoundImagePage { BindingContext = vm }; Device.BeginInvokeOnMainThread(async () => { await Application.Current.MainPage.Navigation.PushAsync(vw); }); }); } private async Task ShowProfileImage(byte[] myByteArray, string captureDate) { await Task.Run(() => { ConfirmPatientImageViewModel vm = new ConfirmPatientImageViewModel(myByteArray, currentCameraPerson.PatientId); ConfirmPatientImagePage vw = new ConfirmPatientImagePage { BindingContext = vm }; Device.BeginInvokeOnMainThread(async () => { await Application.Current.MainPage.Navigation.PushAsync(vw); }); }); } public void ToggleFrontBackCamera() { AVCaptureDevicePosition devicePosition = captureDeviceInput.Device.Position; if (devicePosition == AVCaptureDevicePosition.Front) { currentCameraPerson.CameraType = CameraPerson.CAMERA_TYPE.Back; devicePosition = AVCaptureDevicePosition.Back; } else { currentCameraPerson.CameraType = CameraPerson.CAMERA_TYPE.Forward; devicePosition = AVCaptureDevicePosition.Front; } AVCaptureDevice device = GetCameraForOrientation(devicePosition); //ConfigureCameraForDevice(device); captureSession.BeginConfiguration(); captureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); captureSession.AddInput(captureDeviceInput); captureSession.CommitConfiguration(); } public void SetFlashOnStart() { AVCaptureDevice device = captureDeviceInput.Device; NSError error = new NSError(); if (device.HasFlash) { if (flashOn) { device.LockForConfiguration(out error); device.FlashMode = AVCaptureFlashMode.On; device.UnlockForConfiguration(); } else { device.LockForConfiguration(out error); device.FlashMode = AVCaptureFlashMode.Off; device.UnlockForConfiguration(); } } } public void ToggleFlash() { try { AVCaptureDevice device = captureDeviceInput.Device; NSError error = new NSError(); if (device.HasFlash) { if (flashOn) { flashOn = false; toggleFlashButton.SetBackgroundImage(UIImage.FromFile("icon_camera_flash_off.png"), UIControlState.Normal); } 
else { flashOn = true; toggleFlashButton.SetBackgroundImage(UIImage.FromFile("icon_camera_flash_on.png"), UIControlState.Normal); } } else { flashOn = false; toggleFlashButton.SetBackgroundImage(UIImage.FromFile("icon_camera_flash_off.png"), UIControlState.Normal); } } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("@Error: " + ex.Message); } } public byte[] RotateImage(UIImage image, float degree) { float Radians = degree * (float)Math.PI / 180; UIView view = new UIView(frame: new CGRect(0, 0, image.Size.Width, image.Size.Height)); CGAffineTransform t = CGAffineTransform.MakeRotation(Radians); view.Transform = t; CGSize size = view.Frame.Size; UIGraphics.BeginImageContext(size); CGContext context = UIGraphics.GetCurrentContext(); context.TranslateCTM(size.Width / 2, size.Height / 2); context.RotateCTM(Radians); context.ScaleCTM(1, -1); context.DrawImage(new CGRect(-image.Size.Width / 2, -image.Size.Height / 2, image.Size.Width, image.Size.Height), image.CGImage); UIImage imageCopy = UIGraphics.GetImageFromCurrentImageContext(); UIGraphics.EndImageContext(); byte[] rotatedBytes; using (NSData imageData = imageCopy.AsPNG()) { byte[] myByteArray = new Byte[imageData.Length]; System.Runtime.InteropServices.Marshal.Copy(imageData.Bytes, myByteArray, 0, Convert.ToInt32(imageData.Length)); rotatedBytes = myByteArray; } return rotatedBytes; } public void DelayCaptureTimerStart() { timeSpan = 5; NSTimer timer = NSTimer.CreateRepeatingTimer(1, t => { if (!OnTimerTick()) t.Invalidate(); }); NSRunLoop.Main.AddTimer(timer, NSRunLoopMode.Common); } bool OnTimerTick() { // Set the Text property of the Label. while (timeSpan > 0) { textViewDelayCountdown.Text = timeSpan.ToString(); timeSpan--; return true; } textViewDelayCountdown.Text = ""; CapturePhoto(); return false; } public void SetupLiveCameraStream() { try { captureSession = new AVCaptureSession(); CoreAnimation.CALayer viewLayer = liveCameraStream.Layer; AVCaptureVideoPreviewLayer videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession) { Frame = liveCameraStream.Bounds }; videoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill; liveCameraStream.Layer.AddSublayer(videoPreviewLayer); #pragma warning disable CS0618 // Type or member is obsolete AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video); #pragma warning restore CS0618 // Type or member is obsolete //ConfigureCameraForDevice(captureDevice); captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice); NSMutableDictionary dictionary = new NSMutableDictionary(); dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG); capturePhotoOutput = new AVCapturePhotoOutput(); captureSession.AddOutput(capturePhotoOutput); captureSession.AddInput(captureDeviceInput); captureSession.StartRunning(); // Camera facing Type AVCaptureDevicePosition devicePosition = captureDeviceInput.Device.Position; if (currentCameraPerson.CameraType == CameraPerson.CAMERA_TYPE.Back) { devicePosition = AVCaptureDevicePosition.Back; } else { devicePosition = AVCaptureDevicePosition.Front; } AVCaptureDevice device = GetCameraForOrientation(devicePosition); //ConfigureCameraForDevice(device); captureSession.BeginConfiguration(); captureSession.RemoveInput(captureDeviceInput); captureDeviceInput = AVCaptureDeviceInput.FromDevice(device); captureSession.AddInput(captureDeviceInput); captureSession.CommitConfiguration(); //End } catch (Exception ex) { Console.WriteLine(ex.Message); } } public AVCaptureDevice? 
GetCameraForOrientation(AVCaptureDevicePosition orientation) { if (currentCameraPerson.CameraType == CameraPerson.CAMERA_TYPE.Forward) { AVCaptureDeviceDiscoverySession discoverySession = AVCaptureDeviceDiscoverySession.Create(new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera }, AVMediaTypes.Video, AVCaptureDevicePosition.Front); foreach (AVCaptureDevice device in discoverySession.Devices) { if (device.Position == orientation) { return device; } } } else { return AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video); } return null; } public void ConfigureCameraForDevice(AVCaptureDevice device) { NFloat centerX = UIScreen.MainScreen.Bounds.GetMidX(); NFloat centerY = UIScreen.MainScreen.Bounds.GetMidY(); NSError error = new NSError(); if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus)) { device.LockForConfiguration(out error); device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus; device.UnlockForConfiguration(); UIImage uiImage = new UIImage("icon_focus_on"); autoFocusImage = new UIImageView(uiImage) { Frame = new CGRect(centerX - rectangleMargin, centerY - rectangleMargin, rectangleHeight, rectangleWidth) }; this.Add(autoFocusImage); } else if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure)) { device.LockForConfiguration(out error); device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure; device.UnlockForConfiguration(); UIImage uiImage = new UIImage("icon_focus_on"); autoFocusImage = new UIImageView(uiImage) { Frame = new CGRect(centerX - rectangleMargin, centerY - rectangleMargin, rectangleHeight, rectangleWidth) }; this.Add(autoFocusImage); } else if (device.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance)) { device.LockForConfiguration(out error); device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance; device.UnlockForConfiguration(); UIImage uiImage = new UIImage("icon_focus_on"); autoFocusImage = new UIImageView(uiImage) { Frame = new CGRect(centerX - rectangleMargin, centerY - rectangleMargin, rectangleHeight, rectangleWidth) }; this.Add(autoFocusImage); } } public async void AuthorizeCameraUse() { AVAuthorizationStatus authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVAuthorizationMediaType.Video); if (authorizationStatus != AVAuthorizationStatus.Authorized) { await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVAuthorizationMediaType.Video); } } protected override void Dispose(bool disposing) { if (captureDeviceInput != null && captureSession != null) { captureSession.RemoveInput(captureDeviceInput); } if (captureDeviceInput != null) { captureDeviceInput.Dispose(); } if (captureSession != null) { captureSession.StopRunning(); captureSession.Dispose(); } if (capturePhotoOutput != null) { capturePhotoOutput.Dispose(); } base.Dispose(disposing); } } } 
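
To show this platform view from shared code it still has to be wired up through a handler registered at startup. Below is a minimal sketch of that registration, assuming a cross-platform CameraPerson control and a CameraPersonHandler like the ones in the other answers on this question (those names come from this sample, not from MAUI itself). On iOS you also need an NSCameraUsageDescription entry in Info.plist, or the capture session will be denied camera access.

// MauiProgram.cs -- minimal sketch. Only ConfigureMauiHandlers/AddHandler are
// standard .NET MAUI APIs; CameraPerson and CameraPersonHandler are the types
// from the other answers, and App is your MAUI Application class. Other usings
// are covered by the MAUI project's implicit global usings.
using VideoDemos.Controls;
using VideoDemos.Handlers;

public static class MauiProgram
{
    public static MauiApp CreateMauiApp()
    {
        MauiAppBuilder builder = MauiApp.CreateBuilder();

        builder
            .UseMauiApp<App>()
            .ConfigureMauiHandlers(handlers =>
            {
                // Map the cross-platform control to its platform handler.
                handlers.AddHandler<CameraPerson, CameraPersonHandler>();
            });

        return builder.Build();
    }
}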



I ended up creating a custom handler for MAUI and building my own view with native Android code, written in C#. Then I used an interface to call back to the handler when an image had been taken.
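
A rough sketch of that callback pattern, assuming the native view is handed something it can invoke once a photo has been captured (the names here are illustrative, not the actual project code):

// Illustrative sketch only -- the names are made up, not taken from the real project.
public interface ICameraCaptureCallback
{
    void OnImageCaptured(byte[] imageBytes);
}

// The cross-platform control implements the callback and surfaces it as an event.
public class CameraPreview : View, ICameraCaptureCallback
{
    public event EventHandler<byte[]>? ImageCaptured;

    // The handler passes this control (as ICameraCaptureCallback) to the native
    // Android view, which calls back here after the picture has been taken.
    public void OnImageCaptured(byte[] imageBytes) =>
        ImageCaptured?.Invoke(this, imageBytes);
}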

1 Comment

As it’s currently written, your answer is unclear. Please edit to add additional details that will help others understand how this addresses the question asked. You can find more information on how to write good answers in the help center.

You can try to use the .NET Multi-platform App UI (.NET MAUI) IMediaPicker interface. This interface lets a user pick or take a photo or video on the device.

To take a photo, use code like this:

FileResult photo = await MediaPicker.Default.CapturePhotoAsync(); 
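
A slightly fuller sketch that checks whether capture is supported and copies the result into the app's cache folder (the method name is just for illustration; MediaPicker, FileResult and FileSystem are the standard .NET MAUI types):

// Sketch: capture a photo with the built-in picker and save it locally.
public async Task<string?> CaptureAndSavePhotoAsync()
{
    // Capture isn't available on every platform/device (e.g. a simulator).
    if (!MediaPicker.Default.IsCaptureSupported)
        return null;

    FileResult? photo = await MediaPicker.Default.CapturePhotoAsync();
    if (photo == null)
        return null; // the user cancelled

    // Copy the captured image into the app's cache directory.
    string localPath = Path.Combine(FileSystem.CacheDirectory, photo.FileName);
    using Stream source = await photo.OpenReadAsync();
    using FileStream target = File.OpenWrite(localPath);
    await source.CopyToAsync(target);

    return localPath;
}

Note that this launches the system camera UI rather than giving you an in-app preview.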

1 Comment

I think OP is looking for a CameraView, not a way to open the camera app.

Note that you'll also need a generic (cross-platform) handler like this:

#if IOS || MACCATALYST
using PlatformView = VideoDemos.Platforms.MaciOS.CameraPersonViewer;
#elif ANDROID
using PlatformView = VideoDemos.Platforms.Android.CameraPersonViewer;
#elif (NETSTANDARD || !PLATFORM) || (NET6_0_OR_GREATER && !IOS && !ANDROID)
using PlatformView = System.Object;
#endif
using VideoDemos.Controls;
using Microsoft.Maui.Handlers;

namespace VideoDemos.Handlers
{
    public partial class CameraPersonHandler
    {
        public static IPropertyMapper<CameraPerson, CameraPersonHandler> PropertyMapper =
            new PropertyMapper<CameraPerson, CameraPersonHandler>(ViewHandler.ViewMapper)
            {
            };

        public static CommandMapper<CameraPerson, CameraPersonHandler> CommandMapper =
            new(ViewCommandMapper)
            {
            };

        public CameraPersonHandler() : base(PropertyMapper, CommandMapper)
        {
        }
    }
}

And you'll need platform-specific handlers. The iOS one follows the same pattern as the Android handler below, but creates the MaciOS CameraPersonViewer (which takes only the CameraPerson in its constructor), so something like:

#nullable enable
using Microsoft.Maui.Handlers;
using VideoDemos.Controls;
using VideoDemos.Platforms.MaciOS;

namespace VideoDemos.Handlers
{
    public partial class CameraPersonHandler : ViewHandler<CameraPerson, CameraPersonViewer>
    {
        public CameraPersonHandler(IPropertyMapper mapper, CommandMapper? commandMapper = null) : base(mapper, commandMapper)
        {
        }

        protected override CameraPersonViewer CreatePlatformView() => new CameraPersonViewer(VirtualView);

        protected override void ConnectHandler(CameraPersonViewer platformView)
        {
            base.ConnectHandler(platformView);
            // Perform any control setup here
        }

        protected override void DisconnectHandler(CameraPersonViewer platformView)
        {
            platformView.Dispose();
            base.DisconnectHandler(platformView);
        }
    }
}

Here is the Android-specific handler:

#nullable enable
using Microsoft.Maui.Handlers;
using VideoDemos.Controls;
using VideoDemos.Platforms.Android;

namespace VideoDemos.Handlers
{
    public partial class CameraPersonHandler : ViewHandler<CameraPerson, CameraPersonViewer>
    {
        public CameraPersonHandler(IPropertyMapper mapper, CommandMapper? commandMapper = null) : base(mapper, commandMapper)
        {
        }

        protected override CameraPersonViewer CreatePlatformView() => new CameraPersonViewer(Context, VirtualView);

        protected override void ConnectHandler(CameraPersonViewer platformView)
        {
            base.ConnectHandler(platformView);
            // Perform any control setup here
        }

        protected override void DisconnectHandler(CameraPersonViewer platformView)
        {
            platformView.Dispose();
            base.DisconnectHandler(platformView);
        }
    }
}

Note that you'll have to supply your own PNG files and XML layout files for your desired overlay.
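
Once the handler is registered in MauiProgram via ConfigureMauiHandlers, the control can be used like any other view. A minimal sketch of hosting it on a page (the property values are just examples):

// Sketch: push a page that hosts the CameraPerson control full-screen.
// Assumes handlers.AddHandler<CameraPerson, CameraPersonHandler>() has been
// called in MauiProgram; the property values below are only examples.
var cameraPage = new ContentPage
{
    Content = new CameraPerson
    {
        ImageType = CameraPerson.IMAGE_TYPE.Wound,
        CameraType = CameraPerson.CAMERA_TYPE.Back,
        PatientId = 1
    }
};

await Navigation.PushAsync(cameraPage);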



I have this working on Android and iOS. I thought I should post it here in case anyone else needs it. There are some catch-22s that I'll mention later.

In this first code block you'll see the code for the View. I've called it CameraPerson because there is a person image overlay. This code goes in the root folder or another non-platform folder; I put mine in a folder called MultiTargeting.

using System.ComponentModel;

public class CameraPerson : View
{
    public CameraPerson()
    {
    }

    public enum IMAGE_TYPE { Profile, Wound }

    public enum CAMERA_TYPE { Forward, Back }

    public static readonly BindableProperty ImageTypeProperty =
        BindableProperty.Create(nameof(ImageType), typeof(IMAGE_TYPE), typeof(CameraPerson), IMAGE_TYPE.Wound);

    public static readonly BindableProperty PatientIdProperty =
        BindableProperty.Create(nameof(PatientId), typeof(int), typeof(CameraPerson), 0);

    public static readonly BindableProperty CameraTypeProperty =
        BindableProperty.Create(nameof(CameraType), typeof(CAMERA_TYPE), typeof(CameraPerson), CAMERA_TYPE.Back);

    public static readonly BindableProperty LocalIdProperty =
        BindableProperty.Create(nameof(LocalId), typeof(long), typeof(CameraPerson), null);

    public IMAGE_TYPE ImageType
    {
        get { return (IMAGE_TYPE)GetValue(ImageTypeProperty); }
        set { SetValue(ImageTypeProperty, value); }
    }

    public int PatientId
    {
        get { return (int)GetValue(PatientIdProperty); }
        set { SetValue(PatientIdProperty, value); }
    }

    public CAMERA_TYPE CameraType
    {
        get { return (CAMERA_TYPE)GetValue(CameraTypeProperty); }
        set { SetValue(CameraTypeProperty, value); }
    }

    public long LocalId
    {
        get { return (long)GetValue(LocalIdProperty); }
        set { SetValue(LocalIdProperty, value); }
    }
}

In the next code block you'll see the Android implementation. I placed this code in a folder called NativeViews; it must go somewhere under your Platforms/Android/ folder.

using Android.App; using Android.Content; using Android.Content.PM; using Android.Graphics; using Android.Hardware; using Android.Nfc; using Android.Views; using Android.Widget; using AndroidX.CoordinatorLayout.Widget; using Microsoft.Maui.Controls.Compatibility; using Microsoft.Maui.Controls.Platform; using VideoDemos.Controls; using Application = Microsoft.Maui.Controls.Application; using Resource = WoundMatrixSmall.Resource; namespace VideoDemos.Platforms.Android { public class CameraPersonViewer : CoordinatorLayout, TextureView.ISurfaceTextureListener, global::Android.Hardware.Camera.IAutoFocusCallback { private global::Android.Hardware.Camera camera; private global::Android.Widget.Button buttonTakePhoto; private global::Android.Widget.Button buttonCameraClose; private global::Android.Widget.Button buttonCameraFlash; private global::Android.Widget.Button buttonSwitchCam; private global::Android.Widget.Button buttonCameraTakePhotoTimer; private global::Android.Widget.ImageView imageViewFrameWoundImage; private global::Android.Widget.FrameLayout frmCamera; global::Android.Widget.TextView textViewDelayCountdown; private global::Android.Views.View view; private Activity activity; private WindowManagerFlags _originalFlags; private CameraFacing cameraType; private TextureView textureView; private SurfaceTexture surfaceTexture; int timeSpan = 0; private bool flashOn; private bool cameraPreviewStatus = false; private CameraPerson CurrentCameraPerson = null; public CameraPersonViewer(Context context, CameraPerson cameraPerson) : base(context) { CurrentCameraPerson = cameraPerson; try { SetupUserInterface(); SetupEventHandlers(); AddView(view); } catch (Java.Lang.Exception ex) { System.Diagnostics.Debug.WriteLine(@"ERROR: ", ex.Message); } } private void SetupUserInterface() { activity = Microsoft.Maui.ApplicationModel.Platform.CurrentActivity; WindowManagerLayoutParams attrs = activity.Window.Attributes; _originalFlags = attrs.Flags; attrs.Flags |= WindowManagerFlags.Fullscreen; activity.Window.Attributes = attrs; view = activity.LayoutInflater.Inflate(Resource.Layout.woundcameracontrol, this, false); if (CurrentCameraPerson.CameraType == CameraPerson.CAMERA_TYPE.Forward) cameraType = CameraFacing.Front; else cameraType = CameraFacing.Back; textureView = view.FindViewById<TextureView>(Resource.Id.textureView); textureView.SurfaceTextureListener = this; } private void SetupEventHandlers() { buttonTakePhoto = view.FindViewById<global::Android.Widget.Button>(Resource.Id.buttonCameraTakePhoto); buttonTakePhoto.Click += CaptureCameraClick; buttonCameraClose = view.FindViewById<global::Android.Widget.Button>(Resource.Id.buttonCameraClose); buttonCameraClose.Click += CancelCameraClick; buttonCameraFlash = view.FindViewById<global::Android.Widget.Button>(Resource.Id.buttonCameraFlash); buttonCameraFlash.Click += FlashCameraClick; buttonSwitchCam = view.FindViewById<global::Android.Widget.Button>(Resource.Id.buttonSwitchCam); buttonSwitchCam.Click += RotateCameraClick; buttonCameraTakePhotoTimer = view.FindViewById<global::Android.Widget.Button>(Resource.Id.buttonCameraTakePhotoTimer); buttonCameraTakePhotoTimer.Click += StartTimer; frmCamera = view.FindViewById<global::Android.Widget.FrameLayout>(Resource.Id.frmCamera); textViewDelayCountdown = view.FindViewById<global::Android.Widget.TextView>(Resource.Id.textViewDelayCountdown); } protected override void OnLayout(bool changed, int l, int t, int r, int b) { base.OnLayout(changed, l, t, r, b); int msw = MeasureSpec.MakeMeasureSpec(r - l, 
MeasureSpecMode.Exactly); int msh = MeasureSpec.MakeMeasureSpec(b - t, MeasureSpecMode.Exactly); view.Measure(msw, msh); view.Layout(0, 0, r - l, b - t); } public void OnSurfaceTextureUpdated(SurfaceTexture surface) { if (cameraPreviewStatus) SetFocus(); } public void OnSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { try { camera = global::Android.Hardware.Camera.Open((int)cameraType); textureView.LayoutParameters = new FrameLayout.LayoutParams((width), (height)); surfaceTexture = surface; camera.SetPreviewTexture(surface); PrepareAndStartCamera(); } catch (System.Exception) { } } public bool OnSurfaceTextureDestroyed(SurfaceTexture surface) { try { if (camera != null) { camera.StopPreview(); camera.Release(); camera = null; } } catch (System.Exception ex) { Console.WriteLine("Exception occur OnSurfaceTextureDestroyed :", ex.Message.ToString()); } return true; } public void OnSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { PrepareAndStartCamera(); } private void PrepareAndStartCamera() { camera.StopPreview(); Display display = activity.WindowManager.DefaultDisplay; if (display.Rotation == SurfaceOrientation.Rotation0) { camera.SetDisplayOrientation(90); } if (display.Rotation == SurfaceOrientation.Rotation270) { camera.SetDisplayOrientation(180); } cameraPreviewStatus = true; SetFocus(); camera.StartPreview(); } private void StartTimer(object sender, EventArgs e) { try { timeSpan = 5; Device.StartTimer(TimeSpan.FromSeconds(1), OnTimerTick); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(ex.Message); } } private bool OnTimerTick() { try { while (timeSpan > 0) { textViewDelayCountdown.Text = timeSpan.ToString(); timeSpan--; return true; } textViewDelayCountdown.Visibility = ViewStates.Gone; imageViewFrameWoundImage = view.FindViewById<global::Android.Widget.ImageView>(Resource.Id.imageViewFrameWoundImage); imageViewFrameWoundImage.SetImageResource(Resource.Drawable.icon_focus_on); Torch(flashOn); System.Threading.Thread.Sleep(500); camera.AutoFocus(this); return false; } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(ex.Message); return false; } } public void SetFocus() { global::Android.Hardware.Camera.Parameters p = camera.GetParameters(); IList<string> supportedFocusMode = p.SupportedFocusModes; if (supportedFocusMode != null) { try { p.FocusMode = global::Android.Hardware.Camera.Parameters.FocusModeAuto; camera.SetParameters(p); camera.StartPreview(); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(ex.Message); ; } } } private async void CapturedImage() { try { byte[] mainBytes; cameraPreviewStatus = false; camera.StopPreview(); Bitmap image = textureView.Bitmap; byte[] bytes; using (MemoryStream stream = new MemoryStream()) { image.Compress(Bitmap.CompressFormat.Jpeg, 100, stream); bytes = stream.ToArray(); } mainBytes = bytes; await Task.Run(() => { mainBytes = RotateImages(bytes); }); WindowManagerLayoutParams attrs = activity.Window.Attributes; attrs.Flags = _originalFlags; activity.Window.Attributes = attrs; camera.Release(); camera = null; string captureDate = CommonUtils.ConvertDateToLong(DateTime.Now).ToString(); await Application.Current.MainPage.Navigation.PopAsync(); if (CurrentCameraPerson.ImageType == CameraPerson.IMAGE_TYPE.Wound) { await ShowConfirmWoundImage(mainBytes, captureDate); } else { await ShowProfileImage(mainBytes); } } catch (Java.Lang.Exception ex) { System.Diagnostics.Debug.WriteLine(ex.Message); } } private async Task ShowProfileImage(byte[] myByteArray) { await 
Task.Run(() => { ConfirmPatientImageViewModel vm = new ConfirmPatientImageViewModel(myByteArray, CurrentCameraPerson.PatientId); ConfirmPatientImagePage vw = new ConfirmPatientImagePage { BindingContext = vm }; Device.BeginInvokeOnMainThread(async () => { await Application.Current.MainPage.Navigation.PushAsync(vw); }); }); } private async Task ShowConfirmWoundImage(byte[] myByteArray, string captureDate) { await Task.Run(() => { ConfirmWoundImageViewModel vm = new ConfirmWoundImageViewModel(myByteArray, CurrentCameraPerson.PatientId, CameraPerson.CAMERA_TYPE.Back, CurrentCameraPerson.LocalId, captureDate); ConfirmWoundImagePage vw = new ConfirmWoundImagePage { BindingContext = vm }; Device.BeginInvokeOnMainThread(async () => { await Application.Current.MainPage.Navigation.PushAsync(vw); }); }); } private async void CancelCameraClick(object sender, EventArgs e) { try { OrientationService.ForcePortrait(); // Give the phone a chance to rotate the screen before opening the view up. await Task.Delay(250); WindowManagerLayoutParams attrs = activity.Window.Attributes; attrs.Flags = _originalFlags; activity.Window.Attributes = attrs; await Application.Current.MainPage.Navigation.PopModalAsync(); } catch (Java.Lang.Exception ex) { System.Diagnostics.Debug.WriteLine(@"ERROR: ", ex.Message); } } private async void CaptureCameraClick(object sender, EventArgs e) { try { imageViewFrameWoundImage = view.FindViewById<global::Android.Widget.ImageView>(Resource.Id.imageViewFrameWoundImage); imageViewFrameWoundImage.SetImageResource(Resource.Drawable.icon_focus_on); Torch(flashOn); await Task.Delay(500); camera.AutoFocus(this); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(ex.Message); } } private void FlashCameraClick(object sender, EventArgs e) { global::Android.Hardware.Camera.Parameters flashParameter = camera.GetParameters(); IList<string> supportedFlash = flashParameter.SupportedFlashModes; if (supportedFlash != null) { flashOn = !flashOn; SetFlashImage(flashOn); } else { flashOn = false; buttonCameraFlash.SetBackgroundResource(Resource.Drawable.icon_camera_flash_off); } } public void SetFlashImage(bool flashStatus) { try { if (flashStatus) { buttonCameraFlash.SetBackgroundResource(Resource.Drawable.icon_camera_flash_on); } else { buttonCameraFlash.SetBackgroundResource(Resource.Drawable.icon_camera_flash_off); } } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(@"ERROR: ", ex.Message); } } public void Torch(bool on) { if (!this.Context.PackageManager.HasSystemFeature(PackageManager.FeatureCameraFlash)) { global::Android.Util.Log.Info("WoundMatrix", "Flash not supported on this device"); return; } if (camera == null) { global::Android.Util.Log.Info("WoundMatrix", "NULL Camera"); return; } global::Android.Hardware.Camera.Parameters cameraParameter = camera.GetParameters(); IList<string> supportedFlashModes = cameraParameter.SupportedFlashModes; if (supportedFlashModes == null) supportedFlashModes = new List<string>(); string flashMode = string.Empty; if (on) { if (supportedFlashModes.Contains(global::Android.Hardware.Camera.Parameters.FlashModeTorch)) { flashMode = global::Android.Hardware.Camera.Parameters.FlashModeTorch; } else if (supportedFlashModes.Contains(global::Android.Hardware.Camera.Parameters.FlashModeOn)) { flashMode = global::Android.Hardware.Camera.Parameters.FlashModeOn; } } else { if (supportedFlashModes.Contains(global::Android.Hardware.Camera.Parameters.FlashModeOff)) { flashMode = global::Android.Hardware.Camera.Parameters.FlashModeOff; } } if 
(!string.IsNullOrEmpty(flashMode)) { cameraParameter.FlashMode = flashMode; camera.SetParameters(cameraParameter); } } private void RotateCameraClick(object sender, EventArgs e) { if (cameraType == CameraFacing.Front) { cameraType = CameraFacing.Back; camera.Release(); camera = global::Android.Hardware.Camera.Open((int)cameraType); camera.SetPreviewTexture(surfaceTexture); PrepareAndStartCamera(); } else { cameraType = CameraFacing.Front; camera.Release(); camera = global::Android.Hardware.Camera.Open((int)cameraType); camera.SetPreviewTexture(surfaceTexture); PrepareAndStartCamera(); } } public byte[] RotateImages(byte[] image) { Bitmap bmp = BitmapFactory.DecodeByteArray(image, 0, image.Length); try { if (bmp.Height > bmp.Width) { Matrix matrix = new Matrix(); matrix.PostRotate(-90); bmp = Bitmap.CreateBitmap(bmp, 0, 0, bmp.Width, bmp.Height, matrix, true); MemoryStream ms = new MemoryStream(); bmp.Compress(Bitmap.CompressFormat.Png, 100, ms); return ms.ToArray(); } else { return image; } } catch (Exception ex) { System.Diagnostics.Debug.WriteLine(ex); return image; } } public void OnAutoFocus(bool success, global::Android.Hardware.Camera camera) { try { camera.CancelAutoFocus(); global::Android.Hardware.Camera.Parameters parameters = camera.GetParameters(); if (parameters.SupportedFocusModes != null) { if (parameters.FocusMode != global::Android.Hardware.Camera.Parameters.FocusModeAuto) { parameters.FocusMode = global::Android.Hardware.Camera.Parameters.FocusModeAuto; try { camera.SetParameters(parameters); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("@Error : " + ex.Message); } camera.StartPreview(); cameraPreviewStatus = true; } } CapturedImage(); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("@Error : " + ex.Message); } } } } 
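
One thing this relies on but doesn't show: android.hardware.Camera.Open needs the CAMERA permission to be declared in AndroidManifest.xml and granted at runtime, so it's worth requesting it through MAUI's Permissions API before navigating to the camera page. A minimal sketch:

// Sketch: make sure the camera permission is granted before opening the view.
// Permissions.Camera and PermissionStatus are standard .NET MAUI types;
// cameraPage stands in for whatever page hosts the CameraPerson control.
PermissionStatus status = await Permissions.CheckStatusAsync<Permissions.Camera>();
if (status != PermissionStatus.Granted)
    status = await Permissions.RequestAsync<Permissions.Camera>();

if (status == PermissionStatus.Granted)
    await Navigation.PushAsync(cameraPage);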

4 Comments

What are the catch-22s you mention?
How about the iOS implementation, and what are the catches you mean?
How would I be able to record a video?
I have the iOS implementation and I'll post it when I have some time. The catch-22 I was talking about is that you have to name the XML files for the Android implementation in lower case, without any underscores, to get them to work. I wanted to post everything, but I couldn't because I reached the character limit for a post. I'll see if I can post the iOS implementation in a different reply later today if I have time.