
ios - How to pass data from flutter -> swift && swift -> flutter?


I'm working with Flutter and Swift to build a custom camera.

My Flutter version: flutter doctor -v

My Xcode version: XCode version

My camera screen looks like this: custom camera screen

I want to take a picture when the button is pressed. How can I do this?

Here is my code.

.dart file

class IOSCompositionWidget extends StatefulWidget {
  const IOSCompositionWidget({super.key});

  @override
  State<IOSCompositionWidget> createState() => _IOSCompositionWidgetState();
}

class _IOSCompositionWidgetState extends State<IOSCompositionWidget> {
  static const platformChannel =
      MethodChannel('com.vrin.methodchannel/cameraButton');

  dynamic nativePhoto;
  final String text = '';

  @override
  Widget build(BuildContext context) {
    Map<String, dynamic> creationParams = <String, dynamic>{};
    creationParams["text"] = text;

    return Scaffold(
      backgroundColor: Colors.black,
      body: Column(
        mainAxisAlignment: MainAxisAlignment.start,
        children: [
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.068,
          ),
          Padding(
            padding: EdgeInsets.only(
              left: MediaQuery.of(context).size.width * 0.062,
              right: MediaQuery.of(context).size.width * 0.038,
            ),
            child: Row(
              mainAxisAlignment: MainAxisAlignment.spaceBetween,
              children: [
                Row(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    SvgPicture.asset(
                      'images/help.svg',
                      height: 24,
                      width: 24,
                    ),
                    SizedBox(
                      width: MediaQuery.of(context).size.width * 0.026,
                    ),
                    SvgPicture.asset(
                      'images/timer.svg',
                      height: 24,
                      width: 24,
                    ),
                  ],
                ),
                Container(
                  height: MediaQuery.of(context).size.height * 0.028,
                  width: MediaQuery.of(context).size.width * 0.262,
                  color: Colors.white,
                ),
                SvgPicture.asset(
                  'images/close.svg',
                  height: 24,
                  width: 24,
                ),
              ],
            ),
          ),
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.017,
          ),
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.646,
            width: MediaQuery.of(context).size.width,
            child: UiKitView(
              viewType: 'NativeView',
              creationParams: creationParams,
              creationParamsCodec: const StandardMessageCodec(),
            ),
          ),
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.063,
          ),
          Row(
            mainAxisAlignment: MainAxisAlignment.center,
            children: [
              GestureDetector(
                onTap: () async {
                  print("pressed");
                  //TODO
                },
                child: Container(
                  height: 60,
                  width: 60,
                  decoration: BoxDecoration(
                    color: Colors.white,
                    borderRadius: BorderRadius.circular(50),
                  ),
                ),
              ),
            ],
          ),
        ],
      ),
    );
  }
}

My Swift code

AppDelegate.swift

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {

    override func application(
        _ application: UIApplication,
        didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
    ) -> Bool {
        if #available(iOS 12.0, *) {
            UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
        }

        let nativeViewFactory = NativeViewFactory()
        registrar(forPlugin: "Runner")!.register(nativeViewFactory, withId: "NativeView")

        UIApplication.shared.setMinimumBackgroundFetchInterval(TimeInterval(60 * 15))
        FirebaseApp.configure()
        GeneratedPluginRegistrant.register(with: self)

        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }
}

NativeViewFactory.swift

import Foundation
import Flutter
import UIKit
import AVFoundation


class NativeViewFactory: NSObject, FlutterPlatformViewFactory {

    private var nativeView: NativeView?
    private var messenger: FlutterBinaryMessenger?

    func createArgsCodec() -> FlutterMessageCodec & NSObjectProtocol {
        return FlutterStandardMessageCodec.sharedInstance()
    }

    override init() {
        super.init()
    }

    init(messenger: FlutterBinaryMessenger) {
        self.messenger = messenger
        super.init()
    }

    func create(withFrame frame: CGRect, viewIdentifier viewId: Int64, arguments args: Any?) -> FlutterPlatformView {
        self.nativeView = NativeView(
            frame: frame,
            viewIdentifier: viewId,
            arguments: args,
            binaryMessenger: messenger)
        return nativeView ?? NativeView(
            frame: frame,
            viewIdentifier: viewId,
            arguments: args,
            binaryMessenger: messenger)
    }
}

class NativeView: NSObject, FlutterPlatformView {

    var likeAction: (() -> Void)?
    private var returnView: UIView?
    var previewView: UIView!
    var boxView: UIView!
    let myButton: UIButton = UIButton()

    // Camera-capture required properties
    var videoDataOutput: AVCaptureVideoDataOutput!
    var videoDataOutputQueue: DispatchQueue!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var captureDevice: AVCaptureDevice!
    let session = AVCaptureSession()
    var photoOutput: AVCapturePhotoOutput?
    var imageData: Data?
    var depthData: AVDepthData?
    var depthDataMap: CVPixelBuffer?

    override init() {
        returnView = UIView()
        super.init()
    }

    init(
        frame: CGRect,
        viewIdentifier viewId: Int64,
        arguments args: Any?,
        binaryMessenger messenger: FlutterBinaryMessenger?
    ) {
        returnView = UIView()
        super.init()
        // iOS views can be created here
        createNativeView(view: returnView!, args: args)
    }

    func view() -> UIView {
        return returnView!
    }

    func receiveGyeomViewMethod() {
        print("receiveGyeomViewMethod")
    }

    @objc func onClickMyButton(sender: UIButton) {
        print("button pressed")
    }

    func createNativeView(view _view: UIView, args: Any?) {
        _view.backgroundColor = UIColor.black

        previewView = UIView(frame: CGRect(x: 0, y: -150, width: UIScreen.main.bounds.size.width, height: UIScreen.main.bounds.size.height))
        previewView.contentMode = UIView.ContentMode.scaleAspectFit
        view().addSubview(previewView)

        self.setupAVCapture()
    }
}

extension NativeView: AVCaptureVideoDataOutputSampleBufferDelegate {

    func setupAVCapture() {
        session.sessionPreset = AVCaptureSession.Preset.photo

        let devices = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera],
            mediaType: AVMediaType.video,
            position: .back).devices
        print(devices)
        captureDevice = devices.first

        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(captureDeviceInput)

            photoOutput = AVCapturePhotoOutput()
            // Alternative codec: AVVideoCodecType.jpeg
            photoOutput?.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])],
                completionHandler: nil)

            if session.canAddOutput(photoOutput!) {
                session.addOutput(photoOutput!)
                photoOutput!.isDepthDataDeliveryEnabled = photoOutput!.isDepthDataDeliverySupported
            }
        } catch {
            print(error)
        }

        beginSession()
    }

    func beginSession() {
        var deviceInput: AVCaptureDeviceInput!

        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            guard deviceInput != nil else {
                print("error: can't get deviceInput")
                return
            }

            if self.session.canAddInput(deviceInput) {
                self.session.addInput(deviceInput)
            }

            videoDataOutput = AVCaptureVideoDataOutput()
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
            videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)

            if session.canAddOutput(self.videoDataOutput) {
                session.addOutput(self.videoDataOutput)
            }
            videoDataOutput.connection(with: .video)?.isEnabled = true

            previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect

            let rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)

            session.startRunning()
        } catch let error as NSError {
            deviceInput = nil
            print("error: \(error.localizedDescription)")
        }
    }

    // Clean up AVCapture
    func stopCamera() {
        session.stopRunning()
    }
}

I'm building my custom camera, and I need to take a picture when the Flutter button is tapped.

Best Answer

I think you can create a Flutter Plugin for this functionality (for example, flutter create --template=plugin --platforms=ios would scaffold one), or wire the channels directly into your existing Runner code.

To call Swift from Flutter, you can use a MethodChannel.
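For example, here is a minimal sketch of the Dart side, reusing the platformChannel you already declared; the method name 'takePicture' is hypothetical and just has to match whatever the Swift side registers (PlatformException comes from package:flutter/services.dart, which you already import for MethodChannel):

GestureDetector(
  onTap: () async {
    try {
      // Ask the native side to capture a photo.
      // 'takePicture' is a hypothetical method name.
      await platformChannel.invokeMethod('takePicture');
    } on PlatformException catch (e) {
      print('takePicture failed: ${e.message}');
    }
  },
  child: Container(
    height: 60,
    width: 60,
    decoration: BoxDecoration(
      color: Colors.white,
      borderRadius: BorderRadius.circular(50),
    ),
  ),
)

This would replace the //TODO in your onTap handler.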

To send results from Swift back to Flutter, you can use an EventChannel.
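Below is a minimal sketch of the Swift side that handles the method call and streams the captured photo back as bytes. Assumptions: the event-channel name 'com.vrin.methodchannel/photoStream' and the class name CameraChannelHandler are hypothetical, and you need a real FlutterBinaryMessenger — note that your AppDelegate currently calls NativeViewFactory() without one, so messenger stays nil; registrar(forPlugin: "Runner")!.messenger() would provide it.

import Flutter
import AVFoundation

class CameraChannelHandler: NSObject, FlutterStreamHandler, AVCapturePhotoCaptureDelegate {

    private var eventSink: FlutterEventSink?
    private let photoOutput: AVCapturePhotoOutput

    init(messenger: FlutterBinaryMessenger, photoOutput: AVCapturePhotoOutput) {
        self.photoOutput = photoOutput
        super.init()

        // Flutter -> Swift: handle calls on the channel declared in Dart.
        let methodChannel = FlutterMethodChannel(
            name: "com.vrin.methodchannel/cameraButton", binaryMessenger: messenger)
        methodChannel.setMethodCallHandler { [weak self] call, result in
            guard let self = self else { return }
            switch call.method {
            case "takePicture":
                self.photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
                result(nil)
            default:
                result(FlutterMethodNotImplemented)
            }
        }

        // Swift -> Flutter: stream captured photos over an EventChannel.
        let eventChannel = FlutterEventChannel(
            name: "com.vrin.methodchannel/photoStream", binaryMessenger: messenger)
        eventChannel.setStreamHandler(self)
    }

    // FlutterStreamHandler: Flutter subscribed to the stream.
    func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
        eventSink = events
        return nil
    }

    // FlutterStreamHandler: Flutter cancelled the subscription.
    func onCancel(withArguments arguments: Any?) -> FlutterError? {
        eventSink = nil
        return nil
    }

    // AVCapturePhotoCaptureDelegate: forward the encoded photo bytes to Flutter.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        if let error = error {
            eventSink?(FlutterError(code: "CAPTURE_FAILED",
                                    message: error.localizedDescription,
                                    details: nil))
            return
        }
        if let data = photo.fileDataRepresentation() {
            eventSink?(FlutterStandardTypedData(bytes: data))
        }
    }
}

You would create one of these in NativeView after setupAVCapture() and keep a strong reference to it. On the Dart side the bytes arrive as a Uint8List, e.g. const EventChannel('com.vrin.methodchannel/photoStream').receiveBroadcastStream().listen((photo) => setState(() => nativePhoto = photo));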

Regarding "ios - How to pass data from flutter -> swift && swift -> flutter?", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/74311451/
