
Swift Multipeer video app won't read data


I'm trying to create an app that takes video from an iPhone and sends it to an iPad via Apple's Multipeer Connectivity interface.

//
// ViewController.swift
// Multipeer Video
//
//


import UIKit
import MultipeerConnectivity
import CoreMotion
import SceneKit
import AVFoundation
import AVKit
import MediaPlayer

class ViewController: UIViewController, MCNearbyServiceAdvertiserDelegate, MCNearbyServiceBrowserDelegate, MCSessionDelegate, NSStreamDelegate, AVCaptureVideoDataOutputSampleBufferDelegate
{
    // Video
    let transferredSession = AVCaptureSession()
    let captureSession = AVCaptureSession()
    var transferredLayer: AVCaptureVideoPreviewLayer?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?
    var videoDeviceOutput: AVCaptureVideoDataOutput!
    var sessionQueue: dispatch_queue_t!
    var data = NSData()
    var movieplayer = MPMoviePlayerController()

    // MultiPeer
    let label = UILabel()
    var displayLink: CADisplayLink?
    let serviceType = "motion-control"
    let peerID = MCPeerID(displayName: UIDevice.currentDevice().name)
    var serviceAdvertiser: MCNearbyServiceAdvertiser!
    var serviceBrowser: MCNearbyServiceBrowser!
    lazy var session: MCSession =
    {
        let session = MCSession(peer: self.peerID, securityIdentity: nil, encryptionPreference: MCEncryptionPreference.Required)
        session.delegate = self
        return session
    }()


    override func viewDidLoad()
    {
        super.viewDidLoad()

        // Video
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        let devices = AVCaptureDevice.devices()

        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                    }
                }
            }
        }

        label.textAlignment = NSTextAlignment.Center
        view.addSubview(label)
        if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
        {
            label.text = "iPad"
            view.backgroundColor = UIColor.blackColor()
            label.textColor = UIColor.whiteColor()
            initialiseAdvertising()
            // need to set up receiving video
        }
        else
        {
            label.text = "iPhone"
            initialiseBrowsing()
            beginVideoSession()
        }
    }

    func beginVideoSession() {
        configureDevice()
        do {
            //try captureSession.addInput(input: captureDevice)
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
            updateDeviceSettings(0.0, isoValue: 0.0)
        } catch {
            // error message etc.
            print("Capture device not initialisable")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame
        self.view.layer.insertSublayer(previewLayer!, atIndex: 0)
        captureSession.startRunning()
    }

    func configureDevice() {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.focusMode = .Locked
                device.unlockForConfiguration()
            } catch {
                // error message etc.
                print("Capture device not configurable")
            }
        }
    }

    // set ISO
    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.setFocusModeLockedWithLensPosition(focusValue, completionHandler: { (time) -> Void in
                })

                let minISO = device.activeFormat.minISO
                let maxISO = device.activeFormat.maxISO
                let clampedISO = isoValue * (maxISO - minISO) + minISO

                device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent, ISO: clampedISO, completionHandler: { (time) -> Void in
                    //
                })

                device.unlockForConfiguration()
            } catch {
                print("Can't update device settings")
            }
        }
    }


    // MARK: MCNearbyServiceBrowserDelegate (iPhone is browser)

    var streamTargetPeer: MCPeerID?
    var outputStream: NSOutputStream?

    func initialiseBrowsing()
    {
        serviceBrowser = MCNearbyServiceBrowser(peer: peerID, serviceType: serviceType)
        serviceBrowser.delegate = self
        serviceBrowser.startBrowsingForPeers()
    }

    func browser(browser: MCNearbyServiceBrowser, foundPeer peerID: MCPeerID, withDiscoveryInfo info: [String : String]?)
    {
        print("Found Peer! \(peerID)")
        streamTargetPeer = peerID
        browser.invitePeer(peerID, toSession: session, withContext: nil, timeout: 120)

        displayLink = CADisplayLink(target: self, selector: #selector(ViewController.step))
        displayLink?.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
        print("displayLink")
    }

    // Initializes SampleBufferDelegate and videoDeviceOutput
    func addVideoOutput() {
        videoDeviceOutput = AVCaptureVideoDataOutput()
        videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
        self.sessionQueue = dispatch_queue_create("Camera Session", DISPATCH_QUEUE_SERIAL)
        videoDeviceOutput.setSampleBufferDelegate(self, queue: sessionQueue)
        if captureSession.canAddOutput(videoDeviceOutput) {
            captureSession.addOutput(videoDeviceOutput)
        }
    }

    // Grabbing frames from camera
    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("frame received")
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let src_buff = CVPixelBufferGetBaseAddress(imageBuffer)
        data = NSData(bytes: src_buff, length: bytesPerRow * height)
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    }

    func startStream()
    {
        guard let streamTargetPeer = streamTargetPeer where outputStream == nil else
        {
            return
        }
        do
        {
            print("stream started")
            outputStream = try session.startStreamWithName("MotionControlStream", toPeer: streamTargetPeer)
            outputStream?.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
            outputStream?.open()
        }
        catch
        {
            print("unable to start stream!! \(error)")
        }
    }

    func step()
    {
        startStream()
        print("step")
        guard let outputStream = outputStream else
        {
            print("no stream")
            return
        }
        if outputStream.hasSpaceAvailable
        {
            print("writing to output")
            outputStream.write(UnsafePointer<UInt8>(data.bytes), maxLength: data.length)
        }
        else
        {
            print("no space available")
        }
    }

    func browser(browser: MCNearbyServiceBrowser, lostPeer peerID: MCPeerID)
    {
        label.text = "Lost Peer!"
    }

    // MARK: MCNearbyServiceAdvertiserDelegate (iPad is advertiser)

    func initialiseAdvertising()
    {
        serviceAdvertiser = MCNearbyServiceAdvertiser(peer: peerID, discoveryInfo: nil, serviceType: serviceType)
        serviceAdvertiser.delegate = self
        serviceAdvertiser.startAdvertisingPeer()
    }

    // MARK: MCSessionDelegate

    func session(session: MCSession, peer peerID: MCPeerID, didChangeState state: MCSessionState)
    {
        let stateName: String
        switch state
        {
        case MCSessionState.Connected:
            stateName = "connected"
        case MCSessionState.Connecting:
            stateName = "connecting"
        case MCSessionState.NotConnected:
            stateName = "not connected"
        }

        let deviceName: String
        switch UIDevice.currentDevice().userInterfaceIdiom
        {
        case UIUserInterfaceIdiom.Pad:
            deviceName = "iPad"
        case UIUserInterfaceIdiom.Phone:
            deviceName = "iPhone"
        default:
            deviceName = "Unspecified"
        }

        dispatch_async(dispatch_get_main_queue())
        {
            self.label.text = "\(deviceName) didChangeState: \(stateName)"
        }
    }

    func session(_: MCSession, didReceiveStream stream: NSInputStream, withName streamName: String, fromPeer peerID: MCPeerID)
    {
        print("did receive")
        stream.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
        stream.delegate = self
        stream.open()
    }

    func stream(stream: NSStream, handleEvent eventCode: NSStreamEvent)
    {
        print(eventCode)
        if eventCode == NSStreamEvent.EndEncountered {
            print("end")
        }
        print("stream started")
        if let inputStream = stream as? NSInputStream //where eventCode == NSStreamEvent.HasBytesAvailable
        {
            print("Does this byte?")
            /* let bufferSize = 1024
            var buffer = [UInt8](count: bufferSize, repeatedValue: 0)
            let bytesRead = inputStream.read(&buffer, maxLength: bufferSize)
            print(bytesRead)
            if bytesRead >= 0 {
                let output = NSString(bytes: &buffer, length: bytesRead, encoding: NSUTF8StringEncoding)
                print(output)
            }
            */

            var bytes = [UInt8](count: 12, repeatedValue: 0)
            inputStream.read(&bytes, maxLength: data.length)

            let dataIn: NSData = "Stream".dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: true)!
            let stream: NSInputStream = NSInputStream(data: dataIn)
            var buffer = [UInt8](count: 8, repeatedValue: 0)
            stream.open()
            if stream.hasBytesAvailable {
                print("stream has bytes!")
                let result: Int = stream.read(&buffer, maxLength: buffer.count)
                print("result: \(result)")

                let dataString: NSString = NSString(data: dataIn, encoding: NSUTF8StringEncoding)!
                print(dataString)
                let movieURL = NSURL.init(string: dataString as String)
                print(movieURL)
            } else {
                print("stream has no bytes")
            }
            dispatch_async(dispatch_get_main_queue())
            {
            }
        }
        if let outstream = stream as? NSOutputStream {
            print("This is output")
            print(outstream)
        }
    }

    func session(session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, atURL localURL: NSURL, withError error: NSError?)
    {
    }

    func session(session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, withProgress progress: NSProgress)
    {
    }

    func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID)
    {
    }

    // MARK: Layout

    override func viewDidLayoutSubviews()
    {
        if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
        {
            label.frame = CGRect(x: 0, y: topLayoutGuide.length, width: view.frame.width, height: label.intrinsicContentSize().height)
        }
        else
        {
            label.frame = view.bounds
        }
    }

    func advertiser(advertiser: MCNearbyServiceAdvertiser, didReceiveInvitationFromPeer peerID: MCPeerID, withContext context: NSData?, invitationHandler: (Bool, MCSession) -> Void)
    {
        invitationHandler(true, self.session)
    }
}

It seems I can send the data correctly from the iPhone, but I haven't been able to figure out what I'm doing wrong on the iPad. I suspect it has something to do with the run loop, but while troubleshooting I noticed that the iPad receives an end-of-stream event code.

Thanks for your help!

Best Answer

There are (apparently) two main tasks involved: storing the pixel buffer's base address into an NSData object, and then restoring the base address on the other end. The best way to accomplish this with Apple's MultipeerConnectivity framework is not via streams, but via data objects.

Here's how to send the raw buffer data from the iPhone's CVPixelBuffer:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    NSError *err;
    [((ViewController *)self.parentViewController).session sendData:[self dataFromImageBuffer:imageBuffer withBytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer) withHeight:CVPixelBufferGetHeight(imageBuffer)] toPeers:((ViewController *)self.parentViewController).session.connectedPeers withMode:MCSessionSendDataReliable error:&err];
}
 
- (NSData *)dataFromImageBuffer:(CVImageBufferRef)imageBuffer withBytesPerRow:(size_t)bytesPerRow withHeight:(NSInteger)height
{
    NSMutableData *data = [NSMutableData new];
    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
    {
        uint8_t *rawBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        [data appendBytes:rawBuffer length:1228808];
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
    return data;
}
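
For readers working in Swift like the asker, here is a minimal Swift 2 sketch of the same sending side. It assumes the `session` property from the question's ViewController, and it computes the length as bytesPerRow * height instead of using the hard-coded constant above:

func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

    // Copy the raw pixels out while the base address is locked.
    CVPixelBufferLockBaseAddress(imageBuffer, 0)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
    let height = CVPixelBufferGetHeight(imageBuffer)
    let frameData = NSData(bytes: CVPixelBufferGetBaseAddress(imageBuffer), length: bytesPerRow * height)
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

    // Send the frame as a data object instead of writing it to a stream.
    do {
        try session.sendData(frameData, toPeers: session.connectedPeers, withMode: .Reliable)
    } catch {
        print("sendData failed: \(error)")
    }
}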

And here's how you display it on the iPad:

- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID {
    dispatch_async(dispatch_get_main_queue(), ^{
        NSMutableData *mdata = [NSMutableData new];
        UInt8 *rawBuffer = (uint8_t *)[data bytes];
        [mdata appendBytes:rawBuffer length:1228808];
        uint8_t *buffer = (uint8_t *)[mdata bytes];
        NSLog(@"sizeof(buffer) %lu", sizeof(buffer));
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(buffer, 640, 480, 8, 2560, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        
        UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
        CGImageRelease(newImage);
        
        if (image) {
            NSLog(@"image size %f x %f", [image size].width, [image size].height);
            dispatch_async(dispatch_get_main_queue(), ^{
                [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage];
            });
        }
    });
}
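
A corresponding Swift 2 sketch of the receiving side, mirroring the answer's hard-coded 640 × 480 geometry with 2560 bytes per row (these values are assumptions that must match the sender's actual pixel format):

func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) {
    dispatch_async(dispatch_get_main_queue()) {
        // Rebuild an image from the raw bytes, using the sender's frame geometry.
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue
        let context = CGBitmapContextCreate(UnsafeMutablePointer<Void>(data.bytes), 640, 480, 8, 2560, colorSpace, bitmapInfo)
        if let context = context, image = CGBitmapContextCreateImage(context) {
            self.view.layer.contents = image
        }
    }
}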

Note that this is raw, uncompressed and otherwise unaltered image data; it is therefore huge, and it takes far too long to transfer from one device to the other for a viable product.
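
To put rough numbers on that, using the 640 × 480 geometry hard-coded above: one frame is 640 × 480 pixels × 4 bytes per pixel = 1,228,800 bytes, about 1.2 MB (close to the 1228808 constant in the code). At 30 frames per second that is roughly 37 MB of pixel data every second, which a Multipeer link cannot move in real time.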

Here is a way to compress the data before sending it, to achieve real-time performance; it is not as high quality as the solution I use now, but it is quick and easy.

On your iPhone:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
    CGImageRelease(newImage);
    if (image) {
        NSData *data = UIImageJPEGRepresentation(image, 0.7);
        NSError *err;
        [((ViewController *)self.parentViewController).session sendData:data toPeers:((ViewController *)self.parentViewController).session.connectedPeers withMode:MCSessionSendDataReliable error:&err];
    }
}

On your iPad:

- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID {
    UIImage *image = [UIImage imageWithData:data];
    if (image) {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Displaying image...");
            [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage];
        });
    }
}
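
For completeness, a Swift 2 sketch of the same JPEG round trip, again slotted into the question's ViewController. The imageFromSampleBuffer(_:) helper is hypothetical shorthand for the CVPixelBuffer-to-UIImage bitmap-context conversion shown in the Objective-C above:

// iPhone: compress each frame to JPEG before sending.
func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
    // imageFromSampleBuffer(_:) is a hypothetical helper wrapping the
    // bitmap-context conversion from the answer's Objective-C code.
    guard let image = imageFromSampleBuffer(sampleBuffer),
          jpegData = UIImageJPEGRepresentation(image, 0.7) else { return }
    do {
        try session.sendData(jpegData, toPeers: session.connectedPeers, withMode: .Reliable)
    } catch {
        print("sendData failed: \(error)")
    }
}

// iPad: decode the JPEG and display it.
func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) {
    guard let image = UIImage(data: data) else { return }
    dispatch_async(dispatch_get_main_queue()) {
        self.view.layer.contents = image.CGImage
    }
}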

For more on "Swift Multipeer video app won't read data", see the similar question on Stack Overflow: https://stackoverflow.com/questions/37718919/
