I'm trying to create an app that takes video from an iPhone and sends it to an iPad over Apple's Multipeer Connectivity interface.
//
// ViewController.swift
// Multipeer Video
//

import UIKit
import MultipeerConnectivity
import CoreMotion
import SceneKit
import AVFoundation
import AVKit
import MediaPlayer

class ViewController: UIViewController, MCNearbyServiceAdvertiserDelegate, MCNearbyServiceBrowserDelegate, MCSessionDelegate, NSStreamDelegate, AVCaptureVideoDataOutputSampleBufferDelegate
{
    // Video
    let transferredSession = AVCaptureSession()
    let captureSession = AVCaptureSession()
    var transferredLayer : AVCaptureVideoPreviewLayer?
    var previewLayer : AVCaptureVideoPreviewLayer?
    var captureDevice : AVCaptureDevice?
    var videoDeviceOutput: AVCaptureVideoDataOutput!
    var sessionQueue: dispatch_queue_t!
    var data = NSData()
    var movieplayer = MPMoviePlayerController()

    // MultiPeer
    let label = UILabel()
    var displayLink: CADisplayLink?
    let serviceType = "motion-control"
    let peerID = MCPeerID(displayName: UIDevice.currentDevice().name)
    var serviceAdvertiser : MCNearbyServiceAdvertiser!
    var serviceBrowser : MCNearbyServiceBrowser!

    lazy var session : MCSession =
    {
        let session = MCSession(peer: self.peerID, securityIdentity: nil, encryptionPreference: MCEncryptionPreference.Required)
        session.delegate = self
        return session
    }()

    override func viewDidLoad()
    {
        super.viewDidLoad()

        // Video
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                    }
                }
            }
        }

        label.textAlignment = NSTextAlignment.Center
        view.addSubview(label)

        if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
        {
            label.text = "iPad"
            view.backgroundColor = UIColor.blackColor()
            label.textColor = UIColor.whiteColor()
            initialiseAdvertising()
            // need to set up receiving video
        }
        else
        {
            label.text = "iPhone"
            initialiseBrowsing()
            beginVideoSession()
        }
    }

    func beginVideoSession() {
        configureDevice()
        do {
            //try captureSession.addInput(input: captureDevice)
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
            updateDeviceSettings(0.0, isoValue: 0.0)
        } catch {
            // error message etc.
            print("Capture device not initialisable")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame
        self.view.layer.insertSublayer(previewLayer!, atIndex: 0)
        captureSession.startRunning()
    }

    func configureDevice() {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.focusMode = .Locked
                device.unlockForConfiguration()
            } catch {
                // error message etc.
                print("Capture device not configurable")
            }
        }
    }

    // set ISO
    func updateDeviceSettings(focusValue : Float, isoValue : Float) {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.setFocusModeLockedWithLensPosition(focusValue, completionHandler: { (time) -> Void in
                })
                let minISO = device.activeFormat.minISO
                let maxISO = device.activeFormat.maxISO
                let clampedISO = isoValue * (maxISO - minISO) + minISO
                device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent, ISO: clampedISO, completionHandler: { (time) -> Void in
                    //
                })
                device.unlockForConfiguration()
            } catch {
                print("Can't update device settings")
            }
        }
    }

    // MARK: MCNearbyServiceBrowserDelegate (iPhone is browser)
    var streamTargetPeer: MCPeerID?
    var outputStream: NSOutputStream?

    func initialiseBrowsing()
    {
        serviceBrowser = MCNearbyServiceBrowser(peer: peerID, serviceType: serviceType)
        serviceBrowser.delegate = self
        serviceBrowser.startBrowsingForPeers()
    }

    func browser(browser: MCNearbyServiceBrowser, foundPeer peerID: MCPeerID, withDiscoveryInfo info: [String : String]?)
    {
        print("Found Peer! \(peerID)")
        streamTargetPeer = peerID
        browser.invitePeer(peerID, toSession: session, withContext: nil, timeout: 120)
        displayLink = CADisplayLink(target: self, selector: #selector(ViewController.step))
        displayLink?.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
        print("displayLink")
    }

    // Initialises the sample buffer delegate and videoDeviceOutput
    func addVideoOutput() {
        videoDeviceOutput = AVCaptureVideoDataOutput()
        videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
        self.sessionQueue = dispatch_queue_create("Camera Session", DISPATCH_QUEUE_SERIAL)
        videoDeviceOutput.setSampleBufferDelegate(self, queue: sessionQueue)
        if captureSession.canAddOutput(videoDeviceOutput) {
            captureSession.addOutput(videoDeviceOutput)
        }
    }

    // Grabbing frames from the camera
    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        print("frame received")
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let src_buff = CVPixelBufferGetBaseAddress(imageBuffer)
        data = NSData(bytes: src_buff, length: bytesPerRow * height)
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
    }

    func startStream()
    {
        guard let streamTargetPeer = streamTargetPeer where outputStream == nil else
        {
            return
        }
        do
        {
            print("stream started")
            outputStream = try session.startStreamWithName("MotionControlStream", toPeer: streamTargetPeer)
            outputStream?.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
            outputStream?.open()
        }
        catch
        {
            print("unable to start stream!! \(error)")
        }
    }

    func step()
    {
        startStream()
        print("step")
        guard let outputStream = outputStream else
        {
            print("no stream")
            return
        }
        if outputStream.hasSpaceAvailable
        {
            print("writing to output")
            outputStream.write(UnsafePointer<UInt8>(data.bytes), maxLength: data.length)
        }
        else
        {
            print("no space available")
        }
    }

    func browser(browser: MCNearbyServiceBrowser, lostPeer peerID: MCPeerID)
    {
        label.text = "Lost Peer!"
    }

    // MARK: MCNearbyServiceAdvertiserDelegate (iPad is advertiser)
    func initialiseAdvertising()
    {
        serviceAdvertiser = MCNearbyServiceAdvertiser(peer: peerID, discoveryInfo: nil, serviceType: serviceType)
        serviceAdvertiser.delegate = self
        serviceAdvertiser.startAdvertisingPeer()
    }

    // MARK: MCSessionDelegate
    func session(session: MCSession, peer peerID: MCPeerID, didChangeState state: MCSessionState)
    {
        let stateName: String
        switch state
        {
        case MCSessionState.Connected:
            stateName = "connected"
        case MCSessionState.Connecting:
            stateName = "connecting"
        case MCSessionState.NotConnected:
            stateName = "not connected"
        }
        let deviceName: String
        switch UIDevice.currentDevice().userInterfaceIdiom
        {
        case UIUserInterfaceIdiom.Pad:
            deviceName = "iPad"
        case UIUserInterfaceIdiom.Phone:
            deviceName = "iPhone"
        default:
            deviceName = "Unspecified"
        }
        dispatch_async(dispatch_get_main_queue())
        {
            self.label.text = "\(deviceName) didChangeState: \(stateName)"
        }
    }

    func session(_: MCSession, didReceiveStream stream: NSInputStream, withName streamName: String, fromPeer peerID: MCPeerID)
    {
        print("did receive")
        stream.scheduleInRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
        stream.delegate = self
        stream.open()
    }

    func stream(stream: NSStream, handleEvent eventCode: NSStreamEvent)
    {
        print(eventCode)
        if eventCode == NSStreamEvent.EndEncountered {
            print("end")
        }
        print("stream started")
        if let inputStream = stream as? NSInputStream //where eventCode == NSStreamEvent.HasBytesAvailable
        {
            print("Does this byte?")
            /* let bufferSize = 1024
            var buffer = [UInt8](count: bufferSize, repeatedValue: 0)
            let bytesRead = inputStream.read(&buffer, maxLength: bufferSize)
            print(bytesRead)
            if bytesRead >= 0 {
                let output = NSString(bytes: &buffer, length: bytesRead, encoding: NSUTF8StringEncoding)
                print(output)
            }
            */
            var bytes = [UInt8](count: 12, repeatedValue: 0)
            inputStream.read(&bytes, maxLength: data.length)
            let dataIn: NSData = "Stream".dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: true)!
            let stream: NSInputStream = NSInputStream(data: dataIn)
            var buffer = [UInt8](count: 8, repeatedValue: 0)
            stream.open()
            if stream.hasBytesAvailable {
                print("stream has bytes!")
                let result: Int = stream.read(&buffer, maxLength: buffer.count)
                print("result: \(result)")
                let dataString: NSString = NSString(data: dataIn, encoding: NSUTF8StringEncoding)!
                print(dataString)
                let movieURL = NSURL.init(string: dataString as String)
                print(movieURL)
            } else {
                print("stream has no bytes")
            }
            dispatch_async(dispatch_get_main_queue())
            {
            }
            //*/
        }
        if let outstream = stream as? NSOutputStream {
            print("This is output")
            print(outstream)
        }
    }

    func session(session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, atURL localURL: NSURL, withError error: NSError?)
    {
    }

    func session(session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, withProgress progress: NSProgress)
    {
    }

    func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID)
    {
    }

    // MARK: Layout
    override func viewDidLayoutSubviews()
    {
        if UIDevice.currentDevice().userInterfaceIdiom == UIUserInterfaceIdiom.Pad
        {
            label.frame = CGRect(x: 0, y: topLayoutGuide.length, width: view.frame.width, height: label.intrinsicContentSize().height)
        }
        else
        {
            label.frame = view.bounds
        }
    }

    func advertiser(advertiser: MCNearbyServiceAdvertiser, didReceiveInvitationFromPeer peerID: MCPeerID, withContext context: NSData?, invitationHandler: (Bool, MCSession) -> Void)
    {
        invitationHandler(true, self.session)
    }
}
It seems that I can send the data from the iPhone correctly, but I haven't been able to figure out what I'm doing wrong on the iPad. My guess is that it has something to do with run loops, but while troubleshooting I noticed that the iPad receives an end-of-stream event code.
Thanks for your help!
Best Answer
There are (apparently) two main tasks involved: storing the bytes at the pixel buffer's base address into an NSData object, and then restoring them on the other end. The best way to do this with Apple's MultipeerConnectivity framework is not via streams, but via data objects.
Here's how to send the raw buffer data from the iPhone's CVPixelBuffer:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    NSError *err;
    [((ViewController *)self.parentViewController).session sendData:[self dataFromImageBuffer:imageBuffer withBytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer) withHeight:CVPixelBufferGetHeight(imageBuffer)]
                                                            toPeers:((ViewController *)self.parentViewController).session.connectedPeers
                                                           withMode:MCSessionSendDataReliable
                                                              error:&err];
}

- (NSData *)dataFromImageBuffer:(CVImageBufferRef)imageBuffer withBytesPerRow:(size_t)bytesPerRow withHeight:(NSInteger)height
{
    NSMutableData *data = [NSMutableData new];
    if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
    {
        uint8_t *rawBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        [data appendBytes:rawBuffer length:1228808];
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
    return data;
}
And here's how to display it on the iPad:
- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID {
    dispatch_async(dispatch_get_main_queue(), ^{
        NSMutableData *mdata = [NSMutableData new];
        UInt8 *rawBuffer = (uint8_t *)[data bytes];
        [mdata appendBytes:rawBuffer length:1228808];
        uint8_t *buffer = (uint8_t *)[mdata bytes];
        NSLog(@"sizeof(buffer) %lu", sizeof(buffer));
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(buffer, 640, 480, 8, 2560, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
        CGImageRelease(newImage);
        if (image) {
            NSLog(@"image size %f x %f", [image size].width, [image size].height);
            dispatch_async(dispatch_get_main_queue(), ^{
                [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage];
            });
        }
    });
}
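Since the question code is Swift, here is a minimal Swift 2 sketch of the same data-object approach — a send helper (`sendFrameData` is a hypothetical name) plus a body for the currently empty `didReceiveData` delegate method. The fixed 640x480 BGRA geometry (bytesPerRow = 2560) mirrors the Objective-C example above and is an assumption about your capture format:
// iPhone side: send one frame's raw bytes as a data object instead of a stream.
// Hypothetical helper; `frameData` is the NSData built in captureOutput above.
func sendFrameData(frameData: NSData) {
    do {
        try session.sendData(frameData, toPeers: session.connectedPeers, withMode: .Reliable)
    } catch {
        print("sendData failed: \(error)")
    }
}

// iPad side: rebuild a CGImage from the received bytes and display it.
// Assumes 640x480 BGRA frames (bytesPerRow = 2560), as in the answer above.
func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) {
    let width = 640, height = 480, bytesPerRow = 2560
    guard data.length >= bytesPerRow * height else { return }
    let buffer = UnsafeMutablePointer<Void>(data.bytes)
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapInfo = CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue
    guard let context = CGBitmapContextCreate(buffer, width, height, 8, bytesPerRow, colorSpace, bitmapInfo) else { return }
    guard let cgImage = CGBitmapContextCreateImage(context) else { return }
    dispatch_async(dispatch_get_main_queue()) {
        self.view.layer.contents = cgImage
    }
}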
Note that this is raw, uncompressed and otherwise unaltered image data; it is therefore huge, and takes far too long to transfer from one device to the other for a viable product.
Here's a way to compress the data before sending it, for real-time performance; it isn't as high quality as the solution I use now, but it's quick and easy.
On your iPhone:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    // Keep the buffer locked until we've finished reading from its base address
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [[UIImage alloc] initWithCGImage:newImage scale:1 orientation:UIImageOrientationUp];
    CGImageRelease(newImage);
    if (image) {
        NSData *data = UIImageJPEGRepresentation(image, 0.7);
        NSError *err;
        [((ViewController *)self.parentViewController).session sendData:data
                                                                toPeers:((ViewController *)self.parentViewController).session.connectedPeers
                                                               withMode:MCSessionSendDataReliable
                                                                  error:&err];
    }
}
On your iPad:
- (void)session:(nonnull MCSession *)session didReceiveData:(nonnull NSData *)data fromPeer:(nonnull MCPeerID *)peerID {
    UIImage *image = [UIImage imageWithData:data];
    if (image) {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Displaying image...");
            [((ViewerViewController *)self.childViewControllers.lastObject).view.layer setContents:(__bridge id)image.CGImage];
        });
    }
}
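And a hedged Swift 2 sketch of the same JPEG round trip (again assuming the `session` property from the question; `sendCompressedFrame` is a hypothetical name):
// iPhone side: JPEG-compress a UIImage built from the frame, then send it.
func sendCompressedFrame(image: UIImage) {
    guard let jpegData = UIImageJPEGRepresentation(image, 0.7) else { return }
    do {
        try session.sendData(jpegData, toPeers: session.connectedPeers, withMode: .Reliable)
    } catch {
        print("sendData failed: \(error)")
    }
}

// iPad side: decode the JPEG and hand it to the layer on the main queue.
func session(session: MCSession, didReceiveData data: NSData, fromPeer peerID: MCPeerID) {
    guard let image = UIImage(data: data) else { return }
    dispatch_async(dispatch_get_main_queue()) {
        self.view.layer.contents = image.CGImage
    }
}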
Regarding this Swift multipeer video app that won't read data, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/37718919/