大佬教程收集整理的这篇文章主要介绍了ios – 在Swift中逐像素地对图像应用视觉效果,大佬教程大佬觉得挺不错的,现在分享给大家,也给大家做个参考。
我将样本缓冲区转换为imageRef变量,如果我将其转换为UIImage,一切都会好的.
但是现在我想逐像素修改 imageRef 的颜色值——在这个例子中改为负片颜色(我还要做更复杂的处理,所以不能使用 CIFilter)。但当我执行被注释掉的那段循环时,程序会因 EXC_BAD_ACCESS(错误内存访问)而崩溃。
import UIKit
import AVFoundation

/// Captures frames from the back camera via AVCaptureVideoDataOutput,
/// inverts each pixel's BGRA color channels on the CPU, and shows the
/// resulting negative image in `cameraView`.
/// (Swift 1.x-era APIs, kept consistent with the rest of the article.)
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice?

    @IBOutlet weak var cameraView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetMedium
        let devices = AVCaptureDevice.devices()
        for device in devices {
            // FIX: the original read `Device.hasMediaType(...)` (garbled
            // capitalization) which does not compile; it must query the
            // loop variable `device`. Case names also corrected (.Back).
            if device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.Back {
                if let device = device as? AVCaptureDevice {
                    captureDevice = device
                    beginSession()
                    break
                }
            }
        }
    }

    /// Locks the lens at the given normalized focus position (0.0–1.0).
    func focusTo(value: Float) {
        if let device = captureDevice {
            if device.lockForConfiguration(nil) {
                device.setFocusModeLockedWithLensPosition(value) { (time) in }
                device.unlockForConfiguration()
            }
        }
    }

    // Map the horizontal touch position to a focus value.
    // NOTE(review): 320 assumes a 320pt-wide screen — TODO use view.bounds.width.
    override func touchesBegan(touches: NSSet!, withEvent event: UIEvent!) {
        let touchPercent = Float(touches.anyObject().locationInView(view).x / 320)
        focusTo(touchPercent)
    }

    override func touchesMoved(touches: NSSet!, withEvent event: UIEvent!) {
        let touchPercent = Float(touches.anyObject().locationInView(view).x / 320)
        focusTo(touchPercent)
    }

    /// Wires the chosen device into the session and starts capturing
    /// 32BGRA frames on a dedicated serial queue.
    func beginSession() {
        configureDevice()
        var error: NSError?
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &error))
        if error != nil {
            println("error: \(error?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.frame = view.layer.frame
        //view.layer.addSublayer(previewLayer)

        let output = AVCaptureVideoDataOutput()
        // FIX: garbled identifiers (DISPATCH_QUEUE_seriaL, videoSetTings)
        // restored to their real spellings.
        let cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)
        output.setSampleBufferDelegate(self, queue: cameraQueue)
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
        captureSession.addOutput(output)
        captureSession.startRunning()
    }

    func configureDevice() {
        if let device = captureDevice {
            device.lockForConfiguration(nil)
            device.focusMode = .Locked
            device.unlockForConfiguration()
        }
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    /// Called once per captured frame (on `cameraQueue`). Converts the
    /// pixel buffer to a CGImage, inverts the color channels, and pushes
    /// the result to the UI on the main queue.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        let baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        var bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue) | CGBitmapInfo.ByteOrder32Little
        let context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo)
        let imageRef = CGBitmapContextCreateImage(context)
        // Unlock as soon as we have our own CGImage copy of the frame.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

        let data = CGDataProviderCopyData(CGImageGetDataProvider(imageRef)) as NSData
        // FIX (the reported EXC_BAD_ACCESS): the original wrote through an
        // uninitialized `UnsafeMutablePointer<UInt8>()`, i.e. a dangling
        // pointer with no backing storage. Read the source bytes through a
        // typed pointer and write into a properly sized Swift array instead.
        let pixels = UnsafePointer<UInt8>(data.bytes)
        let imageSize = Int(width) * Int(height) * 4
        var newPixelArray = [UInt8](count: imageSize, repeatedValue: 0)
        for index in stride(from: 0, to: imageSize, by: 4) {
            // 32BGRA layout: invert the three color channels, keep alpha.
            newPixelArray[index]     = 255 - pixels[index]
            newPixelArray[index + 1] = 255 - pixels[index + 1]
            newPixelArray[index + 2] = 255 - pixels[index + 2]
            newPixelArray[index + 3] = pixels[index + 3]
        }

        bitmapInfo = CGImageGetBitmapInfo(imageRef)
        let provider = CGDataProviderCreateWithData(nil, &newPixelArray, UInt(data.length), nil)
        // FIX: the original CGImageCreate call was missing the height,
        // bytesPerRow, and colorSpace arguments (dropped by the scrape);
        // restored per the 11-argument CGImageCreate signature.
        let newImageRef = CGImageCreate(width, height, CGImageGetBitsPerComponent(imageRef), CGImageGetBitsPerPixel(imageRef), bytesPerRow, colorSpace, bitmapInfo, provider, nil, false, kCGRenderingIntentDefault)
        let image = UIImage(CGImage: newImageRef, scale: 1, orientation: .Right)
        dispatch_async(dispatch_get_main_queue()) {
            self.cameraView.image = image
        }
    }
}
首先,自OP发布以来,Swift发生了一些变化,这条线必须根据rawValue的功能进行修改:
//var bitmapInfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.PremultipliedFirst.toRaw())! | CGBitmapInfo.ByteOrder32Little
// Updated for later Swift 1.x: fromRaw()/toRaw() were replaced by the
// rawValue initializer/property. FIX: `rawvalue` must be spelled `rawValue`,
// and the enum case is `ByteOrder32Little` (upper camel case in this era).
var bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue) | CGBitmapInfo.ByteOrder32Little
指针也需要进行一些更改,因此我发布了所有更改(我将原始行留在注释标记中).
let data = CGDataProviderCopyData(CGImageGetDataProvider(imageRef)) as NSData

// Read the source bytes through a typed UInt8 pointer instead of the raw
// `data.bytes` (UnsafePointer<Void>), so per-byte arithmetic is possible.
//let pixels = data.bytes
let pixels = UnsafePointer<UInt8>(data.bytes)

// Allocate real backing storage for the output pixels; the original
// uninitialized UnsafeMutablePointer was the cause of the bad access.
let imageSize: Int = Int(width) * Int(height) * 4
//var newPixels = UnsafeMutablePointer<UInt8>()
var newPixelArray = [UInt8](count: imageSize, repeatedValue: 0)

// FIX: the loop needs an upper bound — `to: imageSize` was missing — and
// `stride` is the free function (lowercase), not the `Stride` type.
for index in stride(from: 0, to: imageSize, by: 4) {
    // Invert B, G, R; leave the alpha byte untouched.
    newPixelArray[index]     = 255 - pixels[index]
    newPixelArray[index + 1] = 255 - pixels[index + 1]
    newPixelArray[index + 2] = 255 - pixels[index + 2]
    newPixelArray[index + 3] = pixels[index + 3]
}

bitmapInfo = CGImageGetBitmapInfo(imageRef)
// Hand the provider a pointer to the array's contiguous storage.
//let provider = CGDataProviderCreateWithData(nil, newPixels, UInt(data.length), nil)
let provider = CGDataProviderCreateWithData(nil, &newPixelArray, UInt(data.length), nil)
一些解释:原有的像素字节必须按 UInt8 来读取,因此我把 pixels 改成了 UnsafePointer&lt;UInt8&gt;。然后我为新像素创建了一个数组,删除了原来的 newPixels 指针,直接使用这个数组;最后把指向新数组的指针传给 data provider 来创建图像。另外,我去掉了对 alpha 字节的取反。
在此之后,我能够把负片图像显示到视图中,但性能非常低,大约每 10 秒才出一帧(iPhone 5,通过 Xcode 运行),而且第一帧在 image view 中呈现也要花很长时间。
当我将captureSession.stopRunning()添加到didOutputSampleBuffer函数的开头时,有一些更快的响应,然后在处理完成后再次使用captureSession.startRunning()启动.有了这个我差不多1fps.
感谢有趣的挑战!
以上是大佬教程为你收集整理的ios – 在Swift中逐像素地对图像应用视觉效果全部内容,希望文章能够帮你解决ios – 在Swift中逐像素地对图像应用视觉效果所遇到的程序开发问题。
如果觉得大佬教程网站内容还不错,欢迎将大佬教程推荐给程序员好友。
本图文内容来源于网友网络收集整理提供,作为学习参考使用,版权属于原作者。
如您有任何意见或建议可联系处理。小编QQ:384754419,请注明来意。