iOS: конвертировать UIImage в CMSampleBuffer
Некоторые вопросы касаются того, как преобразовать CMSampleBuffer в UIImage, но нет ответов о том, как сделать обратное, то есть преобразовать UIImage в CMSampleBuffer.
Этот вопрос отличается от аналогичных, поскольку приведенный ниже код обеспечивает отправную точку для преобразования UIImage в CVPixelBuffer и, надеюсь, поможет специалисту с большим опытом работы с AVFoundation преобразовать его в CMSampleBuffer.
/// Renders a `UIImage` into a newly allocated 32ARGB `CVPixelBuffer`.
/// - Parameter image: Source image; its point size is used as the buffer's pixel size.
/// - Returns: The populated pixel buffer, or `nil` if buffer or context creation fails.
func convertImageToBuffer(from image: UIImage) -> CVPixelBuffer? {
    let attrs = [
        // Both keys are required so a CGContext can draw directly into the buffer's memory.
        String(kCVPixelBufferCGImageCompatibilityKey): kCFBooleanTrue,
        String(kCVPixelBufferCGBitmapContextCompatibilityKey): kCFBooleanTrue
    ] as [String: Any]
    var buffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     Int(image.size.width),
                                     Int(image.size.height),
                                     kCVPixelFormatType_32ARGB,
                                     attrs as CFDictionary,
                                     &buffer)
    guard status == kCVReturnSuccess, let pixelBuffer = buffer else {
        return nil
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    // Balance the lock on every exit path (including the early return below).
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    // Fix: the original force-unwrapped the context (`context!`) and would crash
    // if CGContext creation failed; bail out gracefully instead.
    guard let context = CGContext(data: pixelData,
                                  width: Int(image.size.width),
                                  height: Int(image.size.height),
                                  bitsPerComponent: 8,
                                  bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                  space: rgbColorSpace,
                                  bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
        return nil
    }
    // Flip the coordinate system: CoreGraphics has a bottom-left origin, UIKit top-left.
    context.translateBy(x: 0, y: image.size.height)
    context.scaleBy(x: 1.0, y: -1.0)
    UIGraphicsPushContext(context)
    image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
    UIGraphicsPopContext()
    return pixelBuffer
}
2 ответа
Вы на полпути. Просто конвертируйте CVPixelBuffer в CMSampleBuffer:
extension UIImage {
    /// The image rendered into a freshly allocated 32ARGB `CVPixelBuffer`,
    /// or `nil` if buffer allocation or context creation fails.
    var cvPixelBuffer: CVPixelBuffer? {
        let attrs = [
            // Both keys are required so a CGContext can draw directly into the buffer's memory.
            String(kCVPixelBufferCGImageCompatibilityKey): kCFBooleanTrue,
            String(kCVPixelBufferCGBitmapContextCompatibilityKey): kCFBooleanTrue
        ] as [String: Any]
        var buffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                         Int(self.size.width),
                                         Int(self.size.height),
                                         kCVPixelFormatType_32ARGB,
                                         attrs as CFDictionary,
                                         &buffer)
        guard status == kCVReturnSuccess, let pixelBuffer = buffer else {
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        // Balance the lock on every exit path.
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        // Fix: don't force-unwrap the context; fail gracefully instead.
        guard let context = CGContext(data: pixelData,
                                      width: Int(self.size.width),
                                      height: Int(self.size.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
                                      space: rgbColorSpace,
                                      bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
            return nil
        }
        // Flip the coordinate system: CoreGraphics has a bottom-left origin, UIKit top-left.
        context.translateBy(x: 0, y: self.size.height)
        context.scaleBy(x: 1.0, y: -1.0)
        UIGraphicsPushContext(context)
        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
        UIGraphicsPopContext()
        return pixelBuffer
    }

    /// Wraps `cvPixelBuffer` in a `CMSampleBuffer` so the image can be handed to
    /// AVFoundation APIs that consume sample buffers.
    /// - Returns: The new sample buffer, or `nil` if any Core Media call fails.
    func createCMSampleBuffer() -> CMSampleBuffer? {
        guard let pixelBuffer = cvPixelBuffer else {
            return nil
        }
        var newSampleBuffer: CMSampleBuffer?
        // Bug fix: the original declared `var timimgInfo: CMSampleTimingInfo?`
        // (misspelled and Optional), which cannot be passed where an
        // `UnsafePointer<CMSampleTimingInfo>` is expected and does not compile.
        var timingInfo = CMSampleTimingInfo(duration: .invalid,
                                            presentationTimeStamp: .zero,
                                            decodeTimeStamp: .invalid)
        var videoInfo: CMVideoFormatDescription?
        let formatStatus = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                                        imageBuffer: pixelBuffer,
                                                                        formatDescriptionOut: &videoInfo)
        // Fix: check the status instead of force-unwrapping `videoInfo!`.
        guard formatStatus == noErr, let formatDescription = videoInfo else {
            return nil
        }
        CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                           imageBuffer: pixelBuffer,
                                           dataReady: true,
                                           makeDataReadyCallback: nil,
                                           refcon: nil,
                                           formatDescription: formatDescription,
                                           sampleTiming: &timingInfo,
                                           sampleBufferOut: &newSampleBuffer)
        // Return the optional as-is; the declared return type already allows nil.
        return newSampleBuffer
    }
}
Используйте этот код Swift.
import Foundation
import AVFoundation
import UIKit
/// `CMBlockBufferCustomBlockSource` free callback: balances the
/// `Unmanaged.passRetained` performed in `toCMBlockBuffer()` by releasing
/// the `NSData` instance carried through `refCon`.
private func freeBlock(_ refCon: UnsafeMutableRawPointer?, doomedMemoryBlock: UnsafeMutableRawPointer, sizeInBytes: Int) -> Void {
    Unmanaged<NSData>.fromOpaque(refCon!).release()
}
/// Errors thrown while wrapping raw data in Core Media buffers.
enum CMEncodingError: Error {
    // Thrown when CMBlockBufferCreateWithMemoryBlock fails or yields no buffer.
    case cmBlockCreationFailed
}
extension Data {
    /// Wraps this data in a `CMBlockBuffer` without copying the bytes again
    /// after the initial `NSMutableData` snapshot.
    ///
    /// The backing store is kept alive by a manual `passRetained`; the
    /// `freeBlock` callback releases it when Core Media finalizes the buffer.
    /// - Throws: `CMEncodingError.cmBlockCreationFailed` if the buffer cannot be created.
    /// - Returns: A block buffer whose data length equals `self.count`.
    func toCMBlockBuffer() throws -> CMBlockBuffer {
        // This block source is a manually retained pointer to our data instance.
        // The passed FreeBlock function manually releases it when the block buffer gets deallocated.
        let data = NSMutableData(data: self)
        var source = CMBlockBufferCustomBlockSource()
        source.refCon = Unmanaged.passRetained(data).toOpaque()
        source.FreeBlock = freeBlock
        var blockBuffer: CMBlockBuffer?
        let result = CMBlockBufferCreateWithMemoryBlock(
            allocator: kCFAllocatorDefault,
            memoryBlock: data.mutableBytes,
            blockLength: data.length,
            blockAllocator: kCFAllocatorNull,   // we own the memory; Core Media must not free it
            customBlockSource: &source,
            offsetToData: 0,
            dataLength: data.length,
            flags: 0,
            blockBufferOut: &blockBuffer)
        guard OSStatus(result) == kCMBlockBufferNoErr, let buffer = blockBuffer else {
            // Bug fix: on failure Core Media never invokes FreeBlock, so the
            // passRetained above must be balanced here or `data` leaks.
            Unmanaged<NSMutableData>.fromOpaque(source.refCon!).release()
            throw CMEncodingError.cmBlockCreationFailed
        }
        assert(CMBlockBufferGetDataLength(buffer) == data.length)
        return buffer
    }
}
extension UIImage {
    /// Encodes the image as a single JPEG video frame wrapped in a ready
    /// `CMSampleBuffer` (e.g. for enqueueing on an `AVSampleBufferDisplayLayer`).
    /// - Returns: A sample buffer timestamped with the current media time and a
    ///   nominal 1/60 s duration, or `nil` if encoding or any Core Media call fails.
    func sampleBuffer() -> CMSampleBuffer? {
        guard let jpegData = self.jpegData(compressionQuality: 1) else {
            return nil
        }
        let rawPixelSize = CGSize(width: size.width, height: size.height)
        var format: CMFormatDescription? = nil
        // Bug fix: the original discarded this status (`let _ =`) and could pass
        // a nil format description on to CMSampleBufferCreateReady.
        let formatStatus = CMVideoFormatDescriptionCreate(allocator: kCFAllocatorDefault,
                                                          codecType: kCMVideoCodecType_JPEG,
                                                          width: Int32(rawPixelSize.width),
                                                          height: Int32(rawPixelSize.height),
                                                          extensions: nil,
                                                          formatDescriptionOut: &format)
        guard formatStatus == noErr, let formatDescription = format else {
            print("could not create JPEG format description: \(formatStatus)")
            return nil
        }
        do {
            let cmBlockBuffer = try jpegData.toCMBlockBuffer()
            var size = jpegData.count
            var sampleBuffer: CMSampleBuffer? = nil
            // Stamp the frame "now" with a nominal 1/60 s duration.
            let nowTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: 60)
            let frameDuration = CMTime(value: 1, timescale: 60)
            var timingInfo = CMSampleTimingInfo(duration: frameDuration,
                                                presentationTimeStamp: nowTime,
                                                decodeTimeStamp: .invalid)
            // Bug fix: check the creation status instead of discarding it.
            let status = CMSampleBufferCreateReady(allocator: kCFAllocatorDefault,
                                                   dataBuffer: cmBlockBuffer,
                                                   formatDescription: formatDescription,
                                                   sampleCount: 1,
                                                   sampleTimingEntryCount: 1,
                                                   sampleTimingArray: &timingInfo,
                                                   sampleSizeEntryCount: 1,
                                                   sampleSizeArray: &size,
                                                   sampleBufferOut: &sampleBuffer)
            guard status == noErr, let buffer = sampleBuffer else {
                print("sampleBuffer is nil")
                return nil
            }
            return buffer
        } catch {
            print("error ugh ", error)
            return nil
        }
    }
}
Пример использования:
let image = UIImage(named: "YourImageName")
let sampleBuffer = image?.sampleBuffer()