How do I export a UIImage array as a movie? I have a problem: I have an NSArray containing several UIImage objects, and what I want to do is create a movie from those UIImages, but I have no idea how to go about it. I hope someone can help me, or send me a code snippet that does what I am after.

Edit, for future reference: after applying the solution below, if the video looks distorted, make sure the width of the captured images/area is a multiple of 16. I found this out after hours of struggling ("Why does my movie made from UIImages come out distorted?"). Here is the complete solution (just make sure the width is a multiple of 16): http:/codeThink.no-ip.org/WordPress/files/673
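For anyone hitting that distortion issue, a hypothetical helper like the one below (not part of the linked solution, Swift 2 syntax to match the answer) can truncate a capture size down to the nearest multiple of 16 before rendering:

// Illustration only: the question calls out the width; both dimensions are
// aligned here for safety. Truncating to a multiple of 16 avoids the
// sheared/distorted frames described above.
func sizeAlignedTo16(size: CGSize) -> CGSize {
    let width = max(CGFloat(Int(size.width) / 16 * 16), 16)
    let height = max(CGFloat(Int(size.height) / 16 * 16), 16)
    return CGSize(width: width, height: height)
}

// Example: sizeAlignedTo16(CGSize(width: 1283, height: 722)) -> (1280.0, 720.0)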
3 Answers
jeck猫
Note: This is a Swift 2.1 solution (iOS 8+, Xcode 7.2). You will only need to adapt loadImages() and the RenderSettings values to your own project.
Part 1: Setting things up

The RenderSettings struct below groups all of the export settings (dimensions, frame rate, codec, and output file location) in one place.
import AVFoundation
import UIKit
import Photos

struct RenderSettings {

    var width: CGFloat = 1280
    var height: CGFloat = 720
    var fps: Int32 = 2   // 2 frames per second
    var avCodecKey = AVVideoCodecH264
    var videoFilename = "render"
    var videoFilenameExt = "mp4"

    var size: CGSize {
        return CGSize(width: width, height: height)
    }

    var outputURL: NSURL {
        // Use the CachesDirectory so the rendered video file sticks around as long as we need it to.
        // Using the CachesDirectory ensures the file won't be included in a backup of the app.
        let fileManager = NSFileManager.defaultManager()
        if let tmpDirURL = try? fileManager.URLForDirectory(.CachesDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) {
            return tmpDirURL.URLByAppendingPathComponent(videoFilename).URLByAppendingPathExtension(videoFilenameExt)
        }
        fatalError("URLForDirectory() failed")
    }
}
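For reference, a minimal sketch of configuring the struct above; the values are examples only, with the width kept a multiple of 16 per the note in the question:

// Example values only: a larger frame size and a higher frame rate.
var settings = RenderSettings()
settings.width = 1920
settings.height = 1080
settings.fps = 30
print(settings.outputURL)   // e.g. .../Library/Caches/render.mp4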
Part 2: Animating the images

The ImageAnimator class knows about your images and uses the VideoWriter class to perform the rendering, keeping the image-handling code separate from the low-level AVFoundation code. saveToLibrary() is a class function called at the end of the chain to save the finished video to the Photos library.
class ImageAnimator {

    // Apple suggests a timescale of 600 because it's a multiple of standard video rates 24, 25, 30, 60 fps etc.
    static let kTimescale: Int32 = 600

    let settings: RenderSettings
    let videoWriter: VideoWriter
    var images: [UIImage]!

    var frameNum = 0

    class func saveToLibrary(videoURL: NSURL) {
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .Authorized else { return }

            PHPhotoLibrary.sharedPhotoLibrary().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(videoURL)
            }) { success, error in
                if !success {
                    print("Could not save video to photo library:", error)
                }
            }
        }
    }

    class func removeFileAtURL(fileURL: NSURL) {
        do {
            try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
        }
        catch _ as NSError {
            // Assume file doesn't exist.
        }
    }

    init(renderSettings: RenderSettings) {
        settings = renderSettings
        videoWriter = VideoWriter(renderSettings: settings)
        images = loadImages()
    }

    func render(completion: ()->Void) {

        // The VideoWriter will fail if a file exists at the URL, so clear it out first.
        ImageAnimator.removeFileAtURL(settings.outputURL)

        videoWriter.start()
        videoWriter.render(appendPixelBuffers) {
            ImageAnimator.saveToLibrary(self.settings.outputURL)
            completion()
        }
    }

    // Replace this logic with your own.
    func loadImages() -> [UIImage] {
        var images = [UIImage]()
        for index in 1...10 {
            let filename = "\(index).jpg"
            images.append(UIImage(named: filename)!)
        }
        return images
    }

    // This is the callback function for VideoWriter.render()
    func appendPixelBuffers(writer: VideoWriter) -> Bool {

        let frameDuration = CMTimeMake(Int64(ImageAnimator.kTimescale / settings.fps), ImageAnimator.kTimescale)

        while !images.isEmpty {
            if writer.isReadyForData == false {
                // Inform writer we have more buffers to write.
                return false
            }
            let image = images.removeFirst()
            let presentationTime = CMTimeMultiply(frameDuration, Int32(frameNum))
            let success = videoWriter.addImage(image, withPresentationTime: presentationTime)
            if success == false {
                fatalError("addImage() failed")
            }
            frameNum += 1
        }

        // Inform writer all buffers have been written.
        return true
    }
}
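The question starts from an existing array of UIImages, so loadImages() is the piece to swap out. A minimal sketch, assuming your images are bundled assets with hypothetical names:

// Replace the body of loadImages() above with your own image source.
// "photo1"/"photo2"/"photo3" are placeholder asset names for illustration.
func loadImages() -> [UIImage] {
    let names = ["photo1", "photo2", "photo3"]
    return names.flatMap { UIImage(named: $0) }   // silently drops assets that fail to load
}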
Part 3: The VideoWriter

The VideoWriter class does the AVFoundation heavy lifting. It is essentially a wrapper around AVAssetWriter and AVAssetWriterInput, and it also contains the code that converts a UIImage into a CVPixelBuffer.
class VideoWriter {

    let renderSettings: RenderSettings

    var videoWriter: AVAssetWriter!
    var videoWriterInput: AVAssetWriterInput!
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

    var isReadyForData: Bool {
        return videoWriterInput?.readyForMoreMediaData ?? false
    }

    class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize) -> CVPixelBuffer {

        var pixelBufferOut: CVPixelBuffer?

        let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
        if status != kCVReturnSuccess {
            fatalError("CVPixelBufferPoolCreatePixelBuffer() failed")
        }

        let pixelBuffer = pixelBufferOut!

        CVPixelBufferLockBaseAddress(pixelBuffer, 0)

        let data = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGBitmapContextCreate(data, Int(size.width), Int(size.height),
            8, CVPixelBufferGetBytesPerRow(pixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

        CGContextClearRect(context, CGRectMake(0, 0, size.width, size.height))

        let horizontalRatio = size.width / image.size.width
        let verticalRatio = size.height / image.size.height
        //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

        let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)

        let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
        let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0

        CGContextDrawImage(context, CGRectMake(x, y, newSize.width, newSize.height), image.CGImage)
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)

        return pixelBuffer
    }

    init(renderSettings: RenderSettings) {
        self.renderSettings = renderSettings
    }

    func start() {

        let avOutputSettings: [String: AnyObject] = [
            AVVideoCodecKey: renderSettings.avCodecKey,
            AVVideoWidthKey: NSNumber(float: Float(renderSettings.width)),
            AVVideoHeightKey: NSNumber(float: Float(renderSettings.height))
        ]

        func createPixelBufferAdaptor() {
            let sourcePixelBufferAttributesDictionary = [
                kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB),
                kCVPixelBufferWidthKey as String: NSNumber(float: Float(renderSettings.width)),
                kCVPixelBufferHeightKey as String: NSNumber(float: Float(renderSettings.height))
            ]
            pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
        }

        func createAssetWriter(outputURL: NSURL) -> AVAssetWriter {
            guard let assetWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeMPEG4) else {
                fatalError("AVAssetWriter() failed")
            }
            guard assetWriter.canApplyOutputSettings(avOutputSettings, forMediaType: AVMediaTypeVideo) else {
                fatalError("canApplyOutputSettings() failed")
            }
            return assetWriter
        }

        videoWriter = createAssetWriter(renderSettings.outputURL)
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: avOutputSettings)

        if videoWriter.canAddInput(videoWriterInput) {
            videoWriter.addInput(videoWriterInput)
        }
        else {
            fatalError("canAddInput() returned false")
        }

        // The pixel buffer adaptor must be created before we start writing.
        createPixelBufferAdaptor()

        if videoWriter.startWriting() == false {
            fatalError("startWriting() failed")
        }

        videoWriter.startSessionAtSourceTime(kCMTimeZero)

        precondition(pixelBufferAdaptor.pixelBufferPool != nil, "nil pixelBufferPool")
    }

    func render(appendPixelBuffers: (VideoWriter)->Bool, completion: ()->Void) {

        precondition(videoWriter != nil, "Call start() to initialize the writer")

        let queue = dispatch_queue_create("mediaInputQueue", nil)
        videoWriterInput.requestMediaDataWhenReadyOnQueue(queue) {
            let isFinished = appendPixelBuffers(self)
            if isFinished {
                self.videoWriterInput.markAsFinished()
                self.videoWriter.finishWritingWithCompletionHandler() {
                    dispatch_async(dispatch_get_main_queue()) {
                        completion()
                    }
                }
            }
            else {
                // Fall through. The closure will be called again when the writer is ready.
            }
        }
    }

    func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {

        precondition(pixelBufferAdaptor != nil, "Call start() to initialize the writer")

        let pixelBuffer = VideoWriter.pixelBufferFromImage(image, pixelBufferPool: pixelBufferAdaptor.pixelBufferPool!, size: renderSettings.size)
        return pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
    }
}
Part 4: Making it happen
let settings = RenderSettings()
let imageAnimator = ImageAnimator(renderSettings: settings)
imageAnimator.render() {
    print("yes")
}
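When the completion closure runs, the finished movie is at settings.outputURL and has already been handed to the Photos library by saveToLibrary(). As a quick way to verify the result, the hypothetical helper below (not part of the original answer, same Swift 2 syntax) plays the file back with AVPlayerViewController:

import AVKit
import AVFoundation
import UIKit

// Example only: present the rendered file so you can check it visually.
// Call this from a UIViewController after render()'s completion closure has run.
func previewVideo(url: NSURL, from viewController: UIViewController) {
    let playerController = AVPlayerViewController()
    playerController.player = AVPlayer(URL: url)
    viewController.presentViewController(playerController, animated: true) {
        playerController.player?.play()
    }
}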