Converting an Image to a Video Format and Playing It - Swift 3.0


Running it

(Screenshot of the sample app and an animated GIF of the generated movie playing.)

Implementation

ViewController.swift

//
//  ViewController.swift
//  Convert images to movie format
//
//  Created by ryosuke-hujisawa on 2017/09/13.
//  Copyright © 2017年 ryosuke-hujisawa. All rights reserved.
//

import UIKit
import AVKit
import AVFoundation

class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()

        // Load the bundled image, build H.264 video settings, and synchronously
        // write a movie made of that single image.
        let imageURL = Bundle.main.url(forResource: "ijustwannaknowyou", withExtension: "jpg")
        let videoSettings = CXEImageToVideoSync.videoSettings(codec: AVVideoCodecH264, width: 480, height: 320)
        let sync = CXEImageToVideoSync(videoSettings: videoSettings)
        let fileURL = sync.createMovieFrom(url: imageURL!, duration: 4)
        print(fileURL.absoluteString)

        // Inspect the result as an AVURLAsset.
        let video = AVURLAsset(url: fileURL)
        print("duration: \(video.duration.seconds)")
//        let playItem = AVPlayerItem(asset: video)
//        self.player = AVPlayer(playerItem: playItem)
//        self.playerView.player = self.player
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }


}
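
The player lines are left commented out above because this sample doesn't define a playerView. As a minimal sketch (my own addition, not part of the original project), the generated file can be played back with AVPlayerViewController from AVKit, which is already imported. It assumes it runs inside the view controller after createMovieFrom(url:duration:) has produced fileURL:

// Hypothetical playback snippet (not in the original code).
// fileURL is the URL returned by createMovieFrom(url:duration:).
let player = AVPlayer(url: fileURL)
let playerController = AVPlayerViewController()
playerController.player = player
present(playerController, animated: true) {
    player.play()
}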


CXEImagesToVideoSync.swift

//
//  CXEImagesToVideoSync.swift
//  Convert images to movie format
//
//  Created by ryosuke-hujisawa on 2017/09/13.
//  Copyright © 2017年 ryosuke-hujisawa. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

fileprivate typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


class CXEImageToVideoSync: NSObject{

    //MARK: Private Properties

    private var assetWriter:AVAssetWriter!
    private var writeInput:AVAssetWriterInput!
    private var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings:[String : Any]!
    private var frameTime:CMTime!
    private var fileURL:URL!

    //MARK: Class Method

    class func videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
        if(Int(width) % 16 != 0){
            print("warning: video settings width must be divisible by 16")
        }

        let videoSettings:[String: Any] = [AVVideoCodecKey: codec,
                                           AVVideoWidthKey: width,
                                           AVVideoHeightKey: height]

        return videoSettings
    }

    //MARK: Public methods

    init(videoSettings: [String: Any]) {
        super.init()

        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        var tempPath:String
        repeat{
            let random = arc4random()
            tempPath = paths[0] + "/\(random).mp4"
        }while(FileManager.default.fileExists(atPath: tempPath))

        self.fileURL = URL(fileURLWithPath: tempPath)
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")

        self.assetWriter.add(self.writeInput)
        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(600, 600)   // 600 / 600 = 1 second per frame
    }

    func createMovieFrom(url: URL, duration:Int) -> URL{
        var urls = [URL]()
        var index = duration
        while(index > 0){
            urls.append(url)
            index -= 1
        }
        return self.createMovieFromSource(images: urls as [AnyObject], extractor:{(inputObject:AnyObject) ->UIImage? in
            return UIImage(data: try! Data(contentsOf: inputObject as! URL))})
    }

    func createMovieFrom(image: UIImage, duration:Int) -> URL{
        var images = [UIImage]()
        var index = duration
        while(index > 0){
            images.append(image)
            index -= 1
        }
        return self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
            return inputObject as? UIImage})
    }

    //MARK: Private methods

    private func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor) -> URL{

        self.assetWriter.startWriting()
        //        self.assetWriter.startSession(atSourceTime: kCMTimeZero)
        let zeroTime = CMTimeMake(Int64(0),self.frameTime.timescale)
        self.assetWriter.startSession(atSourceTime: zeroTime)

        var i = 0
        let frameNumber = images.count

        // Busy-wait until the writer input is ready to accept the first sample.
        while !self.writeInput.isReadyForMoreMediaData {}

        while(true){
            if(i >= frameNumber){
                break
            }

            if (self.writeInput.isReadyForMoreMediaData){
                var sampleBuffer:CVPixelBuffer?
                autoreleasepool{
                    if let img = extractor(images[i]){
                        sampleBuffer = self.newPixelBufferFrom(cgImage: img.cgImage!)
                    }else{
                        // Skip frames that could not be extracted instead of force-unwrapping nil.
                        i += 1
                        print("Warning: could not extract one of the frames")
                    }
                }
                if (sampleBuffer != nil){
                    if(i == 0){

                        /*
                        CMTime basics:

                        let time: CMTime = CMTimeMake(1000, 1000)
                        builds a CMTime from a value and a timescale.
                        Seconds = value / timescale, so value: 1000, timescale: 1000 is 1 second.

                        let time: CMTime = CMTimeMakeWithSeconds(Float64(7.6), 100)
                        builds a CMTime directly from seconds (7.6 s at timescale 100).
                        */

                        // Presentation times: append the same frame at 0 s and at 5 s
                        // (5000 / 1000), so the resulting movie ends up 5 seconds long.
                        let time: CMTime = CMTimeMake(0, 1000)
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: time)

                        let endTime: CMTime = CMTimeMake(5000, 1000)
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: endTime)

                    }else{
//                        let value = i - 1
//                        let lastTime = CMTimeMake(Int64(value), self.frameTime.timescale)
//                        let presentTime = CMTimeAdd(lastTime, self.frameTime)
//                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                    }
                    i = i + 1
                }
            }
        }
        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {}

        // finishWriting is asynchronous; poll until the writer reaches a terminal status.
        var isSuccess:Bool = false
        while(!isSuccess){
            switch self.assetWriter.status {
            case .completed:
                isSuccess = true
                print("completed")
            case .writing:
                sleep(1)
                print("writing")
            case .failed:
                isSuccess = true
                print("failed")
            case .cancelled:
                isSuccess = true
                print("cancelled")
            default:
                isSuccess = true
                print("unknown")
            }
        }
        return self.fileURL
    }



    private func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{


        let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]

        var pxbuffer:CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)

        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()



        let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))


        print("pxbuffer!")
        print("pxbuffer!の方は \(type(of: pxbuffer!))です")
        print(pxbuffer!)

        return pxbuffer
    }
}
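
Since the presentation-time arithmetic is the least obvious part of the class, here is a minimal standalone sketch (my own, not from the original project) of how the CMTime values used above map to seconds:

import CoreMedia

// seconds = value / timescale (Swift 3 CoreMedia API, as used in the class above)
let oneSecond   = CMTimeMake(1000, 1000)            // 1000 / 1000 = 1.0 s
let endOfMovie  = CMTimeMake(5000, 1000)            // 5000 / 1000 = 5.0 s
let frameTime   = CMTimeMake(600, 600)              // 600 / 600   = 1.0 s per frame
let fromSeconds = CMTimeMakeWithSeconds(7.6, 100)   // 7.6 s at timescale 100

print(CMTimeGetSeconds(oneSecond))    // 1.0
print(CMTimeGetSeconds(endOfMovie))   // 5.0
print(CMTimeGetSeconds(frameTime))    // 1.0
print(CMTimeGetSeconds(fromSeconds))  // 7.6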



Source

GitHub
