LoginSignup
2
3

More than 5 years have passed since last update.

Swiftで動画をトリミングした

Posted at

 実行

on01.gif

on.gif

on03.gif

補足

映像と音声は別々に合成しています。スタートポイントだけしかまだ実装してないです。エンドポイントを動かしてもスタートポイントの位置が変わります。エンドポイントはスタート地点から50秒後に設定しています。viewには動画からサムネイルを生成しています。スライダーを動かすたびにviewに描画されたサムネイル画像が変動するのは、スライダーを動かすたびに、スライダーから取得した秒数で新たにサムネイルを生成してviewに描画しているからです。

ソース

GitHub

実装


import UIKit
import AVFoundation
import MobileCoreServices
import Photos
import AVKit

// File-scope shared state used by ViewController below.
// NOTE(review): these are mutable globals; consider moving them into the
// view controller as instance properties.
var int = 0             // Whole-second slider value; used as the trim start point.
var trimVideoURL: URL?  // URL of the movie picked from the photo library (set in the picker delegate).
var asset : AVAsset?    // Most recent asset: the source movie, then the trimmed composition after trim().

class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    /// Picker used to choose a movie from the photo library.
    let imagePickerController = UIImagePickerController()
    /// URL of the movie most recently chosen in the picker; nil until one is selected.
    var videoURL: URL?

    /// Displays a thumbnail of the selected movie at the current slider position.
    @IBOutlet weak var imageView: UIImageView!

    /// Presents the photo library restricted to movies only.
    @IBAction func selectImage(_ sender: Any) {
        print("UIBarButtonItem。カメラロールから動画を選択")
        imagePickerController.sourceType = .photoLibrary
        imagePickerController.delegate = self
        // Movies only. Use ["public.image", "public.movie"] to allow both,
        // or ["public.image"] for images only.
        imagePickerController.mediaTypes = ["public.movie"]
        present(imagePickerController, animated: true, completion: nil)
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        // Start-point slider and caption. (The two slider/label setups were
        // copy-pasted in the original; factored into makeSlider/makeLabel.)
        let startPointSlider = makeSlider(centerY: 500, action: #selector(self.onStartPointlabel(_:)))
        self.view.addSubview(startPointSlider)
        self.view.addSubview(makeLabel(text: "start point", centerY: 450))

        // End-point slider and caption.
        let endPointSlider = makeSlider(centerY: 600, action: #selector(self.onEndPointlabel(_:)))
        self.view.addSubview(endPointSlider)
        self.view.addSubview(makeLabel(text: "end point", centerY: 550))
    }

    /// Builds one of the trim-position sliders (0...100, interpreted as seconds).
    private func makeSlider(centerY: CGFloat, action: Selector) -> UISlider {
        let slider = UISlider(frame: CGRect(x:0, y:0, width:350, height:30))
        slider.layer.position = CGPoint(x:self.view.frame.midX, y:centerY)
        slider.backgroundColor = UIColor.white
        slider.layer.cornerRadius = 10.0
        slider.layer.shadowOpacity = 0.5
        slider.layer.masksToBounds = false
        slider.addTarget(self, action: action, for: .valueChanged)
        slider.minimumValue = 0
        slider.maximumValue = 100
        return slider
    }

    /// Builds a caption label centered horizontally at the given y position.
    private func makeLabel(text: String, centerY: CGFloat) -> UILabel {
        let label = UILabel()
        label.text = text
        label.sizeToFit()
        label.layer.position = CGPoint(x:self.view.frame.midX, y:centerY)
        return label
    }

    /// Called while the start-point slider moves: records the new start second
    /// and refreshes the preview thumbnail.
    func onStartPointlabel(_ sender:UISlider!)
    {
        updatePreview(for: sender)
    }

    /// Called while the end-point slider moves. NOTE(review): end-point handling
    /// is not implemented yet, so this currently behaves exactly like the start
    /// slider (it also overwrites the shared start second `int`).
    func onEndPointlabel(_ sender:UISlider!)
    {
        updatePreview(for: sender)
    }

    /// Stores the slider's whole-second value into the shared `int` and redraws
    /// the thumbnail. Fixed: the original force-unwrapped `videoURL!` and the
    /// generator result, crashing when a slider moved before a movie was picked
    /// or when thumbnail extraction failed; now it just skips the redraw.
    private func updatePreview(for sender: UISlider) {
        print(floor(sender.value))
        int = Int(floor(sender.value))
        guard let url = videoURL, let image = previewImageFromVideo(url) else { return }
        imageView.image = image
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Picker delegate: remembers the chosen movie's URL and shows its thumbnail.
    /// Fixed: the original force-unwrapped the reference URL and printed it twice.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        defer { imagePickerController.dismiss(animated: true, completion: nil) }
        guard let url = info["UIImagePickerControllerReferenceURL"] as? URL else { return }
        videoURL = url
        trimVideoURL = url
        print(url)
        if let image = previewImageFromVideo(url) {
            imageView.image = image
        }
        imageView.contentMode = .scaleAspectFit
    }

    /// Generates a thumbnail from the movie at `url`, taken at the second
    /// currently stored in the shared `int`. Returns nil if extraction fails.
    /// Fixed: removed a dead local (`time`) that was computed but never used.
    func previewImageFromVideo(_ url:URL) -> UIImage? {
        print("動画からサムネイルを生成する")
        let sourceAsset = AVAsset(url:url)
        asset = sourceAsset
        let imageGenerator = AVAssetImageGenerator(asset: sourceAsset)
        // Respect the track's preferred transform so rotated video previews upright.
        imageGenerator.appliesPreferredTrackTransform = true
        do {
            let imageRef = try imageGenerator.copyCGImage(at: CMTimeMake(Int64(int), 1), actualTime: nil)
            return UIImage(cgImage: imageRef)
        } catch {
            return nil
        }
    }

    /// Exports `asset` to Documents/rendered-audio.m4v as a QuickTime movie.
    /// NOTE(review): despite the name, the composition passed in carries both
    /// video and audio tracks.
    func createAudioFileFromAsset(_ asset: AVAsset){

        let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
        let filePath = documentsDirectory.appendingPathComponent("rendered-audio.m4v")
        // Fixed: AVAssetExportSession fails when outputURL already exists, so
        // every export after the first silently failed. Remove any previous file.
        try? FileManager.default.removeItem(at: filePath)
        if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480){

            exportSession.canPerformMultiplePassesOverSourceMediaData = true
            exportSession.outputURL = filePath
            exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
            exportSession.outputFileType = AVFileTypeQuickTimeMovie
            exportSession.exportAsynchronously {
                print("finished: \(filePath) :  \(exportSession.status.rawValue) ")
            }
        }

    }

    /// Trims the selected movie: copies 50 seconds of video and audio starting
    /// at second `int` into a new composition, then exports it.
    @IBAction func trim(_ sender: Any) {
        // Fixed: the original force-unwrapped trimVideoURL; bail out politely
        // if nothing has been picked yet.
        guard let url = trimVideoURL else { return }

        let videoAsset = AVURLAsset(url: url)

        print("videoAssetする")
        print(videoAsset)
        print( type(of: videoAsset) )

        let comp = AVMutableComposition()

        // Fixed: the original used `.first!` and crashed on a movie without an
        // audio track; skip the trim instead.
        guard let videoSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first,
              let audioSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first else { return }

        let videoCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let audioCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

        // 50 s of media starting at the slider-selected second (shared `int`);
        // the same range is applied to both tracks so they stay in sync.
        let range = CMTimeRangeMake(CMTimeMakeWithSeconds(Float64(int), 10), CMTimeMakeWithSeconds(50, 10))
        do {
            try videoCompositionTrack.insertTimeRange(range, of: videoSourceTrack, at: kCMTimeZero)
            try audioCompositionTrack.insertTimeRange(range, of: audioSourceTrack, at: kCMTimeZero)
        }catch { print(error) }

        asset = comp
        createAudioFileFromAsset(comp)
    }
}

2
3
0

Register as a new user and use Qiita more conveniently

  1. You get articles that match your needs
  2. You can efficiently read back useful information
  3. You can use dark theme
What you can do with signing up
2
3