//
// LivePhoto.swift
// Live Photos
//
// Read discussion at:
// http://www.limit-point.com/blog/2018/live-photos/
//
// Created by Alexander Pagliaro on 7/25/18.
// Copyright © 2018 Limit Point LLC. All rights reserved.
//
import UIKit
import AVFoundation
import MobileCoreServices
import Photos
class LivePhoto {
// MARK: PUBLIC
typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)
/// Returns the paired image and video for the given PHLivePhoto
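///
/// A minimal usage sketch; the `livePhoto` instance is assumed to come from
/// elsewhere (e.g. a PHLivePhotoView or a PHImageManager request):
///
///     LivePhoto.extractResources(from: livePhoto) { resources in
///         guard let resources = resources else { return }
///         print("image: \(resources.pairedImage) video: \(resources.pairedVideo)")
///     }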
public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
queue.async {
shared.extractResources(from: livePhoto, completion: completion)
}
}
/// Generates a PHLivePhoto from an image and video. Also returns the paired image and video.
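///
/// A minimal usage sketch, assuming `videoURL` points to a local movie file;
/// pass nil for the image URL to have a key photo extracted from the video:
///
///     LivePhoto.generate(from: nil, videoURL: videoURL, progress: { percent in
///         print("conversion progress: \(percent)")
///     }) { livePhoto, resources in
///         // Display livePhoto in a PHLivePhotoView, or save resources to the library.
///     }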
public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
queue.async {
shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
}
}
/// Save a Live Photo to the Photo Library by passing the paired image and video.
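///
/// Saving requires photo library authorization (NSPhotoLibraryUsageDescription,
/// or NSPhotoLibraryAddUsageDescription on iOS 11+, in Info.plist). A sketch
/// chaining generation and saving:
///
///     LivePhoto.generate(from: nil, videoURL: videoURL, progress: { _ in }) { _, resources in
///         guard let resources = resources else { return }
///         LivePhoto.saveToLibrary(resources) { success in
///             print("saved to Photos: \(success)")
///         }
///     }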
public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
PHPhotoLibrary.shared().performChanges({
let creationRequest = PHAssetCreationRequest.forAsset()
let options = PHAssetResourceCreationOptions()
creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: resources.pairedVideo, options: options)
creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: resources.pairedImage, options: options)
}, completionHandler: { (success, error) in
if let error = error {
print(error)
}
completion(success)
})
}
// MARK: PRIVATE
private static let shared = LivePhoto()
private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)
lazy private var cacheDirectory: URL? = {
if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
// Use .path rather than .absoluteString so the existence check sees a plain filesystem path.
if !FileManager.default.fileExists(atPath: fullDirectory.path) {
try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
}
return fullDirectory
}
return nil
}()
deinit {
clearCache()
}
private func generateKeyPhoto(from videoURL: URL) -> URL? {
var percent: Float = 0.5
let videoAsset = AVURLAsset(url: videoURL)
if let stillImageTime = videoAsset.stillImageTime() {
percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
}
guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
guard let jpegData = UIImageJPEGRepresentation(imageFrame, 1.0) else { return nil }
guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
// Propagate a failed write as nil rather than returning a URL with no file behind it.
do {
try jpegData.write(to: url)
return url
} catch {
return nil
}
}
private func clearCache() {
if let cacheDirectory = cacheDirectory {
try? FileManager.default.removeItem(at: cacheDirectory)
}
}
private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
guard let cacheDirectory = cacheDirectory else {
DispatchQueue.main.async {
completion(nil, nil)
}
return
}
let assetIdentifier = UUID().uuidString
let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
DispatchQueue.main.async {
completion(nil, nil)
}
return
}
addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { (_videoURL) in
if let pairedVideoURL = _videoURL {
_ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable : Any]) -> Void in
if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
return
}
DispatchQueue.main.async {
completion(livePhoto, (pairedImageURL, pairedVideoURL))
}
})
} else {
DispatchQueue.main.async {
completion(nil, nil)
}
}
}
}
private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
let assetResources = PHAssetResource.assetResources(for: livePhoto)
let group = DispatchGroup()
var keyPhotoURL: URL?
var videoURL: URL?
for resource in assetResources {
let buffer = NSMutableData()
let options = PHAssetResourceRequestOptions()
options.isNetworkAccessAllowed = true
group.enter()
PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { (data) in
buffer.append(data)
}) { (error) in
if error == nil {
if resource.type == .pairedVideo {
videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
} else {
keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
}
} else {
print(error as Any)
}
group.leave()
}
}
group.notify(queue: DispatchQueue.main) {
guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
completion(nil)
return
}
completion((pairedPhotoURL, pairedVideoURL))
}
}
private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
if let cacheDirectory = cacheDirectory {
extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
} else {
// Surface the failure instead of silently never calling the completion handler.
DispatchQueue.main.async {
completion(nil)
}
}
}
private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
let fileExtension = UTTypeCopyPreferredTagWithClass(resource.uniformTypeIdentifier as CFString, kUTTagClassFilenameExtension)?.takeRetainedValue()
guard let ext = fileExtension else {
return nil
}
var fileUrl = directory.appendingPathComponent(NSUUID().uuidString)
fileUrl = fileUrl.appendingPathExtension(ext as String)
do {
try resourceData.write(to: fileUrl, options: [Data.WritingOptions.atomic])
} catch {
print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
return nil
}
return fileUrl
}
func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil),
var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else { return nil }
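// Key "17" of the Apple maker note dictionary holds the asset identifier that
// Photos uses to pair the still image with its video counterpart.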
let assetIdentifierKey = "17"
let assetIdentifierInfo = [assetIdentifierKey : assetIdentifier]
imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
CGImageDestinationFinalize(imageDestination)
return destinationURL
}
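// The readers and writer are held as properties so they stay alive while the
// asynchronous writing session below is in flight.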
var audioReader: AVAssetReader?
var videoReader: AVAssetReader?
var assetWriter: AVAssetWriter?
func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
var audioWriterInput: AVAssetWriterInput?
var audioReaderOutput: AVAssetReaderOutput?
let videoAsset = AVURLAsset(url: videoURL)
let frameCount = videoAsset.countFrames(exact: false)
guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
completion(nil)
return
}
do {
// Create the Asset Writer
assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
// Create Video Reader Output
videoReader = try AVAssetReader(asset: videoAsset)
let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
videoReader?.add(videoReaderOutput)
// Create Video Writer Input
let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : videoTrack.naturalSize.width, AVVideoHeightKey : videoTrack.naturalSize.height])
videoWriterInput.transform = videoTrack.preferredTransform
videoWriterInput.expectsMediaDataInRealTime = true
assetWriter?.add(videoWriterInput)
// Create Audio Reader Output & Writer Input
if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
do {
let _audioReader = try AVAssetReader(asset: videoAsset)
let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
_audioReader.add(_audioReaderOutput)
audioReader = _audioReader
audioReaderOutput = _audioReaderOutput
let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
_audioWriterInput.expectsMediaDataInRealTime = false
assetWriter?.add(_audioWriterInput)
audioWriterInput = _audioWriterInput
} catch {
print(error)
}
}
// Create necessary identifier metadata and still image time metadata
let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
assetWriter?.metadata = [assetIdentifierMetadata]
assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
// Start the Asset Writer
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: kCMTimeZero)
// Add still image metadata
let _stillImagePercent: Float = 0.5
stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()],timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
// For end of writing / progress
var writingVideoFinished = false
var writingAudioFinished = false
var currentFrameCount = 0
func didCompleteWriting() {
guard writingAudioFinished && writingVideoFinished else { return }
assetWriter?.finishWriting {
if self.assetWriter?.status == .completed {
completion(destinationURL)
} else {
completion(nil)
}
}
}
// Start writing video
if videoReader?.startReading() ?? false {
videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
while videoWriterInput.isReadyForMoreMediaData {
if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
currentFrameCount += 1
let percent:CGFloat = CGFloat(currentFrameCount)/CGFloat(frameCount)
progress(percent)
if !videoWriterInput.append(sampleBuffer) {
print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
self.videoReader?.cancelReading()
}
} else {
videoWriterInput.markAsFinished()
writingVideoFinished = true
didCompleteWriting()
}
}
}
} else {
writingVideoFinished = true
didCompleteWriting()
}
// Start writing audio
if audioReader?.startReading() ?? false {
audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
while audioWriterInput?.isReadyForMoreMediaData ?? false {
guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
audioWriterInput?.markAsFinished()
writingAudioFinished = true
didCompleteWriting()
return
}
audioWriterInput?.append(sampleBuffer)
}
}
} else {
writingAudioFinished = true
didCompleteWriting()
}
} catch {
print(error)
completion(nil)
}
}
private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
let item = AVMutableMetadataItem()
let keyContentIdentifier = "com.apple.quicktime.content.identifier"
let keySpaceQuickTimeMetadata = "mdta"
item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
item.dataType = "com.apple.metadata.datatype.UTF-8"
return item
}
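// Builds an adaptor whose metadata input carries the QuickTime "still-image-time"
// timed metadata track; Photos uses it to locate the still frame within the video.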
private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
let spec : NSDictionary = [
kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
"\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
"com.apple.metadata.datatype.int8" ]
var desc : CMFormatDescription? = nil
CMMetadataFormatDescriptionCreateWithMetadataSpecifications(kCFAllocatorDefault, kCMMetadataFormatType_Boxed, [spec] as CFArray, &desc)
let input = AVAssetWriterInput(mediaType: .metadata,
outputSettings: nil, sourceFormatHint: desc)
return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
}
private func metadataItemForStillImageTime() -> AVMetadataItem {
let item = AVMutableMetadataItem()
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
item.key = keyStillImageTime as (NSCopying & NSObjectProtocol)?
item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
item.value = 0 as (NSCopying & NSObjectProtocol)?
item.dataType = "com.apple.metadata.datatype.int8"
return item
}
}
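// MARK: AVAsset helpers for frame counting, still-image time, and frame extraction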
fileprivate extension AVAsset {
func countFrames(exact: Bool) -> Int {
var frameCount = 0
if let videoReader = try? AVAssetReader(asset: self) {
if let videoTrack = self.tracks(withMediaType: .video).first {
frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
if exact {
frameCount = 0
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
videoReader.add(videoReaderOutput)
videoReader.startReading()
// count frames
while true {
let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
if sampleBuffer == nil {
break
}
frameCount += 1
}
videoReader.cancelReading()
}
}
}
return frameCount
}
func stillImageTime() -> CMTime? {
var stillTime:CMTime? = nil
if let videoReader = try? AVAssetReader(asset: self) {
if let metadataTrack = self.tracks(withMediaType: .metadata).first {
let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
videoReader.add(videoReaderOutput)
videoReader.startReading()
let keyStillImageTime = "com.apple.quicktime.still-image-time"
let keySpaceQuickTimeMetadata = "mdta"
var found = false
while !found {
if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
for item in group?.items ?? [] {
// Avoid force-unwrapping keySpace; a nil keySpace simply fails the match.
if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
stillTime = group?.timeRange.start
found = true
break
}
}
}
} else {
break
}
}
videoReader.cancelReading()
}
}
return stillTime
}
func makeStillImageTimeRange(percent: Float, inFrameCount: Int = 0) -> CMTimeRange {
var time = self.duration
var frameCount = inFrameCount
if frameCount == 0 {
frameCount = self.countFrames(exact: true)
}
// Guard against an unreadable video producing a zero frame count and a divide-by-zero.
guard frameCount > 0 else { return CMTimeRangeMake(kCMTimeZero, kCMTimeZero) }
let frameDuration = Int64(Float(time.value) / Float(frameCount))
time.value = Int64(Float(time.value) * percent)
return CMTimeRangeMake(time, CMTimeMake(frameDuration, time.timescale))
}
func getAssetFrame(percent: Float) -> UIImage? {
let imageGenerator = AVAssetImageGenerator(asset: self)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.requestedTimeToleranceAfter = CMTimeMake(1, 100)
imageGenerator.requestedTimeToleranceBefore = CMTimeMake(1, 100)
var time = self.duration
time.value = Int64(Float(time.value) * percent)
do {
var actualTime = kCMTimeZero
let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
let img = UIImage(cgImage: imageRef)
return img
} catch let error as NSError {
print("Image generation failed with error \(error)")
return nil
}
}
}