国产探花免费观看_亚洲丰满少妇自慰呻吟_97日韩有码在线_资源在线日韩欧美_一区二区精品毛片,辰东完美世界有声小说,欢乐颂第一季,yy玄幻小说排行榜完本

首頁 > 系統 > iOS > 正文

iOS 在線視頻生成GIF圖功能的方法

2020-07-26 02:30:10
字體:
來源:轉載
供稿:網友

在一些視頻APP中,都可以看到一個將在線視頻轉成GIF圖的功能。下面就來說說思路以及實現。我們知道本地視頻可以生成GIF,那么將在線視頻截取成本地視頻不就可以了嗎?經過比較,騰訊視頻App也是這么做的。話不多說,下面開始上代碼:

第一步:截取視頻

#pragma mark - Intercept video

/// Cuts the seconds [videoRange.location, videoRange.location + videoRange.length]
/// out of the source video and exports the clip to a local file.
/// @param videoUrl        Source video URL (local or remote).
/// @param outPath         Destination file path for the exported clip.
/// @param outputFileType  An AVFileType string, e.g. AVFileTypeMPEG4.
/// @param videoRange      location = start offset in seconds, length = duration in seconds.
/// @param interceptBlock  Invoked on the main queue as (error, outputURL);
///                        error is nil on success.
- (void)interceptVideoAndVideoUrl:(NSURL *)videoUrl
                      withOutPath:(NSString *)outPath
                   outputFileType:(NSString *)outputFileType
                            range:(NSRange)videoRange
                        intercept:(InterceptBlock)interceptBlock {
    _interceptBlock = interceptBlock;

    // No background-music track is mixed in: the nil URL yields an asset whose
    // track array is empty, which the count guards below tolerate.
    NSURL *audioUrl = nil;
    // AVURLAsset wraps the media (video and sound tracks) for inspection.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];

    // Mutable container that the trimmed video/audio tracks are inserted into.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // CMTimeMakeWithSeconds(seconds, preferredTimeScale): express the cut
    // boundaries in the asset's own timescale so the range maps onto real
    // media time. (value / timescale) is the time in seconds.
    CMTime startTime = CMTimeMakeWithSeconds(videoRange.location, videoAsset.duration.timescale);
    CMTime videoDuration = CMTimeMakeWithSeconds(videoRange.length, videoAsset.duration.timescale);
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);

    // Video track.
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // tracksWithMediaType: returns an empty array when the media is missing —
    // guard before taking firstObject to avoid an out-of-bounds access.
    // timeRange = span to copy, ofTrack = source, atTime = insertion position.
    [compositionVideoTrack insertTimeRange:videoTimeRange
                                   ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeVideo].count > 0)
                                               ? [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject
                                               : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // Original sound of the video (drop this insert to strip the source audio
    // from the merged clip).
    AVMutableCompositionTrack *compositionVoiceTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVoiceTrack insertTimeRange:videoTimeRange
                                   ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0)
                                               ? [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject
                                               : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // Background-music track; its range equals the video's duration. This is a
    // no-op here because audioAsset was built from a nil URL and has no tracks.
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, videoDuration);
    AVMutableCompositionTrack *compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack insertTimeRange:audioTimeRange
                                   ofTrack:([audioAsset tracksWithMediaType:AVMediaTypeAudio].count > 0)
                                               ? [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject
                                               : nil
                                    atTime:kCMTimeZero
                                     error:nil];

    // Export the merged composition; Passthrough keeps the original encoding.
    AVAssetExportSession *assetExportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetPassthrough];

    // The export fails if the destination file already exists, so clear it.
    NSURL *outPutURL = [NSURL fileURLWithPath:outPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];
    }

    assetExportSession.outputFileType = outputFileType;
    assetExportSession.outputURL = outPutURL;
    // Structure the output for progressive network playback.
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        // The completion handler runs on an arbitrary queue — hop to main
        // before invoking the caller's block.
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (assetExportSession.status) {
                case AVAssetExportSessionStatusFailed:
                    if (_interceptBlock) {
                        _interceptBlock(assetExportSession.error, outPutURL);
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    logdebug(@"Export Status: Cancelled");  // BUG FIX: message typo "Cancell"
                    break;
                case AVAssetExportSessionStatusCompleted:
                    if (_interceptBlock) {
                        _interceptBlock(nil, outPutURL);
                    }
                    break;
                case AVAssetExportSessionStatusUnknown:
                    logdebug(@"Export Status: Unknown");
                    break;  // BUG FIX: missing break fell through to Exporting/Waiting
                case AVAssetExportSessionStatusExporting:
                    logdebug(@"Export Status: Exporting");
                    break;  // BUG FIX: missing break fell through to Waiting
                case AVAssetExportSessionStatusWaiting:
                    logdebug(@"Export Status: Waiting");  // BUG FIX: message typo "Wating"
                    break;
            }
        });
    }];
}

第二步:本地視頻生成GIF圖

#pragma mark - Make GIF

/// Generates a GIF from a local video by sampling 4 frames per second.
/// @param videoURL      Local video file URL.
/// @param loopCount     GIF loop count (0 = loop forever).
/// @param time          Delay per frame in seconds; 0 falls back to 0.25 s.
/// @param imagePath     Destination file path for the GIF.
/// @param completeBlock Invoked as (error, gifURL) when generation finishes.
- (void)createGIFfromURL:(NSURL *)videoURL
               loopCount:(int)loopCount
               delayTime:(CGFloat)time
            gifImagePath:(NSString *)imagePath
                complete:(CompleteBlock)completeBlock {
    _completeBlock = completeBlock;

    // Default frame delay is 0.25 s when the caller passes 0.
    float delayTime = time ?: 0.25;

    // Container-level (loop count) and per-frame (delay) GIF properties.
    NSDictionary *fileProperties = [self filePropertiesWithLoopCount:loopCount];
    NSDictionary *frameProperties = [self framePropertiesWithDelayTime:delayTime];

    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];

    // NOTE(review): assumes the asset has at least one video track — a pure
    // audio file would crash on objectAtIndex:0.
    float videoWidth = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].width;
    float videoHeight = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize].height;

    // Pick a downscale level from the source resolution: larger sources get a
    // smaller GIF scale to keep the output file size reasonable.
    GIFSize optimalSize = GIFSizeMedium;
    if (videoWidth >= 1200 || videoHeight >= 1200)
        optimalSize = GIFSizeVeryLow;
    else if (videoWidth >= 800 || videoHeight >= 800)
        optimalSize = GIFSizeLow;
    else if (videoWidth >= 400 || videoHeight >= 400)
        optimalSize = GIFSizeMedium;
    else
        optimalSize = GIFSizeHigh;

    // Length of the video in seconds; sample a fixed 4 frames per second.
    float videoLength = (float)asset.duration.value / asset.duration.timescale;
    int framesPerSecond = 4;
    int frameCount = videoLength * framesPerSecond;

    // How far along the video track we move between two samples, in seconds.
    float increment = (float)videoLength / frameCount;

    // Timestamps of every frame that will be pulled out of the video.
    // `timeInterval` is a file-level constant (NSNumber timescale) — declared
    // elsewhere in this file.
    NSMutableArray *timePoints = [NSMutableArray array];
    for (int currentFrame = 0; currentFrame < frameCount; ++currentFrame) {
        float seconds = (float)increment * currentFrame;
        CMTime frameTime = CMTimeMakeWithSeconds(seconds, [timeInterval intValue]);
        [timePoints addObject:[NSValue valueWithCMTime:frameTime]];
    }

    // BUG FIX: `optimalSize` was computed above but never used — the original
    // always fell back to GIFSizeMedium. An explicitly configured _gifSize
    // still takes precedence over the computed value.
    NSURL *gifURL = [self createGIFforTimePoints:timePoints
                                         fromURL:videoURL
                                  fileProperties:fileProperties
                                 frameProperties:frameProperties
                                    gifImagePath:imagePath
                                      frameCount:frameCount
                                         gifSize:_gifSize ?: optimalSize];

    if (_completeBlock) {
        // Return the GIF URL (nil on failure, with _error populated).
        _completeBlock(_error, gifURL);
    }
}

經過上面兩步,就可以生成本地的視頻和GIF圖了,存儲在沙盒即可。貼上兩步所用到的方法:

#pragma mark - Base methods

/// Extracts one CGImage per time point from the video and writes them all into
/// a GIF file at `imagePath`.
/// @return The GIF file URL, or nil on failure (with _error populated).
- (NSURL *)createGIFforTimePoints:(NSArray *)timePoints
                          fromURL:(NSURL *)url
                   fileProperties:(NSDictionary *)fileProperties
                  frameProperties:(NSDictionary *)frameProperties
                     gifImagePath:(NSString *)imagePath
                       frameCount:(int)frameCount
                          gifSize:(GIFSize)gifSize {
    NSURL *fileURL = [NSURL fileURLWithPath:imagePath];
    if (fileURL == nil)
        return nil;

    CGImageDestinationRef destination =
        CGImageDestinationCreateWithURL((__bridge CFURLRef)fileURL, kUTTypeGIF, frameCount, NULL);
    CGImageDestinationSetProperties(destination, (CFDictionaryRef)fileProperties);

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    // Honor the track's transform so frames are not rotated/mirrored.
    generator.appliesPreferredTrackTransform = YES;

    // `tolerance` / `timeInterval` are file-level constants declared elsewhere
    // in this file; a small tolerance keeps the extracted frames close to the
    // requested timestamps.
    CMTime tol = CMTimeMakeWithSeconds([tolerance floatValue], [timeInterval intValue]);
    generator.requestedTimeToleranceBefore = tol;
    generator.requestedTimeToleranceAfter = tol;

    NSError *error = nil;
    CGImageRef previousImageRefCopy = nil;
    for (NSValue *time in timePoints) {
        CGImageRef imageRef;

#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
        // gifSize acts as a tenths scale factor (e.g. 5 -> 0.5); skip the
        // rescale entirely when the factor works out to exactly 1.
        imageRef = (float)gifSize / 10 != 1
            ? createImageWithScale([generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error],
                                   (float)gifSize / 10)
            : [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#elif TARGET_OS_MAC
        imageRef = [generator copyCGImageAtTime:[time CMTimeValue] actualTime:nil error:&error];
#endif

        if (error) {
            _error = error;
            logdebug(@"Error copying image: %@", error);
            // BUG FIX: the destination and the kept frame copy leaked on this
            // early return. CGImageRelease(NULL) is a safe no-op.
            CGImageRelease(previousImageRefCopy);
            if (destination != nil) {
                CFRelease(destination);
            }
            return nil;
        }
        if (imageRef) {
            // Keep a copy of the last good frame so a failed extraction later
            // can reuse it instead of aborting.
            CGImageRelease(previousImageRefCopy);
            previousImageRefCopy = CGImageCreateCopy(imageRef);
        } else if (previousImageRefCopy) {
            imageRef = CGImageCreateCopy(previousImageRefCopy);
        } else {
            _error = [NSError errorWithDomain:NSStringFromClass([self class])
                                         code:0
                                     userInfo:@{NSLocalizedDescriptionKey : @"Error copying image and no previous frames to duplicate"}];
            logdebug(@"Error copying image and no previous frames to duplicate");
            // BUG FIX: the destination leaked on this early return.
            if (destination != nil) {
                CFRelease(destination);
            }
            return nil;
        }
        CGImageDestinationAddImage(destination, imageRef, (CFDictionaryRef)frameProperties);
        CGImageRelease(imageRef);
    }
    CGImageRelease(previousImageRefCopy);

    // Finalize the GIF.
    if (!CGImageDestinationFinalize(destination)) {
        // NOTE(review): CGImageDestinationFinalize does not populate `error`,
        // so _error may be nil here — kept as-is for source compatibility.
        _error = error;
        logdebug(@"Failed to finalize GIF destination: %@", error);
        if (destination != nil) {
            CFRelease(destination);
        }
        return nil;
    }
    CFRelease(destination);

    return fileURL;
}

#pragma mark - Helpers

/// Scales `imageRef` by `scale`. CONSUMES the passed-in image (it is released
/// here) and returns a new +1 CGImage that the caller must release. On macOS
/// targets the input is returned untouched.
CGImageRef createImageWithScale(CGImageRef imageRef, float scale) {
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
    CGSize newSize = CGSizeMake(CGImageGetWidth(imageRef) * scale, CGImageGetHeight(imageRef) * scale);
    CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));

    UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (!context) {
        // BUG FIX: the consumed input image leaked on this failure path; also
        // balance the Begin/End image-context calls.
        CGImageRelease(imageRef);
        UIGraphicsEndImageContext();
        return nil;
    }

    // High interpolation quality for the rescale; flip vertically because
    // CoreGraphics uses a bottom-left origin.
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, newSize.height);
    CGContextConcatCTM(context, flipVertical);
    // Draw into the context; this scales the image.
    CGContextDrawImage(context, newRect, imageRef);

    // Swap the original (+1) image for the rescaled one.
    CFRelease(imageRef);
    imageRef = CGBitmapContextCreateImage(context);

    UIGraphicsEndImageContext();
#endif
    return imageRef;
}

#pragma mark - Properties

/// GIF container-level properties: how many times the animation loops
/// (0 = loop forever).
- (NSDictionary *)filePropertiesWithLoopCount:(int)loopCount {
    return @{(NSString *)kCGImagePropertyGIFDictionary :
                 @{(NSString *)kCGImagePropertyGIFLoopCount : @(loopCount)}};
}

/// Per-frame GIF properties: inter-frame delay in seconds plus the RGB
/// color model.
- (NSDictionary *)framePropertiesWithDelayTime:(float)delayTime {
    return @{(NSString *)kCGImagePropertyGIFDictionary :
                 @{(NSString *)kCGImagePropertyGIFDelayTime : @(delayTime)},
             (NSString *)kCGImagePropertyColorModel : (NSString *)kCGImagePropertyColorModelRGB};
}

最后,截取的本地視頻可用AVPlayer播放,生成的GIF圖則用UIWebView或者WKWebView又或者 YYImage 加載即可。

以上就是本文的全部內容,希望對大家的學習有所幫助,也希望大家多多支持武林網。

發表評論 共有條評論
用戶名: 密碼:
驗證碼: 匿名發表
主站蜘蛛池模板: 鄢陵县| 肥城市| 贞丰县| 龙川县| 凌源市| 新田县| 武邑县| 榆林市| 梓潼县| 黎城县| 万载县| 宁南县| 罗山县| 吴堡县| 陇西县| 汾西县| 三门县| 凤冈县| 岫岩| 屯门区| 阜平县| 苗栗县| 垦利县| 扶风县| 康乐县| 武陟县| 黄大仙区| 永州市| 宜黄县| 江北区| 龙川县| 什邡市| 富顺县| 德令哈市| 驻马店市| 荆门市| 宁国市| 环江| 晋江市| 涟源市| 腾冲县|