iOS音视频相关知识

1、将CVPixelBufferRef对象转换为UIImage对象

// Convert a CVPixelBufferRef into a UIImage via Core Image.
// NOTE(review): `pixelBuffer` is a placeholder here — in real code it must be
// a valid, filled buffer (e.g. from a capture or decoder callback) before use.
CVPixelBufferRef pixelBuffer;
// Wrap the pixel buffer in a CIImage (no pixel copy at this point).
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

// Render the CIImage into a CGImage covering the buffer's full extent.
CIContext *temporaryContext = [CIContext contextWithOptions:nil];
CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer))];

UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
// createCGImage:fromRect: follows the Create Rule (+1) — release once wrapped.
CGImageRelease(videoImage);

2、获取视频中的音频信息(注:以下示例方法在读取音频采样率的同时返回视频尺寸)

/// Returns the display size of the video at `videoPath` (naturalSize with the
/// preferred transform applied), or CGSizeZero when the file is missing or has
/// no video track. Also demonstrates reading audio information (sample rate).
+ (CGSize)videoSizeTransformFromVideoPath:(NSString *)videoPath {
    
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (track == nil) {
        return CGSizeZero;
    }
    
    // Example: reading the audio sample rate. Every step is guarded — the file
    // may have no audio track, and passing NULL to
    // CMAudioFormatDescriptionGetStreamBasicDescription would crash.
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)audioTrack.formatDescriptions.firstObject;
    if (item != NULL) {
        const AudioStreamBasicDescription *audioStreamDescription = CMAudioFormatDescriptionGetStreamBasicDescription(item);
        if (audioStreamDescription != NULL) {
            Float64 sampleRate = audioStreamDescription->mSampleRate;
            (void)sampleRate; // e.g. 44100.0 — use as needed (original read it and discarded it)
        }
    }
    
    // naturalSize is pre-rotation; applying preferredTransform yields the
    // on-screen orientation, which can make a component negative — hence fabs.
    CGSize dimensions = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    return CGSizeMake(fabs(dimensions.width), fabs(dimensions.height));
}

3、获取视频的分辨率

/// Resolution (width x height) of the video file at `videoPath`, in display
/// orientation. Returns CGSizeZero when the file does not exist.
+ (CGSize)videoSizeFromVideoPath:(NSString *)videoPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [AVURLAsset assetWithURL:fileURL];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Apply the track transform so rotated videos report on-screen dimensions;
    // the transform can negate a component, so take absolute values.
    CGSize transformedSize = CGSizeApplyAffineTransform(videoTrack.naturalSize,
                                                        videoTrack.preferredTransform);
    return CGSizeMake(fabs(transformedSize.width), fabs(transformedSize.height));
}

4、获取视频的帧率

/// Nominal frame rate (fps) of the first video track of the file at
/// `videoPath`. Returns 0 when the file does not exist or has no video track.
+ (CGFloat)nominalFrameRateFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.00;
    }
    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [AVURLAsset assetWithURL:fileURL];
    AVAssetTrack *firstVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Messaging a nil track yields 0, which matches the "not found" result.
    return firstVideoTrack.nominalFrameRate;
}

5、获取视频时长(单位毫秒)

/// Duration of the video at `videoPath`, in MILLISECONDS.
/// Returns 0 when the file is missing or its duration is unknown/invalid.
+ (NSTimeInterval)videoDurationFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.00;
    }
    // Ask for precise duration — cheap estimates can be off for some containers.
    NSDictionary *inputOptions = @{AVURLAssetPreferPreciseDurationAndTimingKey: @YES};
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:inputOptions];
    CMTime duration = urlAsset.duration;
    // Guard against invalid/indefinite CMTime: the original divided by
    // duration.timescale directly, which is 0 for an invalid time (inf/NaN).
    if (!CMTIME_IS_NUMERIC(duration)) {
        return 0.00;
    }
    return 1000.0 * CMTimeGetSeconds(duration);
}

6、AVFrame转换为UIImage(针对YUV420p数据)


/// Converts a decoded FFmpeg AVFrame (YUV420p) into a UIImage.
/// Returns nil when the frame cannot be converted to a pixel buffer.
+ (UIImage *)converUIImageFromAVFrame:(AVFrame*)frame {
    CVPixelBufferRef pixelBuffer = [GTVideoTool converCVPixelBufferRefFromAVFrame:frame];
    // converCVPixelBufferRefFromAVFrame: returns NULL for bad input or when
    // CVPixelBufferCreate fails; [CIImage imageWithCVPixelBuffer:NULL] raises.
    if (pixelBuffer == NULL) {
        return nil;
    }
    
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer))];
    
    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    
    // Both refs were created (+1) above — balance them before returning.
    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(videoImage);
    
    return uiImage;
}

/// Converts a YUV420p AVFrame into an NV12 (420YpCbCr8BiPlanarVideoRange)
/// CVPixelBuffer by interleaving the Cb/Cr planes.
/// Returns a +1 pixel buffer (caller releases with CVPixelBufferRelease),
/// or NULL on invalid input / allocation failure.
+ (CVPixelBufferRef)converCVPixelBufferRefFromAVFrame:(AVFrame *)avframe {
    if (!avframe || !avframe->data[0]) {
        return NULL;
    }
    
    // NV12 interleaves Cb and Cr into one plane, so both source chroma planes
    // must share a stride for the simple interleave below to be valid.
    if (avframe->linesize[1] != avframe->linesize[2]) {
        return NULL;
    }
    
    NSDictionary *options = @{
        (id)kCVPixelBufferBytesPerRowAlignmentKey: @(avframe->linesize[0]),
        (id)kCVPixelBufferOpenGLESCompatibilityKey: @YES,
        (id)kCVPixelBufferIOSurfacePropertiesKey: @{},
    };
    
    CVPixelBufferRef outputPixelBuffer = NULL;
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       avframe->width,
                                       avframe->height,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       (__bridge CFDictionaryRef)options,
                                       &outputPixelBuffer);
    // Check the result BEFORE touching the buffer — the original locked and
    // memcpy'd into a NULL buffer first and only checked `ret` afterwards.
    if (ret != kCVReturnSuccess || outputPixelBuffer == NULL) {
        NSLog(@"CVPixelBufferCreate Failed");
        return NULL;
    }
    
    // Interleave the Cb and Cr planes (UVUVUV...).
    size_t srcPlaneSize = (size_t)avframe->linesize[1] * avframe->height / 2;
    uint8_t *dstPlane = malloc(srcPlaneSize * 2);
    if (dstPlane == NULL) {
        CVPixelBufferRelease(outputPixelBuffer);
        return NULL;
    }
    for (size_t i = 0; i < srcPlaneSize; i++) {
        dstPlane[2 * i]     = avframe->data[1][i];
        dstPlane[2 * i + 1] = avframe->data[2][i];
    }
    
    CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
    
    // Copy row by row: the pixel buffer's bytes-per-row and the AVFrame's
    // linesize may differ (both can carry padding), so a single memcpy of
    // bytesPerRow * height can read or write out of bounds.
    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    uint8_t *dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    size_t copyBytesY = MIN(bytesPerRowY, (size_t)avframe->linesize[0]);
    for (int row = 0; row < avframe->height; row++) {
        memcpy(dstY + (size_t)row * bytesPerRowY,
               avframe->data[0] + (size_t)row * avframe->linesize[0],
               copyBytesY);
    }
    
    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    uint8_t *dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    size_t srcRowUV = (size_t)avframe->linesize[1] * 2; // interleaved UV stride
    size_t copyBytesUV = MIN(bytesPerRowUV, srcRowUV);
    for (int row = 0; row < avframe->height / 2; row++) {
        memcpy(dstUV + (size_t)row * bytesPerRowUV,
               dstPlane + (size_t)row * srcRowUV,
               copyBytesUV);
    }
    
    CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    free(dstPlane);
    
    return outputPixelBuffer; // +1: caller must CVPixelBufferRelease
}

7、BGRA转换为UIImage对象

/// Converts a BGRA frame struct into a UIImage, then frees the struct.
/// Returns nil for a NULL struct or NULL pixel pointer (the struct is still
/// freed in the latter case, matching the original ownership contract).
+ (UIImage *)converUIImageFromRGBA:(ST_GTV_RGBA *)argb {
    if (argb == NULL) {
        return nil;
    }
    UIImage *image = nil;
    if (argb->p_rgba != NULL) {
        // NOTE: `heigh` is the (misspelled) field name declared by the struct.
        image = [GTVideoTool imageFromBRGABytes:argb->p_rgba imageSize:CGSizeMake(argb->width, argb->heigh)];
    }
    // This method owns `argb` and must always release it exactly once.
    gtv_queue_rgba_free(argb);
    return image;
}

/// Wraps raw BGRA pixel bytes into a UIImage.
/// (The method name keeps the historical "BRGA" spelling — callers depend on it.)
+ (UIImage *)imageFromBRGABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    CGImageRef cgImage = [self imageRefFromBGRABytes:imageBytes imageSize:imageSize];
    UIImage *result = [UIImage imageWithCGImage:cgImage];
    // The helper returns a +1 CGImage (Create Rule); balance it here.
    CGImageRelease(cgImage);
    return result;
}

/// Creates a CGImage from raw BGRA bytes (tightly packed, 4 bytes/pixel).
/// Returns a +1 CGImageRef (caller must CGImageRelease), or NULL when a
/// bitmap context cannot be created for the given size.
+ (CGImageRef)imageRefFromBGRABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // kCGBitmapByteOrder32Little + premultiplied-first == BGRA byte layout.
    CGContextRef context = CGBitmapContextCreate(imageBytes,
                                                 imageSize.width,
                                                 imageSize.height,
                                                 8,
                                                 imageSize.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        // Invalid size/stride combination — bail out rather than imaging a
        // NULL context.
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    
    return imageRef;
}

8、从视频中抽取多张图片

/// Extracts `frameCount` thumbnails evenly spaced between `clipStartTime` and
/// `clipEndTime` (seconds) from the video at `videoPath`, returned in frame
/// order. Returns nil for invalid arguments.
/// NOTE(review): this BLOCKS the calling thread on a semaphore until
/// generation completes or one frame fails — do not call on the main thread.
- (NSArray<UIImage *> *)generateThumbnailFromVideoPath:(NSString *)videoPath
                                            frameCount:(NSInteger)frameCount
                                         clipStartTime:(NSTimeInterval)clipStartTime
                                           clipEndTime:(NSTimeInterval)clipEndTime {
    if (frameCount == 0) {
        NSLog(@"error frameCount is equal to zero");
        return nil;
    }
    CGFloat videoDuration = clipEndTime - clipStartTime;
    if (videoDuration <= 0) {
        NSLog(@"error videoDuration is less than zero");
        return nil;
    }
    CGFloat delayTime = videoDuration / frameCount;
    
    // Requested timestamps use a timescale of 25 (1/25 s precision).
    NSMutableArray *frameTimes = [[NSMutableArray alloc] initWithCapacity:frameCount];
    for (int currentFrame = 0; currentFrame < frameCount; currentFrame++) {
        CGFloat currentTime = (clipStartTime + currentFrame * delayTime) * 25;
        CMTime time = CMTimeMake(currentTime, 25);
        [frameTimes addObject:[NSValue valueWithCMTime:time]];
    }
    
    __block NSUInteger successCount = 0;
    NSMutableDictionary *imageDict = [NSMutableDictionary dictionary]; // frame index -> image
    dispatch_semaphore_t sema = dispatch_semaphore_create(0);
    
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    [generator generateCGImagesAsynchronouslyForTimes:frameTimes
                                    completionHandler:^(CMTime requestedTime, CGImageRef _Nullable cgImageRef, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
                                        if (result == AVAssetImageGeneratorSucceeded) {
                                            successCount++;
                                            UIImage *image = [[UIImage alloc] initWithCGImage:cgImageRef scale:[UIScreen mainScreen].scale orientation:UIImageOrientationUp];
                                            
                                            // Map the callback back to its frame index so
                                            // out-of-order completion doesn't scramble results.
                                            for (int i = 0; i < frameTimes.count; i++) {
                                                CMTime time = [[frameTimes objectAtIndex:i] CMTimeValue];
                                                if (CMTimeCompare(time, requestedTime) == 0) {
                                                    [imageDict setObject:image forKey:@(i)];
                                                    break;
                                                }
                                            }
                                            
                                            if (successCount == frameTimes.count) {
                                                dispatch_semaphore_signal(sema);
                                            }
                                        } else {
                                            // First failure/cancellation unblocks the caller;
                                            // in-flight requests are cancelled below.
                                            dispatch_semaphore_signal(sema);
                                        }
                                    }];
    
    dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
    [generator cancelAllCGImageGeneration];
    
    // FIX: allValues has no defined order — the original returned the
    // thumbnails scrambled despite tracking indices. Sort by frame index.
    NSArray *sortedIndexes = [imageDict.allKeys sortedArrayUsingSelector:@selector(compare:)];
    NSMutableArray<UIImage *> *orderedImages = [NSMutableArray arrayWithCapacity:sortedIndexes.count];
    for (NSNumber *frameIndex in sortedIndexes) {
        [orderedImages addObject:imageDict[frameIndex]];
    }
    return orderedImages;
}

9、在视频硬编码时,出现视频前几帧没有编码成功的问题,在github上搜索appendPixelBuffer相关的代码块。尝试在编码前,将CVPixelBufferRef拷贝一份再编码,解决了此问题。

/// Appends one video frame to the asset writer at timestamp `milli`
/// (milliseconds). The buffer is deep-copied before appending — this works
/// around the first few frames failing to encode.
/// NOTE(review): the copy path assumes a packed (non-planar) 32BGRA source;
/// for planar buffers CVPixelBufferGetBaseAddress returns NULL and we fall
/// back to appending the original buffer — TODO confirm source format.
- (void)appendPixelBuffer:(CVPixelBufferRef)videoPixelBuffer withTimestamp:(int)milli {
    [self startSessionIfNeededAtTime:CMTimeMake(milli, 1000)];
    if(![self.videoInput isReadyForMoreMediaData]) {
        GTVLog(@"videoInput is not ready");
        return;
    }
    
    if((int)(milli-self.videoTimestamp) > 60) {
        GTVLog(@"### video fps low (%d,%d)!", milli, (int)(milli-self.videoTimestamp));
    }
    
    // Presentation timestamps must be strictly increasing.
    if(milli <= self.videoTimestamp) {
        GTVLog(@"appendPixelBuffer: invalid pts (%d,%d)", milli, (int)self.videoTimestamp);
        return;
    }
    
    if(videoPixelBuffer != NULL) {
        if( self.videoWriter.status != AVAssetWriterStatusWriting || self.videoInput.readyForMoreMediaData == false ) {
            GTVLog(@"###videoWriter status %ld %@ %@", (long)self.videoWriter.status, self.videoWriter.error, self.videoInput.readyForMoreMediaData?@"YES":@"NO");
        } else {
            @try {
                CMTime presentTime = CMTimeMake(milli, 1000);
                
                // Fix for the first few frames failing to encode: append a copy.
                int bufferW = (int)CVPixelBufferGetWidth(videoPixelBuffer);
                int bufferH = (int)CVPixelBufferGetHeight(videoPixelBuffer);
                
                CVPixelBufferRef pixelBufferCopy = NULL;
                if (CVPixelBufferCreate(kCFAllocatorDefault, bufferW, bufferH, kCVPixelFormatType_32BGRA, NULL, &pixelBufferCopy) == kCVReturnSuccess) {
                    self.writeFrameCount ++;
                    
                    CVPixelBufferLockBaseAddress(videoPixelBuffer, 0);
                    CVPixelBufferLockBaseAddress(pixelBufferCopy, 0);
                    
                    uint8_t *baseAddress = CVPixelBufferGetBaseAddress(videoPixelBuffer);
                    uint8_t *copyBaseAddress = CVPixelBufferGetBaseAddress(pixelBufferCopy);
                    if (baseAddress != NULL && copyBaseAddress != NULL) {
                        // Copy row by row with the smaller stride: the two
                        // buffers may have different bytes-per-row padding, and
                        // the original single memcpy of height * source-stride
                        // could overflow the destination allocation.
                        size_t srcBytesPerRow = CVPixelBufferGetBytesPerRow(videoPixelBuffer);
                        size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBufferCopy);
                        size_t copyBytes = MIN(srcBytesPerRow, dstBytesPerRow);
                        for (int row = 0; row < bufferH; row++) {
                            memcpy(copyBaseAddress + (size_t)row * dstBytesPerRow,
                                   baseAddress + (size_t)row * srcBytesPerRow,
                                   copyBytes);
                        }
                        [self.pixelAdaptor appendPixelBuffer:pixelBufferCopy withPresentationTime:presentTime];
                    } else {
                        // Planar or inaccessible source — append the original.
                        [self.pixelAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:presentTime];
                    }
                    
                    CVPixelBufferUnlockBaseAddress(videoPixelBuffer, 0);
                    CVPixelBufferUnlockBaseAddress(pixelBufferCopy, 0);
                    
                    CVPixelBufferRelease(pixelBufferCopy);
                    
                } else {
                    self.writeFrameCount ++;
                    
                    [self.pixelAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:presentTime];
                }
            }
            @catch(NSException * ex) {
                GTVLog(@"mp4muxer exception %@", ex);
            }
        }
    } else {
        GTVLog(@"preparePixelBuffer failed.");
    }
    
    self.videoTimestamp = milli;
}

10.获取视频中的关键帧数量(I帧)

/// Counts the key frames (I-frames / sync samples) in the asset's first video
/// track by scanning every sample's attachment dictionary.
/// Returns 0 when the reader cannot be created or there is no video track.
- (NSInteger)numOfKeyFrameFromVideoPath:(AVURLAsset *)urlAsset{
    MYTimer *timer = [MYTimer timerWithFlag:@"获取关键帧数量"];
    NSError *error;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:urlAsset error:&error];
    if (error) {
        [timer dot];
        return 0;
    }

    // firstObject is nil-safe; objectAtIndex:0 would throw for an asset
    // without a video track (e.g. audio-only files).
    AVAssetTrack *videoTrack = [[urlAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil) {
        [timer dot];
        return 0;
    }
    // nil outputSettings: samples are vended in their stored (compressed)
    // format, so the sync/not-sync attachment is preserved.
    AVAssetReaderTrackOutput *trackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil];
    [assetReader addOutput:trackOutput];
    [assetReader startReading];

    NSInteger keyFrames = 0;
    while (YES) {
        CMSampleBufferRef sampleBuffer = [trackOutput copyNextSampleBuffer];
        if (sampleBuffer == NULL) {
            break;
        }
        // NB: not every sample buffer corresponds to a frame!
        CFArrayRef attachmentarr = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, TRUE);
        if (attachmentarr && (CFArrayGetCount(attachmentarr) > 0)) {
            // A sample is a sync sample (key frame) unless explicitly marked
            // "not sync".
            Boolean isKeyFrame = !CFDictionaryContainsKey(CFArrayGetValueAtIndex(attachmentarr, 0), kCMSampleAttachmentKey_NotSync);
            if (isKeyFrame) {
                keyFrames += 1;
            }
        }
        // copyNextSampleBuffer follows the Create Rule (+1); the original
        // never released it and leaked every sample in the video.
        CFRelease(sampleBuffer);
    }
    [timer dot];
    return keyFrames;
}

11.GIF倒放

/// Reverses the frames of the GIF at `sourcePath` and writes the result to a
/// temporary file. All callbacks are delivered on the main queue; exactly one
/// terminal callback fires: success (with the output URL), failure, or cancel.
+ (void)invertGIFWithModel:(NSString *)sourcePath
           progressHandler:(void (^_Nullable)(CGFloat progress))progressHandler
                   success:(void (^_Nullable)(NSURL * _Nullable outputURL))success
                   failure:(void (^_Nullable)(NSError * _Nullable error))failure
                   shouldCancel:(BOOL (^_Nullable)(void))shouldCancel
                    cancel:(void (^_Nullable)(void))cancel {
    YYImage *gifImage = [YYImage imageWithContentsOfFile:[GDVideoEditUtilManager cutFileStringVideoPath:sourcePath]];
    NSUInteger imageCount = [gifImage animatedImageFrameCount];
    if (imageCount == 0) {
        !failure ? : failure(nil);
        return;
    }
    
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // GIF loop count 0 == loop forever.
        NSDictionary *fileProperties = @{(__bridge id)kCGImagePropertyGIFDictionary: @{(__bridge id)kCGImagePropertyGIFLoopCount: @0}};
        NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%f-gaoding-reverse.gif", [[NSDate dateWithTimeIntervalSinceNow:0] timeIntervalSince1970]]]];
        if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
            [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
        }
        
        CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)outputURL, kUTTypeGIF, imageCount, NULL);
        if (destination == NULL) {
            // Could not create the destination — report failure instead of
            // passing a NULL ref to the ImageIO calls below.
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error = [NSError errorWithDomain:@"GDMVideoEdit" code:9999 userInfo:@{NSLocalizedDescriptionKey: @"视频生成失败"}];
                !failure ? : failure(error);
            });
            return;
        }
        CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)fileProperties);
        
        // Append frames last-to-first to reverse playback.
        for (NSInteger index = imageCount - 1; index >= 0; index--) {
            if ([GDMediaFrameRetriever cancelByUser:shouldCancel]) { // cancelled by the user
                CFRelease(destination);
                [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
                dispatch_async(dispatch_get_main_queue(), ^{
                    !cancel ? : cancel();
                });
                return;
            }
            
            UIImage *image = [gifImage animatedImageFrameAtIndex:index];
            NSTimeInterval duration = [gifImage animatedImageDurationAtIndex:index];
            NSDictionary *frameProperties = @{(__bridge id)kCGImagePropertyGIFDictionary: @{(__bridge id)kCGImagePropertyGIFDelayTime: @(duration)}};
            CGImageDestinationAddImage(destination, image.CGImage, (__bridge CFDictionaryRef)frameProperties);
            CGFloat progress = (1 - index / (imageCount * 1.0)) * 0.9; // frame writing spans 0..0.9
            dispatch_async(dispatch_get_main_queue(), ^{
                !progressHandler ? : progressHandler(progress);
            });
        }
        
        BOOL writtenImageSuccess = CGImageDestinationFinalize(destination);
        CFRelease(destination);
        
        // FIX: the original (a) had a malformed userInfo dictionary literal
        // that did not compile, and (b) invoked `success` unconditionally even
        // after reporting failure. Deliver exactly one terminal callback.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (writtenImageSuccess) {
                !progressHandler ? : progressHandler(1.0);
                !success ? : success(outputURL);
            } else {
                NSError *error = [NSError errorWithDomain:@"GDMVideoEdit" code:9999 userInfo:@{NSLocalizedDescriptionKey: @"视频生成失败"}];
                !failure ? : failure(error);
            }
        });
    });
}

12.获取图片尺寸

/// Reads the pixel dimensions of the image file at `filePath` from its
/// metadata (no full bitmap decode). EXIF orientations 5-8 are rotated, so
/// width/height are swapped for them. Returns CGSizeZero when the file cannot
/// be opened as an image source.
+ (CGSize)imageSizeWithFile:(NSString *)filePath {
    NSURL *imageFileURL = [NSURL fileURLWithPath:filePath];
    CGImageSourceRef imageSource = CGImageSourceCreateWithURL((CFURLRef)imageFileURL, NULL);
    if (imageSource == NULL) {
        return CGSizeZero;
    }
    
    CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
    // The source is only needed to copy the properties dictionary.
    CFRelease(imageSource);
    if (properties == NULL) {
        return CGSizeMake(0.0f, 0.0f);
    }
    
    CGFloat pixelWidth = 0.0f;
    CGFloat pixelHeight = 0.0f;
    
    CFNumberRef widthValue = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
    if (widthValue != NULL) {
        CFNumberGetValue(widthValue, kCFNumberCGFloatType, &pixelWidth);
    }
    
    CFNumberRef heightValue = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
    if (heightValue != NULL) {
        CFNumberGetValue(heightValue, kCFNumberCGFloatType, &pixelHeight);
    }
    
    CFNumberRef orientationValue = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
    if (orientationValue != NULL) {
        int exifOrientation;
        CFNumberGetValue(orientationValue, kCFNumberIntType, &exifOrientation);
        if (exifOrientation > 4) {
            // Rotated orientation: the stored axes are swapped on display.
            CGFloat swapped = pixelWidth;
            pixelWidth = pixelHeight;
            pixelHeight = swapped;
        }
    }
    
    CFRelease(properties);
    
    return CGSizeMake(pixelWidth, pixelHeight);
}
最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 213,014评论 6 492
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 90,796评论 3 386
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 158,484评论 0 348
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 56,830评论 1 285
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 65,946评论 6 386
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 50,114评论 1 292
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 39,182评论 3 412
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 37,927评论 0 268
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 44,369评论 1 303
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 36,678评论 2 327
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 38,832评论 1 341
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 34,533评论 4 335
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 40,166评论 3 317
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 30,885评论 0 21
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 32,128评论 1 267
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 46,659评论 2 362
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 43,738评论 2 351

推荐阅读更多精彩内容