方法一:
只能录制iOS页面,unity页面为黑屏
方法二:
类似于方法一
方法三:
Unity3D研究院之IOS截屏 话筒录音 录制截屏视频保存沙盒(另类实现方法)
OC的代码截图只有UI部分,U3D截图只有3D部分。为了解决这个问题截屏时我们需要把这两张图片合成为一张全新的图片。这里再说一下用苹果私有API截图是可以同时将UI部分U3D部分保存为一张图片,不过有可能APPStore不能审核通过所以大家还是老老实实用合并的方法来做。
unity脚本,如图所示.添加
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.UI;
// Hooks up the record/stop/share buttons and bridges them to the native
// recording implementations (ShareREC on Android, ObjC functions on iOS).
public class NewBehaviourScript : MonoBehaviour {

    Button _start;
    Button _stop;
    Button _share;

    // Wire button callbacks; the buttons are children named Start/Stop/Share.
    void Start () {
        _start = this.transform.Find("Start").GetComponent<Button>();
        _stop = this.transform.Find("Stop").GetComponent<Button>();
        _share = this.transform.Find("Share").GetComponent<Button>();
        _start.onClick.AddListener(StartVideo);
        _stop.onClick.AddListener(StopVideo);
        _share.onClick.AddListener(ShareVideo);
    }

    // Native iOS entry points. "SharaV" [sic] must match the spelling used in
    // the Objective-C extern "C" block.
    [DllImport("__Internal")]
    public static extern void StopV();
    [DllImport("__Internal")]
    public static extern void StartV();
    [DllImport("__Internal")]
    public static extern void SharaV();
    [DllImport("__Internal")]
    private static extern void unityToIOS (string str);

    private void StartVideo()
    {
#if UNITY_ANDROID
        ShareREC.StartRecorder();
#elif UNITY_IOS
        StartV();
#endif
    }

    private void StopVideo()
    {
#if UNITY_ANDROID
        ShareREC.StopRecorder();
#elif UNITY_IOS
        StopV();
#endif
    }

    private void ShareVideo()
    {
#if UNITY_ANDROID
        ShareREC.ShowShare();
#elif UNITY_IOS
        // BUG FIX: this previously called StartV(), which (re)started recording
        // instead of invoking the native share routine SharaV().
        SharaV();
#endif
    }

    void OnGUI()
    {
        // Demo button: call into native code, passing a string across the bridge.
        if (GUI.Button (new Rect (300, 300, 300, 100), "跳转到IOS界面")) {
            unityToIOS ("Hello IOS");
        }
    }

    void Update () {
    }

    int count = 0;

    // Called from the ObjC side via UnitySendMessage("Canvas","StartScreenshot",...).
    // Saves the current Unity frame as "<count>u3d.JPG" in the app sandbox.
    void StartScreenshot(string str)
    {
        Application.CaptureScreenshot(count + "u3d.JPG");
        count++;
    }
}
然后我们将这个Unity3D工程导出成IOS的项目 。Unity会生成对应的XCODE工程。我们写一个全新的ViewController覆盖在U3D生成的OPGL viewController之上,用于写UI高级控件,接着打开UnityAppController.mm文件。
//--------------------下面的MyViewController就是我们新写的Contoller----------------
MyViewController * myView = [[MyViewController alloc] init];
[UnityGetGLViewController().view addSubview:myView.view];
//--------------------上面的MyViewController就是我们新写的Controller----------------
[UnityGetGLViewController().view bringSubviewToFront:myView.view];
MyViewController.h
//
// MyViewController.h
// Unity-iPhone
//
// Created by guopenglai on 2017/10/17.
//
#import <UIKit/UIKit.h>
// FIX: system frameworks should be imported with angle brackets, not quotes
// (quoted form searches the project directory first).
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>

/// Overlay controller placed above Unity's GL view. Hosts the record/stop UI,
/// drives the periodic screenshot timer, records microphone audio, and merges
/// screenshots + audio into a movie in the Documents directory.
@interface MyViewController : UIViewController
@end
MyViewController.m
#import "MyViewController.h"
// Private state for MyViewController.
@interface MyViewController ()<AVAudioRecorderDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
{
// Screenshot timer: fires every 0.1 s while recording (see -startPress).
NSTimer *_timer;
// Number of frames captured so far; also used as the screenshot file-name index.
int _count;
// On-screen frame-counter label.
UILabel * _labe;
// Microphone recorder; writes Documents/sound.caf (see -prepareToRecord).
AVAudioRecorder * _recorder;
// Loading-overlay views shown while the movie is being assembled.
UITextView *_sharedLoadingTextView;
UIActivityIndicatorView* _sharedActivityView;
}
@end
@implementation MyViewController
// Default memory-warning handler; nothing extra is released here.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
// Designated-initializer passthrough; no custom setup is performed.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
// Builds the overlay UI (start/stop buttons + counter label), takes an initial
// UIKit-layer screenshot of the key window, and prepares the audio recorder.
// NOTE: renderInContext: captures only the UIKit layer, not Unity's GL surface.
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor redColor];

    // One-off snapshot of the UIKit layer, saved straight to the photo album.
    // NOTE(review): this looks like leftover debug code — confirm it is intentional.
    UIWindow *screenWindow = [[UIApplication sharedApplication] keyWindow];
    UIGraphicsBeginImageContext(screenWindow.frame.size);
    [screenWindow.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    UIImageWriteToSavedPhotosAlbum(viewImage, nil, nil, nil);

    // FIX: both branches of the original #if !TARGET_IPHONE_SIMULATOR set the
    // same color, so the conditional was collapsed.
    self.view.backgroundColor = [UIColor clearColor];

    UIButton *start = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [start setFrame:CGRectMake(0, 100, 200, 30)];
    [start setTitle:@"开始截屏" forState:UIControlStateNormal];
    [start addTarget:self action:@selector(startPress) forControlEvents:UIControlEventTouchDown];

    UIButton *end = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [end setFrame:CGRectMake(0, 150, 200, 30)];
    [end setTitle:@"结束截屏(开始录制视频)" forState:UIControlStateNormal];
    [end addTarget:self action:@selector(endPress) forControlEvents:UIControlEventTouchUpInside];

    [self.view addSubview:start];
    [self.view addSubview:end];

    _labe = [[UILabel alloc] initWithFrame:CGRectMake(30, 200, 300, 30)];
    _labe.text = [NSString stringWithFormat:@"%@%d",@"雨松MOMO开始计时:=== ",_count];
    [self.view addSubview:_labe];

    // Set up the AVAudioRecorder so -startPress can begin recording immediately.
    [self prepareToRecord];
}
// Shows a loading overlay: a dark rounded text box plus a large spinning
// activity indicator, both centered on the view. Removed by -removeLoading.
// @param info Text displayed in the overlay.
-(void)addLoading:(NSString*) info
{
    _sharedLoadingTextView = [[UITextView alloc] initWithFrame:CGRectMake(0, 0, 130, 130)];
    [_sharedLoadingTextView setBackgroundColor:[UIColor blackColor]];
    [_sharedLoadingTextView setText:info];
    [_sharedLoadingTextView setTextColor:[UIColor whiteColor]];
    // FIX: NSTextAlignmentCenter replaces the deprecated UITextAlignmentCenter
    // (same underlying value); the duplicate alignment assignment was dropped.
    [_sharedLoadingTextView setTextAlignment:NSTextAlignmentCenter];
    [_sharedLoadingTextView setFont:[UIFont systemFontOfSize:15]];
    _sharedLoadingTextView.alpha = 0.8f;
    _sharedLoadingTextView.center = self.view.center;
    _sharedLoadingTextView.layer.cornerRadius = 10;
    _sharedLoadingTextView.layer.masksToBounds = YES;

    // FIX: the original initialized with the Gray style and immediately
    // overrode it with WhiteLarge; initialize with the final style directly.
    _sharedActivityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
    _sharedActivityView.frame = CGRectMake(0, 0, 320, 480);
    _sharedActivityView.center = self.view.center;
    [_sharedActivityView startAnimating];

    [self.view addSubview:_sharedLoadingTextView];
    [self.view addSubview:_sharedActivityView];
}
// Removes the loading overlay added by -addLoading:. Must run on the main thread.
-(void)removeLoading
{
[_sharedLoadingTextView removeFromSuperview];
[_sharedActivityView removeFromSuperview];
}
// Starts capture: resets the frame counter, fires -heartBeat: every 0.1 s
// (~10 screenshots per second), and begins microphone recording.
// NOTE: a repeating NSTimer retains its target until invalidated (-endPress).
-(void)startPress
{
_count = 0;
_timer = [NSTimer scheduledTimerWithTimeInterval: 0.1
target: self
selector: @selector(heartBeat:)
userInfo: nil
repeats: YES];
// Start audio recording alongside the screenshot timer.
[_recorder record];
}
// Stops the screenshot timer, shows the loading overlay, and kicks off movie
// assembly on a detached background thread (-startThreadMainMethod).
-(void)endPress
{
NSLog(@"开始制作视频");
if(_timer != nil)
{
[_timer invalidate];
_timer = nil;
[self addLoading:@"开始制作视频"];
// Build the movie off the main thread so the overlay can animate.
[NSThread detachNewThreadSelector:@selector(startThreadMainMethod) toTarget:self withObject:nil];
}
}
// Background worker (spawned by -endPress): merges each saved screenshot pair
// (UIKit capture + Unity capture) and encodes the result into Documents/veido.MP4,
// then dismisses the loading overlay on the main thread.
-(void)startThreadMainMethod
{
    NSMutableArray *frames = [[NSMutableArray alloc] init];
    NSString *Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    for (int i = 0; i < _count; i++)
    {
        // "<i>.JPG" is the UIKit capture, "<i>u3d.JPG" is Unity's capture of the same frame.
        NSString *_pathSecond = [NSString stringWithFormat:@"%@/%d%@",Path,i,@".JPG"];
        NSString *_pathFirst = [NSString stringWithFormat:@"%@/%d%@",Path,i,@"u3d.JPG"];
        // Load each file and decode it into a UIImage.
        NSData *data0 = [NSData dataWithContentsOfFile:_pathFirst];
        NSData *data1 = [NSData dataWithContentsOfFile:_pathSecond];
        UIImage *img0 = [UIImage imageWithData:data0];
        UIImage *img1 = [UIImage imageWithData:data1];
        [frames addObject:[self MergerImage : img0 : img1]];
    }
    Path = [NSString stringWithFormat:@"%@/%@%@",Path,@"veido",@".MP4"];
    [_recorder stop];
    [self writeImages:frames ToMovieAtPath:Path withSize: CGSizeMake(320, 480) inDuration:_count*0.1 byFPS:10];
    // BUG FIX: -removeLoading touches UIKit and must run on the main thread;
    // this method runs on a detached background thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self removeLoading];
    });
    NSLog(@"recorder successfully");
}
// Timer callback (every 0.1 s while recording): updates the counter label,
// snapshots the UIKit layer of the key window to Documents/<count>.JPG, and
// asks the Unity side to save its own frame for later merging.
- (void) heartBeat: (NSTimer*) timer
{
_labe.text = [NSString stringWithFormat:@"%@%d",@"雨松MOMO开始计时:=== ",_count];
// UIGetScreenImage() is a private API; using it risks App Store rejection.
// It would capture both the UIKit and Unity GL layers in one shot:
//extern CGImageRef UIGetScreenImage();
//UIImage *image = [UIImage imageWithCGImage:UIGetScreenImage()];
//UIImageWriteToSavedPhotosAlbum(image,nil,nil,nil);
// To stay safe, use the public snapshot API below instead.
// NOTE: renderInContext: cannot capture the Unity (OpenGL) content.
UIWindow *screenWindow = [[UIApplication sharedApplication]keyWindow];
UIGraphicsBeginImageContext(screenWindow.frame.size);
[screenWindow.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSData *data;
// Prefer PNG; fall back to maximum-quality JPEG if PNG encoding fails.
if (UIImagePNGRepresentation(image) == nil)
{
data = UIImageJPEGRepresentation(image, 1);
}
else
{
data = UIImagePNGRepresentation(image);
}
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString * Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
[fileManager createDirectoryAtPath:Path withIntermediateDirectories:YES attributes:nil error:nil];
Path = [NSString stringWithFormat:@"%@/%d%@",Path,_count,@".JPG"];
[fileManager createFileAtPath:Path contents:data attributes:nil];
// Tell Unity to capture its frame; "Canvas" must be the GameObject the
// Unity-side script (NewBehaviourScript) is attached to.
UnitySendMessage("Canvas","StartScreenshot","");
_count++;
}
//合并图片,把ios前景图片和U3D图片合并在一起
// Composites two screenshots into one image: firstImg is drawn first,
// secondImg is drawn over it (used to merge the Unity frame with the UIKit
// overlay captured for the same tick).
// NOTE(review): canvas size is hard-coded to 320x480 — presumably targets
// 3.5-inch screens only; confirm for other devices.
-(UIImage*) MergerImage:(UIImage*) firstImg:(UIImage*) secondImg
{
UIGraphicsBeginImageContext(CGSizeMake(320, 480));
[firstImg drawInRect:CGRectMake(0, 0, firstImg.size.width, firstImg.size.height)];
[secondImg drawInRect:CGRectMake(0, 0, secondImg.size.width, secondImg.size.height)];
UIImage *resultImage=UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return resultImage;
}
// Legacy lifecycle hook (unused since iOS 6); nothing retained to release here.
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
}
// This overlay supports portrait orientation only.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
// Renders `image` into a newly allocated 32-bit ARGB CVPixelBuffer of `size`.
// Returns a +1 (owning) reference: the caller is responsible for calling
// CVPixelBufferRelease on the result when done.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
// Compatibility flags let the buffer back a CGBitmapContext below.
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
// Draw the CGImage directly into the pixel buffer's backing memory.
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
// Encodes the merged screenshots into an H.264 movie at `path`, then merges
// the movie with the recorded audio track (-CompileFilesToMakeMovie).
// @param imagesArray UIImage frames, in presentation order.
// @param path        Destination movie file path.
// @param size        Output video dimensions.
// @param duration    Total clip length in seconds.
// @param fps         Timescale used for frame timestamps.
- (void) writeImages:(NSArray *)imagesArray ToMovieAtPath:(NSString *) path withSize:(CGSize) size
inDuration:(float)duration byFPS:(int32_t)fps
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start the writing session at time zero.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    int frameCount = 0;
    // Explicit cast avoids the NSUInteger -> int truncation warning.
    int imagesCount = (int)[imagesArray count];
    // FIX: guard against division by zero when no frames were captured.
    if (imagesCount == 0) {
        NSLog(@"writeImages: no frames to encode");
        return;
    }
    float averageTime = duration/imagesCount;
    int averageFrame = (int)(averageTime * fps);

    for (UIImage *img in imagesArray)
    {
        // pixelBufferFromCGImage:andSize: returns a +1 reference; released below.
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        // Retry for up to ~3 s while the writer input is not ready.
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attemp %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f",frameCount,fps,frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if(buffer)
                    [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        // BUG FIX: each CVPixelBuffer was previously leaked (one per frame).
        if (buffer) {
            CVPixelBufferRelease(buffer);
        }
        frameCount = frameCount + averageFrame;
    }

    // Finish the session.
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"finishWriting");
    // Merge the silent movie with the recorded audio into a new file.
    [self CompileFilesToMakeMovie];
}
// Configures the shared audio session for play-and-record and creates an
// AVAudioRecorder writing 16-bit stereo 44.1 kHz linear PCM to
// Documents/sound.caf. Alerts the user if setup or hardware input fails.
- (void) prepareToRecord
{
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    NSError *err = nil;
    [audioSession setCategory :AVAudioSessionCategoryPlayAndRecord error:&err];
    if(err){
        // %ld/(long) matches NSInteger's width on 64-bit (was %d).
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }
    // BUG FIX: the original reset err to nil AFTER calling -setActive:error:,
    // so activation failures could never be detected. Reset before the call.
    err = nil;
    [audioSession setActive:YES error:&err];
    if(err){
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }

    // Uncompressed PCM: 44.1 kHz, 2 channels, 16-bit little-endian integer.
    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue :[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];
    [recordSetting setValue :[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
    [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
    [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];

    // Fixed output file in the Documents directory.
    NSString * recorderFilePath = [NSString stringWithFormat:@"%@/%@.caf", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], @"sound"] ;
    NSURL *url = [NSURL fileURLWithPath:recorderFilePath];
    err = nil;
    _recorder = [[ AVAudioRecorder alloc] initWithURL:url settings:recordSetting error:&err];
    if(!_recorder){
        NSLog(@"recorder: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        UIAlertView *alert =
        [[UIAlertView alloc] initWithTitle: @"Warning"
                                   message: [err localizedDescription]
                                  delegate: nil
                         cancelButtonTitle:@"OK"
                         otherButtonTitles:nil];
        [alert show];
        return;
    }

    // Prepare to record and enable level metering.
    [_recorder setDelegate:self];
    [_recorder prepareToRecord];
    _recorder.meteringEnabled = YES;

    BOOL audioHWAvailable = audioSession.inputIsAvailable;
    if (! audioHWAvailable) {
        UIAlertView *cantRecordAlert =
        [[UIAlertView alloc] initWithTitle: @"Warning"
                                   message: @"Audio input hardware not available"
                                  delegate: nil
                         cancelButtonTitle:@"OK"
                         otherButtonTitles:nil];
        [cantRecordAlert show];
        return;
    }
}
//代理 这里可以监听录音成功
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *) aRecorder successfully:(BOOL)flag
{
// NSLog(@"recorder successfully");
// UIAlertView *recorderSuccessful = [[UIAlertView alloc] initWithTitle:@"" message:@"录音成功"
// delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
// [recorderSuccessful show];
// [recorderSuccessful release];
}
//代理 这里可以监听录音失败
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)arecorder error:(NSError *)error
{
// UIAlertView *recorderFailed = [[UIAlertView alloc] initWithTitle:@"" message:@"发生错误"
// delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
// [recorderFailed show];
// [recorderFailed release];
}
// Merges the silent movie (Documents/veido.mp4) with the recorded audio
// (Documents/sound.caf) into Documents/outputVeido.mov via AVAssetExportSession.
// NOTE(review): -writeImages… writes "veido.MP4" (upper-case extension) while
// this reads "veido.mp4"; that only works on a case-insensitive file system —
// confirm, or unify the two names.
-(void)CompileFilesToMakeMovie
{
    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString* audio_inputFileName = @"sound.caf";
    NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], audio_inputFileName] ;
    NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    NSString* video_inputFileName = @"veido.mp4";
    NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], video_inputFileName] ;
    NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
    NSString* outputFileName = @"outputVeido.mov";
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], outputFileName] ;
    NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    // The export session fails if the destination already exists.
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    // BUG FIX: objectAtIndex:0 crashed when the asset had no track of the
    // requested type; use firstObject and skip the insert instead.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack) {
        CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
        AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:videoTrack atTime:nextClipStartTime error:nil];
    } else {
        NSLog(@"CompileFilesToMakeMovie: no video track found");
    }

    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
    AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack) {
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:audioTrack atTime:nextClipStartTime error:nil];
    } else {
        NSLog(@"CompileFilesToMakeMovie: no audio track found");
    }

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    // FIX: use the framework constant instead of the raw UTI string
    // "com.apple.quicktime-movie" (same value).
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        // FIX: surface export failures instead of silently discarding them.
        if (_assetExport.status == AVAssetExportSessionStatusFailed) {
            NSLog(@"CompileFilesToMakeMovie export failed: %@", _assetExport.error);
        }
    }];
}
@end
卡的一笔
方法四:
使用replaykit
#import <ReplayKit/ReplayKit.h>,记得在 Linked Frameworks and Libraries 中导入 ReplayKit 框架
extern "C"
{
// Start ReplayKit screen recording (called from Unity via DllImport "StartV").
void StartV()
{
NSLog(@"开始");
// Check whether this device/system supports ReplayKit before starting.
if ([RPScreenRecorder sharedRecorder].available) {
NSLog(@"OK");
// Supported: start recording with the microphone enabled.
[[RPScreenRecorder sharedRecorder] startRecordingWithMicrophoneEnabled:YES handler:^(NSError * _Nullable error) {
NSLog(@"%@", error);
// Handle start-up errors here, e.g. the user denied recording permission.
}];
} else {
NSLog(@"录制回放功能不可用");
}
}
// Stop recording and present ReplayKit's preview controller, where the user
// can save the video to the album, discard it, or share it.
void StopV()
{
NSLog(@"结束");
[[RPScreenRecorder sharedRecorder] stopRecordingWithHandler:^(RPPreviewViewController * _Nullable previewViewController, NSError * _Nullable error) {
if (error) {
NSLog(@"%@", error);
// Handle errors such as recording stopping because the disk is full.
}
if (previewViewController) {
// Route preview-controller delegate callbacks to the app controller.
previewViewController.previewControllerDelegate = GetAppController();
[GetAppController().window.rootViewController presentViewController:previewViewController animated:YES completion:nil];
}
}];
}
// Ask Unity for a screenshot of the current frame, then save the resulting
// JPG from the sandbox to the photo album.
void SharaV()
{
// Tell the Unity side to capture its frame to Documents/<frameCount>u3d.JPG.
UnitySendMessage("Canvas","StartScreenshot","");
// NOTE(review): Unity's screenshot capture appears to be asynchronous, so
// reading the file immediately below may race with Unity writing it — verify.
NSString * Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString * _pathFirst = [NSString stringWithFormat:@"%@/%ld%@",Path,GetAppController().frameCount,@"u3d.JPG"];
// Load the file contents at that path into a data object...
NSData *data0=[NSData dataWithContentsOfFile:_pathFirst];
// ...and decode it into an image.
UIImage *img0=[UIImage imageWithData:data0];
UIImageWriteToSavedPhotosAlbum(img0, GetAppController(), @selector(image:didFinishSavingWithError:contextInfo:), NULL);
}
// Entry point for Unity's unityToIOS(string); currently a no-op.
void unityToIOS(char* str)
{
}
}
// Completion callback for UIImageWriteToSavedPhotosAlbum (selector referenced
// by SharaV above); error is non-nil when saving failed.
// NOTE(review): msg is computed but never displayed — presumably an alert or
// toast was intended here; confirm.
- (void)image: (UIImage *) image didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo
{
NSString *msg = nil ;
if(error != NULL){
msg = @"保存图片失败" ;
}else{
msg = @"保存图片成功" ;
}
}