I am trying to screen capture a view that has an APPLCameraViewController in it. For some reason, when the recording gets saved to the camera roll, whatever the camera is looking at is not captured: the UIView and its labels come through, but the camera area behind the labels is just black. I want to have the labels on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen capture .m that I am assuming is the reason why this is happening.
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>

@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end

@implementation ASScreenRecorder
{
    dispatch_queue_t _render_queue;
    dispatch_queue_t _append_pixelBuffer_queue;
    dispatch_semaphore_t _frameRenderingSemaphore;
    dispatch_semaphore_t _pixelAppendSemaphore;

    CGSize _viewSize;
    CGFloat _scale;

    CGColorSpaceRef _rgbColorSpace;
    CVPixelBufferPoolRef _outputBufferPool;
}

#pragma mark - initializers

+ (instancetype)sharedInstance
{
    static dispatch_once_t once;
    static ASScreenRecorder *sharedInstance;
    dispatch_once(&once, ^{
        sharedInstance = [[self alloc] init];
    });
    return sharedInstance;
}

- (instancetype)init
{
    self = [super init];
    if (self) {
        _viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
        _scale = [UIScreen mainScreen].scale;
        // record at half-size resolution on retina iPads
        if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
            _scale = 1.0;
        }
        _isRecording = NO;

        _append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
        _render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(_render_queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
        _frameRenderingSemaphore = dispatch_semaphore_create(1);
        _pixelAppendSemaphore = dispatch_semaphore_create(1);
    }
    return self;
}

#pragma mark - public

- (void)setVideoURL:(NSURL *)videoURL
{
    NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
    _videoURL = videoURL;
}

- (BOOL)startRecording
{
    if (!_isRecording) {
        [self setUpWriter];
        _isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    }
    return _isRecording;
}

- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock
{
    if (_isRecording) {
        _isRecording = NO;
        [_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
        [self completeRecordingSession:completionBlock];
    }
}

#pragma mark - private

- (void)setUpWriter
{
    _rgbColorSpace = CGColorSpaceCreateDeviceRGB();

    NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                       (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
                                       (id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
                                       (id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
                                       (id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)};

    _outputBufferPool = NULL;
    CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);

    NSError *error = nil;
    _videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
                                             fileType:AVFileTypeQuickTimeMovie
                                                error:&error];
    NSParameterAssert(_videoWriter);

    NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
    NSDictionary *videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};

    NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
                                    AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width * _scale],
                                    AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height * _scale],
                                    AVVideoCompressionPropertiesKey: videoCompression};

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(_videoWriterInput);

    _videoWriterInput.expectsMediaDataInRealTime = YES;
    _videoWriterInput.transform = [self videoTransformForDeviceOrientation];

    _avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];

    [_videoWriter addInput:_videoWriterInput];

    [_videoWriter startWriting];
    [_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}

- (CGAffineTransform)videoTransformForDeviceOrientation
{
    CGAffineTransform videoTransform;
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationLandscapeLeft:
            videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
            break;
        case UIDeviceOrientationLandscapeRight:
            videoTransform = CGAffineTransformMakeRotation(M_PI_2);
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            videoTransform = CGAffineTransformMakeRotation(M_PI);
            break;
        default:
            videoTransform = CGAffineTransformIdentity;
    }
    return videoTransform;
}

- (NSURL *)tempFileURL
{
    NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
    [self removeTempFilePath:outputPath];
    return [NSURL fileURLWithPath:outputPath];
}

- (void)removeTempFilePath:(NSString *)filePath
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:filePath]) {
        NSError *error;
        if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
            NSLog(@"Could not delete old recording: %@", [error localizedDescription]);
        }
    }
}

- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock
{
    dispatch_async(_render_queue, ^{
        dispatch_sync(_append_pixelBuffer_queue, ^{

            [_videoWriterInput markAsFinished];
            [_videoWriter finishWritingWithCompletionHandler:^{

                void (^completion)(void) = ^() {
                    [self cleanup];
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (completionBlock) completionBlock();
                    });
                };

                if (self.videoURL) {
                    completion();
                } else {
                    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                    [library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                        if (error) {
                            NSLog(@"Error copying video to camera roll: %@", [error localizedDescription]);
                        } else {
                            [self removeTempFilePath:_videoWriter.outputURL.path];
                            completion();
                        }
                    }];
                }
            }];
        });
    });
}

- (void)cleanup
{
    self.avAdaptor = nil;
    self.videoWriterInput = nil;
    self.videoWriter = nil;
    self.firstTimeStamp = 0;
    self.outputBufferPoolAuxAttributes = nil;
    CGColorSpaceRelease(_rgbColorSpace);
    CVPixelBufferPoolRelease(_outputBufferPool);
}

- (void)writeVideoFrame
{
    // throttle the number of frames to prevent meltdown
    // technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
    if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
        return;
    }
    dispatch_async(_render_queue, ^{
        if (![_videoWriterInput isReadyForMoreMediaData]) return;

        if (!self.firstTimeStamp) {
            self.firstTimeStamp = _displayLink.timestamp;
        }
        CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
        CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);

        CVPixelBufferRef pixelBuffer = NULL;
        CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];

        if (self.delegate) {
            [self.delegate writeBackgroundFrameInContext:&bitmapContext];
        }
        // draw each window into the context (other windows include UIKeyboard, UIAlert)
        // FIX: UIKeyboard is currently only rendered correctly in portrait orientation
        dispatch_sync(dispatch_get_main_queue(), ^{
            UIGraphicsPushContext(bitmapContext); {
                for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
                    [window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
                }
            } UIGraphicsPopContext();
        });

        // append pixelBuffer on an async dispatch_queue, so the next frame is rendered whilst this one appends
        // must not overwhelm the queue with pixelBuffers, therefore:
        // check if _append_pixelBuffer_queue is ready
        // if it's not ready, release the pixelBuffer and bitmapContext
        if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
            dispatch_async(_append_pixelBuffer_queue, ^{
                BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
                if (!success) {
                    NSLog(@"Warning: Unable to write buffer to video");
                }
                CGContextRelease(bitmapContext);
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                CVPixelBufferRelease(pixelBuffer);

                dispatch_semaphore_signal(_pixelAppendSemaphore);
            });
        } else {
            CGContextRelease(bitmapContext);
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
        }

        dispatch_semaphore_signal(_frameRenderingSemaphore);
    });
}

- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
    CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
    CVPixelBufferLockBaseAddress(*pixelBuffer, 0);

    CGContextRef bitmapContext = NULL;
    bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
                                          CVPixelBufferGetWidth(*pixelBuffer),
                                          CVPixelBufferGetHeight(*pixelBuffer),
                                          8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
                                          kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGContextScaleCTM(bitmapContext, _scale, _scale);
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
    CGContextConcatCTM(bitmapContext, flipVertical);

    return bitmapContext;
}
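From what I have read so far, I suspect the black area is expected with this approach: drawViewHierarchyInRect:afterScreenUpdates: cannot snapshot an AVCaptureVideoPreviewLayer, because the camera preview is composited outside the app's normal drawing pass, so only my labels end up in the bitmap. I did notice that the recorder already calls writeBackgroundFrameInContext: on its delegate before it draws the windows, so one idea I am experimenting with is tapping the capture session with an AVCaptureVideoDataOutput, keeping the most recent frame as a CGImage, and drawing it in that delegate method. This is only a rough, untested sketch of the idea; _session, _ciContext, _lastFrame and attachDataOutput are my own names, not part of the recorder above:

    // in the view controller that owns the AVCaptureSession (_session);
    // assumes it conforms to the recorder's delegate protocol and to
    // AVCaptureVideoDataOutputSampleBufferDelegate, and that
    // _ciContext = [CIContext contextWithOptions:nil] was created once

    - (void)attachDataOutput
    {
        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        [output setSampleBufferDelegate:self
                                  queue:dispatch_queue_create("camera.frames", DISPATCH_QUEUE_SERIAL)];
        if ([_session canAddOutput:output]) {
            [_session addOutput:output];
        }
    }

    // keep hold of the latest camera frame as a CGImage
    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
        CGImageRef frame = [_ciContext createCGImage:ciImage fromRect:ciImage.extent];
        @synchronized (self) {
            CGImageRelease(_lastFrame);
            _lastFrame = frame;
        }
    }

    // recorder delegate: draw the camera frame underneath the window hierarchy
    - (void)writeBackgroundFrameInContext:(CGContextRef *)contextRef
    {
        @synchronized (self) {
            if (_lastFrame) {
                // note: the recorder's context is flipped for UIKit drawing, so the
                // frame may need an extra vertical flip / rotation per orientation
                CGContextDrawImage(*contextRef, self.view.bounds, _lastFrame);
            }
        }
    }

If anyone can confirm whether this is the right direction, or knows a cleaner way to get the camera feed underneath the labels in the recording, that would be great.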
I would be more than happy to provide the full source code to anyone willing to tackle this, because posting multiple .m files on here would take up a lot of space.
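For reference, this is roughly how I start and stop the recorder, using the public methods visible in the .m above:

    // starting a recording, e.g. when the camera screen appears
    ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
    [recorder startRecording];

    // stopping later, e.g. from a button action;
    // with no videoURL set, the recorder saves to the camera roll itself
    [recorder stopRecordingWithCompletion:^{
        NSLog(@"Recording finished");
    }];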