objective-c - Make Video From Images in iOS


I found the tutorial http://codethink.no-ip.org/wordpress/archives/673#comment-118063 and the question "screen capture video in ios programmatically" about how to do this. It was a bit outdated for current iOS, so I renewed it, and I am close to having it work, but putting the UIImages together into a video isn't quite working right now.

Here is how I call the methods in viewDidLoad:

[captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];

captureView is an IBOutlet connected to the view I want to record.

And I have a class ScreenCaptureView with a .h and a .m.

Here is the .h:

@protocol ScreenCaptureViewDelegate <NSObject>
- (void) recordingFinished:(NSString *)outputPathOrNil;
@end

@interface ScreenCaptureView : UIView {
    //video writing
    AVAssetWriter *videoWriter;
    AVAssetWriterInput *videoWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *avAdaptor;

    //recording state
    BOOL _recording;
    NSDate *startedAt;
    void *bitmapData;
}

//for recording video
- (BOOL) startRecording;
- (void) stopRecording;

//for accessing the current screen and adjusting the capture rate, etc.
@property(retain) UIImage *currentScreen;
@property(assign) float frameRate;
@property(nonatomic, assign) id<ScreenCaptureViewDelegate> delegate;

@end
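For context, the view controller that owns the captureView outlet adopts this protocol and also holds the startRecording/stopRecording calls shown above. A minimal sketch of that wiring, assuming a class named ViewController and an illustrative recordingFinished: body (neither is part of the original post):

// ViewController.m -- minimal sketch, assumptions noted above.
#import "ViewController.h"
#import "ScreenCaptureView.h"

@interface ViewController () <ScreenCaptureViewDelegate>
@property (nonatomic, weak) IBOutlet ScreenCaptureView *captureView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.captureView.delegate = self;
    [self.captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
    [self.captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];
}

// Called by ScreenCaptureView when the writer finishes (nil on failure).
- (void)recordingFinished:(NSString *)outputPathOrNil {
    NSLog(@"Recording finished: %@", outputPathOrNil);
}

@end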

And here is the .m:

@interface ScreenCaptureView (Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end

@implementation ScreenCaptureView

@synthesize currentScreen, frameRate, delegate;

- (void) initialize {
    // Initialization code
    self.clearsContextBeforeDrawing = YES;
    self.currentScreen = nil;
    self.frameRate = 10.0f;     //10 frames per second
    _recording = false;
    videoWriter = nil;
    videoWriterInput = nil;
    avAdaptor = nil;
    startedAt = nil;
    bitmapData = NULL;
}

- (id) initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id) init {
    self = [super init];
    if (self) {
        [self initialize];
    }
    return self;
}

- (id) initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self initialize];
    }
    return self;
}

- (CGContextRef) createBitmapContextOfSize:(CGSize)size {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    bitmapBytesPerRow = (size.width * 4);
    bitmapByteCount   = (bitmapBytesPerRow * size.height);
    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (bitmapData != NULL) {
        free(bitmapData);
    }
    bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL) {
        fprintf(stderr, "Memory not allocated!");
        return NULL;
    }

    context = CGBitmapContextCreate(bitmapData,
                                    size.width,
                                    size.height,
                                    8,      // bits per component
                                    bitmapBytesPerRow,
                                    colorSpace,
                                    (CGBitmapInfo) kCGImageAlphaNoneSkipFirst);

    CGContextSetAllowsAntialiasing(context, NO);
    if (context == NULL) {
        free(bitmapData);
        fprintf(stderr, "Context not created!");
        return NULL;
    }
    CGColorSpaceRelease(colorSpace);

    return context;
}

static int frameCount = 0;            //debugging
- (void) drawRect:(CGRect)rect {
    NSDate *start = [NSDate date];
    CGContextRef context = [self createBitmapContextOfSize:self.frame.size];

    //not sure why this is necessary...image renders upside-down and mirrored
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    CGContextConcatCTM(context, flipVertical);

    [self.layer renderInContext:context];

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage *background = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    self.currentScreen = background;

    //debugging
    if (frameCount < 40) {
        NSString *filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
        NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
        [UIImagePNGRepresentation(self.currentScreen) writeToFile:pngPath atomically:YES];
        frameCount++;
    }

    //NOTE:  to record a scrollview while scrolling you need to implement your UIScrollViewDelegate such that it calls
    //       'setNeedsDisplay' on the ScreenCaptureView.
    if (_recording) {
        float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
        [self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
    }

    float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
    float delayRemaining = (1.0 / self.frameRate) - processingSeconds;

    CGContextRelease(context);

    //redraw at the specified framerate
    [self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}

- (void) cleanupWriter {
    avAdaptor = nil;
    videoWriterInput = nil;
    videoWriter = nil;
    startedAt = nil;
    if (bitmapData != NULL) {
        free(bitmapData);
        bitmapData = NULL;
    }
}

- (void) dealloc {
    [self cleanupWriter];
}

- (NSURL *) tempFileURL {
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSError *error;
        if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
            NSLog(@"Could not delete old recording file at path:  %@", outputPath);
        }
    }

    return outputURL;
}

- (BOOL) setUpWriter {
    NSError *error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);

    //Configure video
    NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
                                           nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //add input
    [videoWriter addInput:videoWriterInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    return YES;
}

- (void) completeRecordingSession {
    [videoWriterInput markAsFinished];

    // Wait for the video
    int status = videoWriter.status;
    while (status == AVAssetWriterStatusUnknown) {
        NSLog(@"Waiting...");
        [NSThread sleepForTimeInterval:0.5f];
        status = videoWriter.status;
    }

    @synchronized(self) {
        [videoWriter finishWritingWithCompletionHandler:^{
            [self cleanupWriter];
            BOOL success = YES;
            id delegateObj = self.delegate;
            NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
            NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

            NSLog(@"Completed recording, file is stored at:  %@", outputURL);
            if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
                [delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
            }
        }];
    }
}

- (BOOL) startRecording {
    BOOL result = NO;
    @synchronized(self) {
        if (!_recording) {
            result = [self setUpWriter];
            startedAt = [NSDate date];
            _recording = true;
        }
    }

    return result;
}

- (void) stopRecording {
    @synchronized(self) {
        if (_recording) {
            _recording = false;
            [self completeRecordingSession];
        }
    }
}

- (void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData]) {
        NSLog(@"Not ready for video data");
    }
    else {
        @synchronized (self) {
            UIImage *newFrame = self.currentScreen;
            CVPixelBufferRef pixelBuffer = NULL;
            CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
            CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

            int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
            if (status != 0) {
                //could not get a buffer from the pool
                NSLog(@"Error creating pixel buffer:  status=%d", status);
            }
            // set image data into pixel buffer
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
            CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  //XXX:  will only work if the pixel buffer is contiguous and has the same bytesPerRow as the input data

            if (status == 0) {
                BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
                if (!success)
                    NSLog(@"Warning:  Unable to write buffer to video");
            }

            //clean up
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CVPixelBufferRelease(pixelBuffer);
            CFRelease(image);
            CGImageRelease(cgImage);
        }
    }
}
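One thing worth checking around the XXX comment in writeVideoFrameAtTime: the flat CFDataGetBytes copy only works if the CGImage and the pixel buffer share the same bytesPerRow. A small diagnostic helper (a sketch, not part of the original code) could confirm whether that assumption actually holds:

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>

// Sketch of a helper that checks whether a CGImage's stride matches a pixel
// buffer's stride before doing a flat memory copy. If they differ, each copied
// row lands at the wrong offset, which shows up as a diagonal skew in the output.
static BOOL StridesMatch(CGImageRef image, CVPixelBufferRef pixelBuffer) {
    size_t imageBytesPerRow  = CGImageGetBytesPerRow(image);
    size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    NSLog(@"image bytesPerRow = %zu, pixel buffer bytesPerRow = %zu",
          imageBytesPerRow, bufferBytesPerRow);
    return imageBytesPerRow == bufferBytesPerRow;
}

Calling something like this just before the CFDataGetBytes copy would tell you whether that flat copy is safe for your frame size.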

As you can see in the drawRect method, I save the images, and they look great, but when I try to make the video it produces a distorted still image instead of the frames I saved.

Here is the output: the video frame comes out slanted and weird, while the saved picture looks normal.


My question is: what is going wrong when the video is being made?

Thanks for your time, I know this is a long question.

Pixel buffer adaptors only work with certain pixel sizes of images, so you're probably going to need to change the size of your images. You can imagine what's happening in your video: the writer is trying to fit your, let's say, 361x241 images into a 360x240-sized space, so each row starts with the last pixel of the previous row, and everything ends up diagonally skewed like you see. Check the Apple docs for supported dimensions. I believe I used 480x320 and it's supported. You can use this method to resize your images:

+ (UIImage *) scaleImage:(UIImage *)image toSize:(CGSize)newSize {
    CGRect scaledImageRect = CGRectZero;

    CGFloat aspectWidth = newSize.width / image.size.width;
    CGFloat aspectHeight = newSize.height / image.size.height;
    CGFloat aspectRatio = 3.0 / 2;

    scaledImageRect.size.width = image.size.width * aspectRatio;
    scaledImageRect.size.height = image.size.height * aspectRatio;
    scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0f;
    scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0f;

    UIGraphicsBeginImageContextWithOptions(CGSizeMake(480, 320), NO, 0);
    [image drawInRect:scaledImageRect];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return scaledImage;
}
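Alternatively (or in addition), the flat CFDataGetBytes copy in writeVideoFrameAtTime: can be replaced with a row-by-row copy that honors each side's bytesPerRow, which removes the stride assumption entirely. A sketch of that idea (the function name and exact integration are mine, not part of the original answer):

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>

// Sketch: copy a CGImage's backing data into a CVPixelBuffer one row at a time,
// respecting both strides, so differing bytesPerRow values no longer skew the frame.
// Assumes both sides hold 32-bit pixel data of the same width and height.
static void CopyImageIntoPixelBuffer(CGImageRef image, CVPixelBufferRef pixelBuffer) {
    CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
    const uint8_t *src = CFDataGetBytePtr(imageData);
    size_t srcBytesPerRow = CGImageGetBytesPerRow(image);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *dst = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    size_t height = MIN(CGImageGetHeight(image), CVPixelBufferGetHeight(pixelBuffer));
    size_t bytesToCopyPerRow = MIN(srcBytesPerRow, dstBytesPerRow);

    for (size_t row = 0; row < height; row++) {
        memcpy(dst + row * dstBytesPerRow,
               src + row * srcBytesPerRow,
               bytesToCopyPerRow);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CFRelease(imageData);
}

In writeVideoFrameAtTime: this would take the place of the single CFDataGetBytes call, while the resizing method above keeps the frames at an encoder-friendly size such as 480x320.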
