Capture live audio and video (YUV) with AVCaptureSession, then hardware-encode it:

Receive the raw audio/video sample buffers through AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate

Encode the raw audio and video data separately (H.264 via VideoToolbox, AAC via AudioToolbox)

Transmit the encoded streams (this demo simply writes them to files)
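One prerequisite the demo skips: camera and microphone authorization. A minimal sketch of requesting both before starting capture (on newer iOS versions the matching NSCameraUsageDescription / NSMicrophoneUsageDescription entries in Info.plist are also required — an assumption beyond the original project):

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL videoGranted) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL audioGranted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (videoGranted && audioGranted) {
                // Safe to start the capture session here.
            }
        });
    }];
}];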

ViewController

//
//  ViewController.h
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "AACEncoder.h"
#import "H264Encoder.h"

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate, H264EncoderDelegate>

@end
//
//  ViewController.m
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import "ViewController.h"

#define CAPTURE_FRAMES_PER_SECOND 20
#define SAMPLE_RATE 44100
#define VideoWidth 480
#define VideoHeight 640

@interface ViewController ()
{
    UIButton *startBtn;
    bool startCalled;

    H264Encoder *h264Encoder;
    AACEncoder *aacEncoder;

    AVCaptureSession *captureSession;

    dispatch_queue_t _audioQueue;

    AVCaptureConnection *_audioConnection;
    AVCaptureConnection *_videoConnection;

    NSMutableData *_data;
    NSString *h264File;
    NSFileHandle *fileHandle;
}
@end
@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    startCalled = true;

    _data = [[NSMutableData alloc] init];
    captureSession = [[AVCaptureSession alloc] init];

    [self initStartBtn];
}

#pragma mark
#pragma mark - Audio capture setup
- (void)setupAudioCapture {
    aacEncoder = [[AACEncoder alloc] init];

    /*
     * Create audio connection
     */
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
    if (error) {
        NSLog(@"Error getting audio input device: %@", error.description);
    }
    if ([captureSession canAddInput:audioInput]) {
        [captureSession addInput:audioInput];
    }

    _audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    [audioOutput setSampleBufferDelegate:self queue:_audioQueue];
    if ([captureSession canAddOutput:audioOutput]) {
        [captureSession addOutput:audioOutput];
    }
    _audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
}
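setupAudioCapture never touches the shared audio session; if recording fails silently on a device, activating it explicitly is the usual fix. A sketch (the category choice is an assumption, not taken from the original project):

NSError *sessionError = nil;
AVAudioSession *session = [AVAudioSession sharedInstance];
// PlayAndRecord keeps the microphone route active while capturing.
[session setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError];
[session setActive:YES error:&sessionError];
if (sessionError) {
    NSLog(@"AVAudioSession error: %@", sessionError);
}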
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}

#pragma mark
#pragma mark - Video capture setup
- (void)setupVideoCapture
{
    h264Encoder = [H264Encoder alloc];
    [h264Encoder initWithConfiguration];

    NSError *deviceError;

    AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // cameraDevice = [self cameraWithPosition:AVCaptureDevicePositionBack]; // or pick the back camera explicitly
    AVCaptureDeviceInput *inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&deviceError];

    // make output device
    AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];

    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];

    NSError *error;
    [cameraDevice lockForConfiguration:&error];
    if (error == nil) {
        NSLog(@"cameraDevice.activeFormat.videoSupportedFrameRateRanges IS %@",
              [cameraDevice.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]);

        if (cameraDevice.activeFormat.videoSupportedFrameRateRanges) {
            // Lock the frame rate to CAPTURE_FRAMES_PER_SECOND
            [cameraDevice setActiveVideoMinFrameDuration:CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND)];
            [cameraDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND)];
        }
    } else {
        // handle the configuration-lock error here
    }
    [cameraDevice unlockForConfiguration];

    outputDevice.videoSettings = videoSettings;
    [outputDevice setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if ([captureSession canAddInput:inputDevice]) {
        [captureSession addInput:inputDevice];
    }
    if ([captureSession canAddOutput:outputDevice]) {
        [captureSession addOutput:outputDevice];
    }

    // begin configuration for the AVCaptureSession
    [captureSession beginConfiguration];

    // picture resolution
    [captureSession setSessionPreset:AVCaptureSessionPreset640x480];

    _videoConnection = [outputDevice connectionWithMediaType:AVMediaTypeVideo];

    // Set landscape (if required)
    if ([_videoConnection isVideoOrientationSupported]) {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight;
        [_videoConnection setVideoOrientation:orientation];
    }

    // Prepare the output file for the raw H.264 stream
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    h264File = [documentsDirectory stringByAppendingPathComponent:@"test.h264"];
    [fileManager removeItemAtPath:h264File error:nil];
    [fileManager createFileAtPath:h264File contents:nil attributes:nil];

    fileHandle = [NSFileHandle fileHandleForWritingAtPath:h264File];

    // Note: with the landscape orientation above the buffers are 640x480,
    // while the encoder is initialized 480x640; swap these if the encoded
    // picture comes out wrong (inconsistency present in the original code).
    [h264Encoder initEncode:VideoWidth height:VideoHeight];
    h264Encoder.delegate = self;
}
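setupVideoCapture never attaches a preview, so the screen stays black while recording. A minimal sketch of adding one (frame and gravity are my choices, not from the original):

AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
previewLayer.frame = self.view.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
// Insert below the Start/Stop button so the button stays tappable.
[self.view.layer insertSublayer:previewLayer atIndex:0];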
#pragma mark
#pragma mark - Sample buffer delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    double dPTS = (double)(pts.value) / pts.timescale;
    // NSLog(@"DPTS is %f", dPTS);

    if (connection == _videoConnection) {
        [h264Encoder encode:sampleBuffer];
    } else if (connection == _audioConnection) {
        [aacEncoder encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) {
            if (encodedData) {
                NSLog(@"Audio data (%lu): %@", (unsigned long)encodedData.length, encodedData.description);

#pragma mark
#pragma mark - Encoded audio data (encodedData)
                [_data appendData:encodedData];
            } else {
                NSLog(@"Error encoding AAC: %@", error);
            }
        }];
    }
}
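Because the video output delivers on the main queue here, frames are dropped whenever encoding falls behind. The video delegate protocol has a companion callback that is worth logging during development (a sketch, not part of the original demo):

- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Called when the delegate queue cannot keep up with the capture rate.
    NSLog(@"Dropped a video sample buffer");
}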
#pragma mark
#pragma mark - Video SPS/PPS
- (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps
{
    // Prefix each parameter set with the Annex-B start code 00 00 00 01
    const char bytes[] = "\x00\x00\x00\x01";
    size_t length = (sizeof bytes) - 1; // string literals have an implicit trailing '\0'
    NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
    [fileHandle writeData:byteHeader];
    [fileHandle writeData:sps];
    [fileHandle writeData:byteHeader];
    [fileHandle writeData:pps];
}

#pragma mark
#pragma mark - Encoded video data callback
- (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame
{
    NSLog(@"Video data (%lu): %@", (unsigned long)data.length, data.description);

    if (fileHandle != NULL) {
        const char bytes[] = "\x00\x00\x00\x01";
        size_t length = (sizeof bytes) - 1; // string literals have an implicit trailing '\0'
        NSData *byteHeader = [NSData dataWithBytes:bytes length:length];

#pragma mark
#pragma mark - Encoded video data (data)
        [fileHandle writeData:byteHeader];
        [fileHandle writeData:data];
    }
}
#pragma mark
#pragma mark - Recording control
- (void)startBtnClicked
{
    if (startCalled) {
        [self startCamera];
        startCalled = false;
        [startBtn setTitle:@"Stop" forState:UIControlStateNormal];
    } else {
        [startBtn setTitle:@"Start" forState:UIControlStateNormal];
        startCalled = true;
        [self stopCamera];
    }
}

- (void)startCamera
{
    [self setupAudioCapture];
    [self setupVideoCapture];
    [captureSession commitConfiguration];
    [captureSession startRunning];
}

- (void)stopCamera
{
    [h264Encoder End];
    [captureSession stopRunning];
    [fileHandle closeFile];
    fileHandle = NULL;

    // Get the app's Documents directory path
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    NSMutableString *path = [[NSMutableString alloc] initWithString:documentsDirectory];
    [path appendString:@"/AACFile"];

    [_data writeToFile:path atomically:YES];
}

- (void)initStartBtn
{
    startBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    // The original frame values were lost in formatting; any reasonable size works
    startBtn.frame = CGRectMake(0, 0, 120, 50);
    startBtn.center = self.view.center;
    [startBtn addTarget:self action:@selector(startBtnClicked) forControlEvents:UIControlEventTouchUpInside];
    [startBtn setTitle:@"Start" forState:UIControlStateNormal];
    [startBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
    [self.view addSubview:startBtn];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
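After stopping, the Documents directory holds two artifacts: test.h264, a raw Annex-B H.264 elementary stream (start codes + SPS/PPS + NAL units, exactly as written in gotSpsPps: and gotEncodedData:), and AACFile, raw AAC packets each prefixed with an ADTS header. Both should open directly in ffplay or VLC for a quick sanity check, e.g. ffplay -f h264 test.h264 and ffplay -f aac AACFile.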

AACEncoder

//
//  AACEncoder.h
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>

@interface AACEncoder : NSObject

@property (nonatomic) dispatch_queue_t encoderQueue;
@property (nonatomic) dispatch_queue_t callbackQueue;

- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData *encodedData, NSError *error))completionBlock;

@end
//
//  AACEncoder.m
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import "AACEncoder.h"

@interface AACEncoder ()
@property (nonatomic) AudioConverterRef audioConverter;
@property (nonatomic) uint8_t *aacBuffer;
@property (nonatomic) NSUInteger aacBufferSize;
@property (nonatomic) char *pcmBuffer;
@property (nonatomic) size_t pcmBufferSize;

@end

@implementation AACEncoder

- (void)dealloc {
    AudioConverterDispose(_audioConverter);
    free(_aacBuffer);
}

- (id)init {
    if (self = [super init]) {
        _encoderQueue = dispatch_queue_create("AAC Encoder Queue", DISPATCH_QUEUE_SERIAL);
        _callbackQueue = dispatch_queue_create("AAC Encoder Callback Queue", DISPATCH_QUEUE_SERIAL);
        _audioConverter = NULL;
        _pcmBufferSize = 0;
        _pcmBuffer = NULL;
        _aacBufferSize = 1024; // original value lost in formatting; 1024 bytes is the usual choice for one AAC packet
        _aacBuffer = malloc(_aacBufferSize * sizeof(uint8_t));
        memset(_aacBuffer, 0, _aacBufferSize);
    }
    return self;
}

- (void)setupEncoderFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    AudioStreamBasicDescription inAudioStreamBasicDescription = *CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)CMSampleBufferGetFormatDescription(sampleBuffer));

    // Always zero-initialize a new AudioStreamBasicDescription
    AudioStreamBasicDescription outAudioStreamBasicDescription = {0};
    outAudioStreamBasicDescription.mSampleRate = inAudioStreamBasicDescription.mSampleRate; // match the input sample rate
    outAudioStreamBasicDescription.mFormatID = kAudioFormatMPEG4AAC; // kAudioFormatMPEG4AAC_HE does not work: no AudioClassDescription is found for it
    outAudioStreamBasicDescription.mFormatFlags = kMPEG4Object_AAC_LC; // AAC-LC object type
    outAudioStreamBasicDescription.mBytesPerPacket = 0;    // 0 = variable packet size (compressed format)
    outAudioStreamBasicDescription.mFramesPerPacket = 1024; // AAC always uses 1024 frames per packet
    outAudioStreamBasicDescription.mBytesPerFrame = 0;     // 0 for compressed formats
    outAudioStreamBasicDescription.mChannelsPerFrame = 1;  // mono (matches chanCfg = 1 in the ADTS header below)
    outAudioStreamBasicDescription.mBitsPerChannel = 0;    // 0 for compressed formats
    outAudioStreamBasicDescription.mReserved = 0;          // must be 0
    AudioClassDescription *description = [self getAudioClassDescriptionWithType:kAudioFormatMPEG4AAC
                                                               fromManufacturer:kAppleSoftwareAudioCodecManufacturer];

    OSStatus status = AudioConverterNewSpecific(&inAudioStreamBasicDescription, &outAudioStreamBasicDescription, 1, description, &_audioConverter);
    if (status != noErr) {
        NSLog(@"setup converter: %d", (int)status);
    }
}

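The converter is created with default settings; to pin the output bitrate, set it inside setupEncoderFromSampleBuffer: right after AudioConverterNewSpecific succeeds. A sketch (64 kbps is an arbitrary choice):

UInt32 outputBitrate = 64000; // bits per second — pick to taste
OSStatus brStatus = AudioConverterSetProperty(_audioConverter,
                                              kAudioConverterEncodeBitRate,
                                              sizeof(outputBitrate),
                                              &outputBitrate);
if (brStatus != noErr) {
    NSLog(@"failed to set AAC bitrate: %d", (int)brStatus);
}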
- (AudioClassDescription *)getAudioClassDescriptionWithType:(UInt32)type
                                           fromManufacturer:(UInt32)manufacturer
{
    static AudioClassDescription desc;

    UInt32 encoderSpecifier = type;
    OSStatus st;

    UInt32 size;
    st = AudioFormatGetPropertyInfo(kAudioFormatProperty_Encoders,
                                    sizeof(encoderSpecifier),
                                    &encoderSpecifier,
                                    &size);
    if (st) {
        NSLog(@"error getting audio format property info: %d", (int)(st));
        return nil;
    }

    unsigned int count = size / sizeof(AudioClassDescription);
    AudioClassDescription descriptions[count];
    st = AudioFormatGetProperty(kAudioFormatProperty_Encoders,
                                sizeof(encoderSpecifier),
                                &encoderSpecifier,
                                &size,
                                descriptions);
    if (st) {
        NSLog(@"error getting audio format property: %d", (int)(st));
        return nil;
    }

    for (unsigned int i = 0; i < count; i++) {
        if ((type == descriptions[i].mSubType) &&
            (manufacturer == descriptions[i].mManufacturer)) {
            memcpy(&desc, &(descriptions[i]), sizeof(desc));
            return &desc;
        }
    }

    return nil;
}

static OSStatus inInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
{
    AACEncoder *encoder = (__bridge AACEncoder *)(inUserData);
    UInt32 requestedPackets = *ioNumberDataPackets;
    size_t copiedSamples = [encoder copyPCMSamplesIntoBuffer:ioData];
    if (copiedSamples < requestedPackets) {
        // PCM buffer isn't full enough
        *ioNumberDataPackets = 0;
        return -1;
    }
    *ioNumberDataPackets = 1;
    return noErr;
}

- (size_t)copyPCMSamplesIntoBuffer:(AudioBufferList *)ioData {
    size_t originalBufferSize = _pcmBufferSize;
    if (!originalBufferSize) {
        return 0;
    }
    ioData->mBuffers[0].mData = _pcmBuffer;
    ioData->mBuffers[0].mDataByteSize = (UInt32)_pcmBufferSize;
    _pcmBuffer = NULL;
    _pcmBufferSize = 0;
    return originalBufferSize;
}

- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData *encodedData, NSError *error))completionBlock {
    CFRetain(sampleBuffer);
    dispatch_async(_encoderQueue, ^{
        if (!_audioConverter) {
            [self setupEncoderFromSampleBuffer:sampleBuffer];
        }
        CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
        CFRetain(blockBuffer);
        OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &_pcmBufferSize, &_pcmBuffer);
        NSError *error = nil;
        if (status != kCMBlockBufferNoErr) {
            error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        }

        memset(_aacBuffer, 0, _aacBufferSize);
        AudioBufferList outAudioBufferList = {0};
        outAudioBufferList.mNumberBuffers = 1;
        outAudioBufferList.mBuffers[0].mNumberChannels = 1;
        outAudioBufferList.mBuffers[0].mDataByteSize = (UInt32)_aacBufferSize;
        outAudioBufferList.mBuffers[0].mData = _aacBuffer;
        AudioStreamPacketDescription *outPacketDescription = NULL;
        UInt32 ioOutputDataPacketSize = 1;
        // Pull-model conversion: the converter calls inInputDataProc to fetch PCM
        status = AudioConverterFillComplexBuffer(_audioConverter, inInputDataProc, (__bridge void *)(self), &ioOutputDataPacketSize, &outAudioBufferList, outPacketDescription);
        NSData *data = nil;
        if (status == noErr) {
            NSData *rawAAC = [NSData dataWithBytes:outAudioBufferList.mBuffers[0].mData length:outAudioBufferList.mBuffers[0].mDataByteSize];
            NSData *adtsHeader = [self adtsDataForPacketLength:rawAAC.length];
            NSMutableData *fullData = [NSMutableData dataWithData:adtsHeader];
            [fullData appendData:rawAAC];
            data = fullData;
        } else {
            error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        }
        if (completionBlock) {
            dispatch_async(_callbackQueue, ^{
                completionBlock(data, error);
            });
        }
        CFRelease(sampleBuffer);
        CFRelease(blockBuffer);
    });
}

/**
 *  Add an ADTS header at the beginning of each and every AAC packet,
 *  since the converter emits packets of raw AAC data.
 *
 *  Note: packetLength must count in the ADTS header itself.
 *  See: http://wiki.multimedia.cx/index.php?title=ADTS
 *  Also: http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Channel_Configurations
 **/
- (NSData *)adtsDataForPacketLength:(NSUInteger)packetLength {
    int adtsLength = 7;
    char *packet = malloc(sizeof(char) * adtsLength);
    int profile = 2;  // AAC LC
    int freqIdx = 4;  // 44.1 kHz
    int chanCfg = 1;  // MPEG-4 channel configuration: 1 = mono, front-center
    NSUInteger fullLength = adtsLength + packetLength;
    // fill in ADTS data
    packet[0] = (char)0xFF; // 11111111 = syncword
    packet[1] = (char)0xF9; // 1111 1 00 1 = syncword end, MPEG-2 ID, layer 00, no CRC
    packet[2] = (char)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (char)(((chanCfg & 3) << 6) + (fullLength >> 11));
    packet[4] = (char)((fullLength & 0x7FF) >> 3);
    packet[5] = (char)(((fullLength & 7) << 5) + 0x1F);
    packet[6] = (char)0xFC;
    NSData *data = [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
    return data;
}

@end
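Note that adtsDataForPacketLength: hardcodes freqIdx = 4 (44.1 kHz) and chanCfg = 1 (mono); if the capture format ever changes, the index must change with it. A sketch of the standard MPEG-4 sampling-frequency-index lookup (the helper name is mine):

// Map a sample rate in Hz to its MPEG-4 / ADTS sampling frequency index.
static int adtsFreqIdxForSampleRate(double sampleRate) {
    const int rates[] = {96000, 88200, 64000, 48000, 44100, 32000,
                         24000, 22050, 16000, 12000, 11025, 8000};
    for (int i = 0; i < 12; i++) {
        if ((int)sampleRate == rates[i]) {
            return i;
        }
    }
    return 4; // fall back to 44.1 kHz
}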

H264Encoder

//
//  H264Encoder.h
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <VideoToolbox/VideoToolbox.h>

@protocol H264EncoderDelegate <NSObject>

- (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps;
- (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame;

@end

@interface H264Encoder : NSObject

- (void)initWithConfiguration;
- (void)start:(int)width height:(int)height;
- (void)initEncode:(int)width height:(int)height;
- (void)encode:(CMSampleBufferRef)sampleBuffer;
- (void)End;

@property (copy, nonatomic) NSString *error; // copy, not weak: a weak NSString only worked by accident with string literals
@property (weak, nonatomic) id<H264EncoderDelegate> delegate;

@end
//
//  H264Encoder.m
//  H264AACEncode
//
//  Created by ZhangWen on 15/10/14.
//  Copyright © 2015 Zhangwen. All rights reserved.
//

#import "H264Encoder.h"
#include <fcntl.h>  // open/O_RDONLY for the file-based test path
#include <unistd.h> // read/close

@implementation H264Encoder
{
    NSString *yuvFile;
    VTCompressionSessionRef EncodingSession;
    dispatch_queue_t aQueue;
    CMFormatDescriptionRef format;
    CMSampleTimingInfo *timingInfo;
    BOOL initialized;
    int frameCount;
    NSData *sps;
    NSData *pps;
}
@synthesize error;

- (void)initWithConfiguration
{
    EncodingSession = nil;
    initialized = true;
    aQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    frameCount = 0;
    sps = NULL;
    pps = NULL;
}

void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
{
    // NSLog(@"didCompressH264 called with status %d infoFlags %d", (int)status, (int)infoFlags);
    if (status != noErr) return;

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH264 data is not ready");
        return;
    }
    H264Encoder *encoder = (__bridge H264Encoder *)outputCallbackRefCon;

    // Check if we have got a key frame first
    bool keyframe = !CFDictionaryContainsKey((CFDictionaryRef)CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0), kCMSampleAttachmentKey_NotSync);

    if (keyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        // The SPS/PPS live in the format description extensions (the "avcC" atom)
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusCode == noErr) {
            // Found sps and now check for pps
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (statusCode == noErr) {
                // Found pps
                encoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                encoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
                if (encoder->_delegate) {
                    [encoder->_delegate gotSpsPps:encoder->sps pps:encoder->pps];
                }
            }
        }
    }

    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        static const int AVCCHeaderLength = 4; // each NAL unit is prefixed with a 4-byte big-endian length
        while (bufferOffset < totalLength - AVCCHeaderLength) {
            // Read the NAL unit length
            uint32_t NALUnitLength = 0;
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);

            // Convert the length value from big-endian to host byte order
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);

            NSData *data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
            [encoder->_delegate gotEncodedData:data isKeyFrame:keyframe];

            // Move to the next NAL unit in the block buffer
            bufferOffset += AVCCHeaderLength + NALUnitLength;
        }
    }
}

- (void)start:(int)width height:(int)height
{
    int frameSize = (width * height * 1.5); // one I420 frame: Y plane plus quarter-size U and V planes

    if (!initialized) {
        NSLog(@"H264: Not initialized");
        error = @"H264: Not initialized";
        return;
    }
    dispatch_sync(aQueue, ^{
        // Test harness: read raw I420 frames from a file and push them through
        // the encoder to produce an H.264 stream. The live-capture path uses
        // initEncode:/encode: below instead.

        // Create the compression session
        OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self), &EncodingSession);
        NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);

        if (status != noErr) {
            NSLog(@"H264: Unable to create an H264 session");
            error = @"H264: Unable to create an H264 session";
            return;
        }

        // Set the properties
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
        // The original keyframe-interval value was lost in formatting; 240 is a typical choice
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@240);
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_High_AutoLevel);

        // Tell the encoder to start encoding
        VTCompressionSessionPrepareToEncodeFrames(EncodingSession);

        // Open the file using POSIX as this is anyway a test application
        int fd = open([yuvFile UTF8String], O_RDONLY);
        if (fd == -1) {
            NSLog(@"H264: Unable to open the file");
            error = @"H264: Unable to open the file";
            return;
        }

        NSMutableData *theData = [[NSMutableData alloc] initWithLength:frameSize];
        NSUInteger actualBytes = frameSize;
        while (actualBytes > 0) {
            void *buffer = [theData mutableBytes];
            NSUInteger bufferSize = [theData length];

            actualBytes = read(fd, buffer, bufferSize);
            if (actualBytes < frameSize)
                [theData setLength:actualBytes];

            frameCount++;
            // Create a CM Block buffer out of this data
            CMBlockBufferRef BlockBuffer = NULL;
            OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, buffer, actualBytes, kCFAllocatorNull, NULL, 0, actualBytes, kCMBlockBufferAlwaysCopyDataFlag, &BlockBuffer);

            // Check for error
            if (status != noErr) {
                NSLog(@"H264: CMBlockBufferCreateWithMemoryBlock failed with %d", (int)status);
                error = @"H264: CMBlockBufferCreateWithMemoryBlock failed";
                return;
            }

            // Create a CM Sample Buffer
            CMSampleBufferRef sampleBuffer = NULL;
            CMFormatDescriptionRef formatDescription;
            CMFormatDescriptionCreate(kCFAllocatorDefault, // Allocator
                                      kCMMediaType_Video,
                                      'I420',
                                      NULL,
                                      &formatDescription);
            // The original timing values were lost in formatting; a per-frame timestamp works
            CMSampleTimingInfo sampleTimingInfo = {CMTimeMake(frameCount, 1000)};

            OSStatus statusCode = CMSampleBufferCreate(kCFAllocatorDefault, BlockBuffer, YES, NULL, NULL, formatDescription, 1, 1, &sampleTimingInfo, 0, NULL, &sampleBuffer);

            // Check for error
            if (statusCode != noErr) {
                NSLog(@"H264: CMSampleBufferCreate failed with %d", (int)statusCode);
                error = @"H264: CMSampleBufferCreate failed";
                return;
            }
            CFRelease(BlockBuffer);
            BlockBuffer = NULL;

            // Get the CV Image buffer
            CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);

            // Create properties
            CMTime presentationTimeStamp = CMTimeMake(frameCount, 1000);
            //CMTime duration = CMTimeMake(1, DURATION);
            VTEncodeInfoFlags flags;

            // Pass it to the encoder
            statusCode = VTCompressionSessionEncodeFrame(EncodingSession,
                                                         imageBuffer,
                                                         presentationTimeStamp,
                                                         kCMTimeInvalid,
                                                         NULL, NULL, &flags);
            // Check for error
            if (statusCode != noErr) {
                NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
                error = @"H264: VTCompressionSessionEncodeFrame failed";

                // End the session
                VTCompressionSessionInvalidate(EncodingSession);
                CFRelease(EncodingSession);
                EncodingSession = NULL;
                error = NULL;
                return;
            }
            // NSLog(@"H264: VTCompressionSessionEncodeFrame Success");
        }

        // Mark the completion
        VTCompressionSessionCompleteFrames(EncodingSession, kCMTimeInvalid);

        // End the session
        VTCompressionSessionInvalidate(EncodingSession);
        CFRelease(EncodingSession);
        EncodingSession = NULL;
        error = NULL;

        close(fd);
    });
}
- (void)initEncode:(int)width height:(int)height
{
    dispatch_sync(aQueue, ^{
        // Create the compression session
        OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)(self), &EncodingSession);
        NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);

        if (status != noErr) {
            NSLog(@"H264: Unable to create an H264 session");
            error = @"H264: Unable to create an H264 session";
            return;
        }

        // Set the properties
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        VTSessionSetProperty(EncodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Main_AutoLevel);

        // Tell the encoder to start encoding
        VTCompressionSessionPrepareToEncodeFrames(EncodingSession);
    });
}
- (void)encode:(CMSampleBufferRef)sampleBuffer
{
    dispatch_sync(aQueue, ^{
        frameCount++;
        // Get the CV Image buffer
        CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);

        // Create properties (the original timescale was lost in formatting; 1000 is a common choice)
        CMTime presentationTimeStamp = CMTimeMake(frameCount, 1000);
        //CMTime duration = CMTimeMake(1, DURATION);
        VTEncodeInfoFlags flags;

        // Pass it to the encoder
        OSStatus statusCode = VTCompressionSessionEncodeFrame(EncodingSession,
                                                              imageBuffer,
                                                              presentationTimeStamp,
                                                              kCMTimeInvalid,
                                                              NULL, NULL, &flags);
        // Check for error
        if (statusCode != noErr) {
            NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
            error = @"H264: VTCompressionSessionEncodeFrame failed";

            // End the session
            VTCompressionSessionInvalidate(EncodingSession);
            CFRelease(EncodingSession);
            EncodingSession = NULL;
            error = NULL;
            return;
        }
        // NSLog(@"H264: VTCompressionSessionEncodeFrame Success");
    });
}

- (void)End
{
    // Mark the completion
    VTCompressionSessionCompleteFrames(EncodingSession, kCMTimeInvalid);

    // End the session
    VTCompressionSessionInvalidate(EncodingSession);
    CFRelease(EncodingSession);
    EncodingSession = NULL;
    error = NULL;
}

@end
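Since didCompressH264 hands gotEncodedData: one NAL unit at a time, the first payload byte identifies the unit type; a quick sanity check using the standard H.264 NAL type codes (a sketch, e.g. inside gotEncodedData:):

const uint8_t *bytes = (const uint8_t *)data.bytes;
int nalType = bytes[0] & 0x1F; // low 5 bits of the first byte
switch (nalType) {
    case 1:  NSLog(@"non-IDR slice");         break;
    case 5:  NSLog(@"IDR slice (key frame)"); break;
    case 7:  NSLog(@"SPS");                   break;
    case 8:  NSLog(@"PPS");                   break;
    default: NSLog(@"NAL type %d", nalType);  break;
}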
