I am currently using RTMPStreamPublisher to publish video from an iPhone application to a Wowza server. I downloaded the demo code from this GitHub project: https://github.com/slavavdovichenko/MediaLibDemos/tree/master/RTMPStreamPublisher.
The sample works great, but when I try to create my own project with the same implementation, it gives me errors. I am sure there is no missing library or framework; I have checked that everything in my project matches the demo they provided.
I still don't know where I am making a mistake. The error is as follows:
[NULL @ 0x181a0c00] [IMGUTILS @ 0x27d78630] Picture size 0x10 is invalid
[NULL @ 0x181a0c00] ignoring invalid width/height values
[flv @ 0x181a0c00] Specified pix_fmt is not supported
video codec is not opened
2013-11-28 12:39:27.064 LiveStreaming[268:60b] AudioCodec: codecID = 86050, codecType = 42, bitRate = 16000, _sampleBytes = 4
encoder supports the sample formats:
flt,
audio codec best options: sample_rate = 44100
[nellymoser @ 0x18179a00] Specified sample_fmt is not supported.
audio codec is not opened
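For what it's worth, "Picture size 0x10 is invalid" is FFmpeg reporting that the video encoder was opened with a width of 0, i.e. it never received a valid capture resolution, and "Specified sample_fmt is not supported" means the nellymoser encoder was handed samples in some format other than the flt it advertises. A minimal check I can run with plain AVFoundation, outside the MediaLib classes, to rule out the capture side (this diagnostic is my own, not part of the library):

// Diagnostic only: confirm the camera reports a usable capture size
// before BroadcastStreamClient ever opens the encoder.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input && [session canAddInput:input]) {
    [session addInput:input];
    CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(camera.activeFormat.formatDescription);
    NSLog(@"Active capture size: %dx%d", dims.width, dims.height); // must not be 0xN
} else {
    NSLog(@"Capture input unavailable: %@", error);
}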
I really don't know what the real problem is, as the only other thing I get is a generic EXC_BAD_ACCESS crash.
Any idea what could be wrong?
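In the meantime, the only extra information I can get is from the library's own logging, which the demo ships disabled; viewDidLoad below has the call commented out, so enabling it should show how far the setup gets:

// MediaLibiOS internal logging, normally commented out in viewDidLoad
[DebLog setIsActive:YES];

Running the scheme with Zombie Objects enabled (Edit Scheme > Diagnostics) should also narrow down which deallocated object the EXC_BAD_ACCESS touches.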
NOTE: I am using the following libraries.
Just after creating the project I added the libraries and frameworks to it. Using the CommLibiOS and MediaLibiOS libraries requires linking the following frameworks: CFNetwork, AudioToolbox, AVFoundation, CoreFoundation, CoreMedia, CoreVideo, Security, and the library libz.dylib.
I then added the CommLibiOS and MediaLibiOS libraries themselves to the project.
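Since the demo project builds and mine does not, one more difference worth ruling out is the target's build settings rather than the file list. This is only an assumption on my part (the demo's settings are not shown here), but static Objective-C libraries like these commonly need the -ObjC linker flag so their categories get loaded:

// Build Settings > Other Linker Flags (assumption: the demo sets this too)
OTHER_LDFLAGS = -ObjC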
The following is my .h file:
//
// ViewController.h
// RTMPStreamPublisher
//
// Created by Vyacheslav Vdovichenko on 7/10/12.
// Copyright (c) 2012 The Midnight Coders, Inc. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import "MediaStreamPlayer.h"
#import <MediaPlayer/MediaPlayer.h>
#import <CoreMedia/CoreMedia.h>
@interface HomeViewController : UIViewController <UITextFieldDelegate> {
IBOutlet UITextField *hostTextField;
IBOutlet UITextField *streamTextField;
IBOutlet UIView *previewView;
IBOutlet UIButton *btnConnect;
IBOutlet UIButton *btnToggle;
IBOutlet UIButton *btnPublish;
IBOutlet UILabel *memoryLabel;
}
-(IBAction)connectControl:(id)sender;
-(IBAction)publishControl:(id)sender;
-(IBAction)camerasToggle:(id)sender;
@end
The following is my .m file:
//
// ViewController.m
// RTMPStreamPublisher
//
// Created by Vyacheslav Vdovichenko on 7/10/12.
// Copyright (c) 2012 The Midnight Coders, Inc. All rights reserved.
//
#import "HomeViewController.h"
#import "DEBUG.h"
#import "MemoryTicker.h"
#import "BroadcastStreamClient.h"
#import "MediaStreamPlayer.h"
#import "VideoPlayer.h"
#import "MPMediaEncoder.h"
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <MediaPlayer/MediaPlayer.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "PhoneStreaming.h"
@interface HomeViewController () <MPIMediaStreamEvent> {
MemoryTicker *memoryTicker;
RTMPClient *socket;
BroadcastStreamClient *upstream;
}
-(void)sizeMemory:(NSNumber *)memory;
-(void)setDisconnect;
@end
@implementation HomeViewController
#pragma mark -
#pragma mark View lifecycle
-(void)viewDidLoad {
//[DebLog setIsActive:YES];
[super viewDidLoad];
memoryTicker = [[MemoryTicker alloc] initWithResponder:self andMethod:@selector(sizeMemory:)];
memoryTicker.asNumber = YES;
socket = nil;
upstream = nil;
//echoCancellationOn;
//hostTextField.text = @"rtmp://80.74.155.7/live";
//hostTextField.text = @"rtmp://10.0.1.33:1935/live";
//hostTextField.text = @"rtmp://10.0.1.33:1935/videorecording";
//hostTextField.text = @"rtmp://192.168.2.63:1935/live";
//hostTextField.text = @"rtmp://192.168.2.63:1935/videorecording";
hostTextField.text = @"rtmp://192.168.3.187:1935/live";
//hostTextField.text = @"rtmp://192.168.2.101:1935/live";
hostTextField.delegate = self;
streamTextField.text = @"slavav";
//streamTextField.text = @"outgoingaudio_c109";
//streamTextField.text = @"myStream";
streamTextField.delegate = self;
}
-(void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
}
-(NSUInteger)supportedInterfaceOrientations {
return UIInterfaceOrientationMaskPortrait;
}
#pragma mark -
#pragma mark Private Methods
// MEMORY
-(void)sizeMemory:(NSNumber *)memory {
memoryLabel.text = [NSString stringWithFormat:@"%d", [memory intValue]];
}
// ALERT
-(void)showAlert:(NSString *)message {
dispatch_async(dispatch_get_main_queue(), ^(void) {
UIAlertView *av = [[UIAlertView alloc] initWithTitle:@"Receive" message:message delegate:self
cancelButtonTitle:@"Ok" otherButtonTitles:nil];
[av show];
});
}
// ACTIONS
-(void)doConnect {
upstream = [[BroadcastStreamClient alloc] init:hostTextField.text];
upstream.delegate = self;
[upstream setPreviewLayer:previewView];
[upstream stream:streamTextField.text publishType:PUBLISH_LIVE];
//btnConnect.title = @"Disconnect";
}
-(void)doDisconnect {
[upstream disconnect];
}
-(void)setDisconnect {
[socket disconnect];
socket = nil;
[upstream teardownPreviewLayer];
upstream = nil;
// btnConnect.title = @"Connect";
btnToggle.enabled = NO;
// btnPublish.title = @"Start";
btnPublish.enabled = NO;
hostTextField.hidden = NO;
streamTextField.hidden = NO;
previewView.hidden = YES;
}
-(void)sendMetadata {
NSString *camera = upstream.isUsingFrontFacingCamera ? @"FRONT" : @"BACK";
NSDate *date = [NSDate date];
NSDictionary *meta = [NSDictionary dictionaryWithObjectsAndKeys:camera, @"camera", [date description], @"date", nil];
[upstream sendMetadata:meta event:@"changedCamera:"];
}
#pragma mark -
#pragma mark Public Methods
// ACTIONS
-(IBAction)connectControl:(id)sender {
NSLog(@"connectControl: host = %@", hostTextField.text);
(!upstream) ? [self doConnect] : [self doDisconnect];
}
-(IBAction)publishControl:(id)sender {
NSLog(@"publishControl: stream = %@", streamTextField.text);
PhoneStreaming *add = [[PhoneStreaming alloc]
initWithNibName:@"PhoneStreaming" bundle:nil];
[self presentViewController:add animated:YES completion:nil];
// (upstream.state != STREAM_PLAYING) ? [upstream start] : [upstream pause];
}
-(IBAction)camerasToggle:(id)sender {
NSLog(@"camerasToggle:");
if (upstream.state != STREAM_PLAYING)
return;
[upstream setVideoOrientation:
upstream.isUsingFrontFacingCamera ? AVCaptureVideoOrientationLandscapeRight : AVCaptureVideoOrientationLandscapeLeft];
[upstream switchCameras];
[self sendMetadata];
}
#pragma mark -
#pragma mark UITextFieldDelegate Methods
-(BOOL)textFieldShouldReturn:(UITextField *)textField {
[textField resignFirstResponder];
return YES;
}
#pragma mark -
#pragma mark MPIMediaStreamEvent Methods
-(void)stateChanged:(id)sender state:(MPMediaStreamState)state description:(NSString *)description {
NSLog(@" $$$$$$ <MPIMediaStreamEvent> stateChangedEvent: %d = %@ [%@]", (int)state, description, [NSThread isMainThread]?@"M":@"T");
NSLog(@"SOCKET %@",socket.description);
switch (state) {
case CONN_DISCONNECTED: {
[self setDisconnect];
break;
}
case CONN_CONNECTED: {
if (![description isEqualToString:MP_RTMP_CLIENT_IS_CONNECTED])
break;
#if 0 // use encoder -> MPMediaEncoder instance
upstream.encoder = [MPMediaEncoder new];
#endif
[upstream start];
break;
}
case STREAM_CREATED: {
break;
}
case STREAM_PAUSED: {
// btnPublish.title = @"Start";
btnToggle.enabled = NO;
break;
}
case STREAM_PLAYING: {
[self sendMetadata];
[upstream setPreviewLayer:previewView];
hostTextField.hidden = YES;
streamTextField.hidden = YES;
previewView.hidden = NO;
// btnPublish.title = @"Pause";
btnPublish.enabled = YES;
btnToggle.enabled = YES;
break;
}
default:
break;
}
}
-(void)connectFailed:(id)sender code:(int)code description:(NSString *)description {
NSLog(@" $$$$$$ <MPIMediaStreamEvent> connectFailedEvent: %d = %@, [%@]", code, description, [NSThread isMainThread]?@"M":@"T");
if (!upstream)
return;
[self setDisconnect];
[self showAlert:(code == -1) ?
@"Unable to connect to the server. Make sure the hostname/IP address and port number are valid" :
[NSString stringWithFormat:@"connectFailedEvent: %@", description]];
}
/*/// Send metadata for each video frame
-(void)pixelBufferShouldBePublished:(CVPixelBufferRef)pixelBuffer timestamp:(int)timestamp {
//[upstream sendMetadata:@{@"videoTimestamp":[NSNumber numberWithInt:timestamp]} event:@"videoFrameOptions:"];
//
CVPixelBufferRef frameBuffer = pixelBuffer;
// The base address is only valid while the buffer is locked.
CVPixelBufferLockBaseAddress(frameBuffer, kCVPixelBufferLock_ReadOnly);
// Get the base address of the pixel buffer.
uint8_t *baseAddress = CVPixelBufferGetBaseAddress(frameBuffer);
// Get the data size for contiguous planes of the pixel buffer.
size_t bufferSize = CVPixelBufferGetDataSize(frameBuffer);
// Get the pixel buffer width and height.
size_t width = CVPixelBufferGetWidth(frameBuffer);
size_t height = CVPixelBufferGetHeight(frameBuffer);
CVPixelBufferUnlockBaseAddress(frameBuffer, kCVPixelBufferLock_ReadOnly);
[upstream sendMetadata:@{@"videoTimestamp":[NSNumber numberWithInt:timestamp], @"bufferSize":[NSNumber numberWithUnsignedLong:bufferSize], @"width":[NSNumber numberWithUnsignedLong:width], @"height":[NSNumber numberWithUnsignedLong:height]} event:@"videoFrameOptions:"];
//
}
/*/
@end