How to use the inter-process communication code.
Step 1.
Unzip the package and add the files to your project. When adding them, make sure the broadcast extension target is also checked.
Step 2. In the SampleHandler file, add the following code:
#import <AgoraRtcKit/AgoraRtcEngineKit.h>
#import "RongRTCBufferUtil.h"
#import "RongRTCClientSocket.h" // assumed header name; import whichever header declares RongRTCClientSocket
@interface SampleHandler ()
@property (nonatomic, strong) RongRTCClientSocket *clientSocket;
@property (nonatomic, assign) size_t w;
@property (nonatomic, assign) size_t h;
@property (nonatomic, assign) AgoraVideoRotation rotate;
@property (nonatomic, assign) NSTimeInterval lastSendTs;
@property (nonatomic, assign) CMSampleBufferRef bufferCopy;
@property (nonatomic, weak) NSTimer *bufferTimer;
@end
Lazily create the client socket:
- (RongRTCClientSocket *)clientSocket {
    if (!_clientSocket) {
        _clientSocket = [[RongRTCClientSocket alloc] init];
        [_clientSocket createCliectSocket:(int)_w height:(int)_h];
    }
    return _clientSocket;
}
Then, inside this ReplayKit callback:
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    // If the buffer is a video buffer, call the method below to encode it and send it over the socket.
}
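One minimal way to implement that check, using the standard ReplayKit buffer types (only video frames are forwarded here; skipping the audio buffers is an assumption of this sketch):

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Video frame: encode it and send it to the main app over the socket.
            [self sendData:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
        case RPSampleBufferTypeAudioMic:
            // Audio buffers are not forwarded in this sample.
            break;
        default:
            break;
    }
}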
- (void)sendData:(CMSampleBufferRef)sampleBuffer {
    // Cache the frame size from the first frame; the lazy clientSocket getter above uses it.
    if (_w == 0 || _h == 0) {
        CVPixelBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        _w = CVPixelBufferGetWidth(buffer);
        _h = CVPixelBufferGetHeight(buffer);
    }
    [self.clientSocket encodeBuffer:sampleBuffer];
}
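The lastSendTs property declared earlier is unused in the snippet above; presumably it is intended to throttle how often frames are pushed through the socket. A possible sketch, placed at the top of -sendData: before the encode call, assuming a cap of roughly 30 fps (the exact interval is a guess, not something the original code specifies):

if (CFAbsoluteTimeGetCurrent() - self.lastSendTs < 1.0 / 30.0) {
    return; // drop frames that arrive faster than the assumed 30 fps cap
}
self.lastSendTs = CFAbsoluteTimeGetCurrent();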
Step 3. In the main app's view controller, declare the server socket:
@property (nonatomic, strong) RongRTCServerSocket *screenServerSocket;
Lazily create it:
- (RongRTCServerSocket *)screenServerSocket {
    if (!_screenServerSocket) {
        RongRTCServerSocket *socket = [[RongRTCServerSocket alloc] init];
        socket.delegate = self;
        _screenServerSocket = socket;
    }
    return _screenServerSocket;
}
The delegate method below is where you receive the buffers sent over from the extension; they have already been decoded.
- (void)VZdidProcessSampleBuffer:(CVPixelBufferRef)cvPixelBufferRef andWith:(CMTime)time {
    if (self.isScreen) {
        DebugLog(@"%@", cvPixelBufferRef);
        // Hand the decoded frame to the Agora engine through the custom video source's consumer.
        [self.screenVideos.consumer consumePixelBuffer:cvPixelBufferRef withTimestamp:time rotation:self.currerRotation];
    }
}

- (void)startPushOrStopPush:(BOOL)isStart {
    self.isScreen = isStart;
}
The method the app calls once it learns that the extension has started:
- (void)screenStartAction {
    // This is where the first approach is implemented.
    if (!_screenServerSocket) {
        CGFloat W = MIN(ScreenWidth, ScreenHeight) * UIScreen.mainScreen.scale;
        CGFloat H = MAX(ScreenWidth, ScreenHeight) * UIScreen.mainScreen.scale;
        [self.screenServerSocket createServerSocket:W height:H];
    }
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.rtcEngine muteAllRemoteAudioStreams:YES];
        [self.rtcEngine muteAllRemoteVideoStreams:YES];
        [self.rtcEngine setVideoSource:self.screenVideos];
    });
}
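When the extension stops, the mirror of this method would presumably undo those calls. A hedged sketch (the method name screenStopAction is an assumption, as is passing nil to setVideoSource: to fall back to the default camera source):

- (void)screenStopAction {
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.rtcEngine muteAllRemoteAudioStreams:NO];
        [self.rtcEngine muteAllRemoteVideoStreams:NO];
        [self.rtcEngine setVideoSource:nil]; // assumption: nil restores the default video source
    });
}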
RongRTCServerSocketProtocol is the delegate protocol referred to above.
Finally, here is the screen video source class (VZScreenShareVideo):
#import <Foundation/Foundation.h>
#import <AgoraRtcKit/AgoraRtcEngineKit.h>
@protocol VZScreenShareVideoDelegate <NSObject>
- (void)startPushOrStopPush:(BOOL)isStart;
@end
NS_ASSUME_NONNULL_BEGIN
@interface VZScreenShareVideo : NSObject <AgoraVideoSourceProtocol>
@property (nonatomic, weak) id<VZScreenShareVideoDelegate> delegate;
@end
NS_ASSUME_NONNULL_END
#import "VZScreenShareVideo.h"
@implementation VZScreenShareVideo
@synthesize consumer;
- (BOOL)shouldInitialize {
    return YES;
}

- (AgoraVideoBufferType)bufferType {
    return AgoraVideoBufferTypePixelBuffer;
}

- (AgoraVideoContentHint)contentHint {
    return AgoraVideoContentHintNone;
}

- (AgoraVideoCaptureType)captureType {
    return AgoraVideoCaptureTypeScreen;
}

- (void)shouldDispose {
}

- (void)shouldStart {
    if (self.delegate) {
        [self.delegate startPushOrStopPush:YES];
    }
}

- (void)shouldStop {
    if (self.delegate) {
        [self.delegate startPushOrStopPush:NO];
    }
}

@end
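To tie it together, the controller from Step 3 presumably owns an instance of this video source and acts as its delegate. A minimal sketch (the class extension shown here is an assumption; screenVideos and isScreen match the names used above):

@interface ViewController () <VZScreenShareVideoDelegate, RongRTCServerSocketProtocol>
@property (nonatomic, strong) VZScreenShareVideo *screenVideos;
@property (nonatomic, assign) BOOL isScreen;
@end

// e.g. in viewDidLoad (assumption):
self.screenVideos = [[VZScreenShareVideo alloc] init];
self.screenVideos.delegate = self; // so -startPushOrStopPush: reaches this controller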