本文档详细说明iOS12+的屏幕共享使用方法。
屏幕流获取
1、在项目中添加Targets
2、添加Broadcast Upload Extension —> Next
3、在你想要实现屏幕共享的界面添加调取方法
#import "ViewController.h"
#import <ReplayKit/ReplayKit.h>
#define TAG_SHARESCREEN 10086
@interface ViewController ()
/// System-provided broadcast picker; tapping it presents the screen-recording sheet.
@property (nonatomic, strong) RPSystemBroadcastPickerView *broadPickerView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Create the system broadcast picker and point it at our upload extension
    // so the picker pre-selects it in the broadcast sheet.
    RPSystemBroadcastPickerView *picker =
        [[RPSystemBroadcastPickerView alloc] initWithFrame:CGRectMake(50, 50, 200, 200)];
    picker.preferredExtension = @"此处填写你创建的Broadcast Upload Extension 的Bundle id(不是SetupUI的那个)";
    self.broadPickerView = picker;
    [self.view addSubview:picker];
}

@end
运行后,点击屏幕上的按钮即可调出开始录屏的界面。
根据需求:可能不想要系统自带的按钮,可以做以下优化:
#import "ViewController.h"
#import <ReplayKit/ReplayKit.h>
#define TAG_SHARESCREEN 10086
@interface ViewController ()
/// Kept only so we can programmatically trigger its internal button;
/// it is never added to the view hierarchy in this variant.
@property (nonatomic, strong) RPSystemBroadcastPickerView *broadPickerView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Hidden system picker, configured with our upload extension's bundle id.
    self.broadPickerView =
        [[RPSystemBroadcastPickerView alloc] initWithFrame:CGRectMake(50, 50, 200, 200)];
    self.broadPickerView.preferredExtension = @"此处填写你创建的Broadcast Upload Extension 的Bundle id(不是SetupUI的那个)";

    // Our own, freely styleable button that stands in for the system one.
    UIButton *recordButton = [[UIButton alloc] initWithFrame:CGRectMake(50, 50, 300, 40)];
    [recordButton setTitle:@"点我就好了" forState:UIControlStateNormal];
    [recordButton setTitleColor:[UIColor redColor] forState:UIControlStateNormal];
    [recordButton addTarget:self
                     action:@selector(clickedOnStartRecordButton:)
           forControlEvents:UIControlEventTouchUpInside];
    recordButton.tag = TAG_SHARESCREEN;
    [self.view addSubview:recordButton];
}

/// Forwards a tap on our custom button to the hidden system picker's internal
/// button, which presents the "start broadcast" sheet.
- (void)clickedOnStartRecordButton:(UIButton *)sender {
    if (sender.tag != TAG_SHARESCREEN) {
        return;
    }
    for (UIView *subview in self.broadPickerView.subviews) {
        if (![subview isKindOfClass:[UIButton class]]) {
            continue;
        }
        // NOTE(review): some write-ups fire UIControlEventTouchDown here;
        // TouchUpInside worked in our testing — verify on your target OS.
        [(UIButton *)subview sendActionsForControlEvents:UIControlEventTouchUpInside];
    }
}

@end
调试:
1、先运行项目(demo)到手机上
例:demo
此时操作录屏等操作,只能断点到demo里的代码
2、检测IDRS_Demo中的数据
检测IDRS_Demo中的数据:运行IDRS_Demo 选择上述的项目
选择项目匹配:
此时可断点到 IDRS_Demo 下的所有代码
3、检测IDRS_DemoSetupUI:
检测IDRS_DemoSetupUI:运行IDRS_DemoSetupUI选择上述的项目(这个地方没有用到)
此时可断点到 IDRS_DemoSetupUI 下的所有代码
同步数据
把IDRS_Demo获取的屏幕流推送给demo(主App),demo(主App)推送屏幕流给RTC
简述方法实现
Socket实现数据同步:
- 1、使用了Socket和Codec两个文件夹中的代码,把两个文件夹拖入自己的主App中。
- 2、SampleHandler中使用时,需要关联主App加载的代码,方法如下:
详细的使用方法
1、实现获取屏幕流
SampleHandler.m中实现,使用socket发送
#import "SampleHandler.h"
#import "IDRSClientSocket.h"
@interface SampleHandler ()
/// Client side of the local socket; streams captured buffers to the host app.
@property (nonatomic, strong) IDRSClientSocket *clientSocket;
@end
监听开始
/// Called by ReplayKit when the user starts the broadcast.
/// Opens the client socket to the host app and announces readiness.
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
// User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
self.clientSocket = [[IDRSClientSocket alloc] init];
// NOTE(review): "createCliectSocket" looks like a typo for "createClientSocket"
// in the IDRS socket API — must match the selector declared in IDRSClientSocket.h.
[self.clientSocket createCliectSocket];
// "初始化" (= "initialize") tells the host app the extension is connected.
[self sendStringData:@"初始化"];
}
监听结束
/// Called by ReplayKit when the user stops the broadcast.
/// Notifies the host app, then tears down the socket.
- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    // "停止" (= "stop") lets the host app switch back to camera capture.
    [self sendStringData:@"停止"];
    // Use the property accessor for consistency with the sibling methods,
    // and drop the reference so a closed socket can never be reused.
    [self.clientSocket close];
    self.clientSocket = nil;
}
监听数据流
/// ReplayKit delivers every captured sample buffer here.
/// Only video frames are forwarded to the host app; audio is currently dropped.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
// Dispatch on the buffer type reported by ReplayKit.
switch (sampleBufferType) {
case RPSampleBufferTypeVideo:
// Handle video sample buffer
[self sendData:sampleBuffer];
break;
case RPSampleBufferTypeAudioApp:
// Handle audio sample buffer for app audio
// App audio stream: 44100 Hz, stereo, 16-bit (not forwarded).
break;
case RPSampleBufferTypeAudioMic:
// Handle audio sample buffer for mic audio
// Mic audio stream: 48000 Hz, mono, 16-bit (not forwarded).
break;
default:
break;
}
}
具体发送信息方法
// Push one video sample buffer over the socket to the host app.
- (void)sendData:(CMSampleBufferRef)sampleBuffer{
[self.clientSocket encodeBuffer:sampleBuffer];
}
// Push a control/status string (e.g. start/stop markers) to the host app.
- (void)sendStringData:(NSString *)string {
    [self.clientSocket encodeStringBuffer:string];
}
2、实现socket接收
#import "ViewController.h"
#import <ReplayKit/ReplayKit.h>
#import "IDRSServerSocket.h"
@interface ViewController () <IDRSServerSocketProtocol>
/// System broadcast picker used to start the screen-share broadcast.
@property (nonatomic, strong) RPSystemBroadcastPickerView *broadPickerView;
/// Server side of the local socket; receives buffers from the upload extension.
@property (nonatomic, strong) IDRSServerSocket *serverSocket;
@end
初始化socket
/// Touches the lazy serverSocket property so the listening socket is created
/// and started as soon as the view loads.
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
[self.serverSocket createServerSocket];
}
/// Lazily builds the server socket and registers ourselves as its delegate.
- (IDRSServerSocket *)serverSocket {
    if (_serverSocket == nil) {
        _serverSocket = [[IDRSServerSocket alloc] init];
        _serverSocket.delegate = self;
    }
    return _serverSocket;
}
接收信息
/// IDRSServerSocketProtocol callback: a screen frame arrived from the extension.
-(void)didProcessSampleBuffer:(CMSampleBufferRef)sampleBuffer{
// Forward the frame to AliRTC as external video input.
[self screenStreaming:sampleBuffer];
}
/// IDRSServerSocketProtocol callback: a control string arrived from the extension.
/// "初始化" (initialize) and "停止" (stop) are the two messages SampleHandler sends.
-(void)didGetStringBuffer:(NSString *)string{
if ([string isEqualToString:@"初始化"]) {
// Extension connected; nothing to do until frames start arriving.
}else if ([string isEqualToString:@"停止"]){
// Broadcast ended: restore the local camera preview and close the socket.
// NOTE(review): isVideoTrue appears to be an ivar flag meaning "next frame
// must (re)configure the video source" — confirm its declaration elsewhere.
isVideoTrue = true;
// Switch AliRTC back to the internal (camera) video source.
[self.engine setExternalVideoSource:NO useTexture:NO sourceType:AliRtcVideosourceCameraLargeType renderMode:AliRtcRenderModeAuto];
[self.engine startPreview];
[self.serverSocket disconnect];
}
}
3、屏幕流推给aliRTC
aliRTC外部视频输入接口
/// Pushes one screen frame into AliRTC via the external video source API.
/// On the first frame after a broadcast starts, switches AliRTC from camera
/// capture to external input.
- (void)screenStreaming:(CMSampleBufferRef)sampleBuffer {
    if (isVideoTrue) {
        // One-time switch: detach the local camera view and enable the
        // external (screen) video source.
        isVideoTrue = false;
        [self.engine setLocalViewConfig:nil forTrack:AliRtcVideoTrackCamera];
        [self.engine setExternalVideoSource:YES useTexture:NO sourceType:AliRtcVideosourceCameraLargeType renderMode:AliRtcRenderModeFill];
    }
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        // Defensive: buffers without image data (e.g. malformed or audio-only)
        // must not be handed to the RTC engine.
        return;
    }
    AliRtcVideoDataSample *dataSample = [[AliRtcVideoDataSample alloc] init];
    dataSample.pixelBuffer = pixelBuffer;
    dataSample.type = AliRtcBufferType_CVPixelBuffer;
    // NOTE(review): the push result is currently not surfaced; log or handle
    // nonzero codes here if frames are observed to drop.
    int ret = [self.engine pushExternalVideoFrame:dataSample sourceType:AliRtcVideosourceCameraLargeType];
    (void)ret;
}