Screen Sharing and Meeting Recording using Amazon Chime SDK in react native

480 Views Asked by At

I have integrated the Amazon Chime SDK into my React Native application by following this guide: https://github.com/aws-samples/amazon-chime-react-native-demo. I am new to React Native and have little experience with native (iOS/Android) development. I need to add screen sharing and meeting recording to my application, and I have been stuck for a week. Any help would be much appreciated.

These are the files containing the relevant functions. MeetingObservers.h:

#import <AmazonChimeSDK/AmazonChimeSDK-Swift.h>
#import <Foundation/Foundation.h>

@class NativeMobileSDKBridge;

// Names of the events emitted across the React Native bridge.
// They must match the strings returned by -[NativeMobileSDKBridge supportedEvents]
// and the listener names registered on the JavaScript side.
#define kEventOnMeetingStart @"OnMeetingStart"
#define kEventOnMeetingEnd @"OnMeetingEnd"
#define kEventOnAttendeesJoin @"OnAttendeesJoin"
#define kEventOnAttendeesLeave @"OnAttendeesLeave"
#define kEventOnAttendeesMute @"OnAttendeesMute"
#define kEventOnAttendeesUnmute @"OnAttendeesUnmute"
#define kEventOnAddVideoTile @"OnAddVideoTile"
#define kEventOnRemoveVideoTile @"OnRemoveVideoTile"
#define kEventOnDataMessageReceive @"OnDataMessageReceive"
#define kEventOnError @"OnError"

// Error payload emitted when the video session reports that no more
// concurrent video streams can be started.
#define kErrorEventOnMaximumConcurrentVideoReached @"OnMaximumConcurrentVideoReached"
// Chime meeting-session status code "videoAtCapacityViewOnly".
// NOTE(review): value 206 mirrors the SDK's MeetingSessionStatusCode enum — confirm
// it matches the AmazonChimeSDK version in use.
#define sVideoAtCapacityViewOnly 206

// Single adapter object that implements the Chime SDK observer protocols and
// forwards every callback to JavaScript through NativeMobileSDKBridge.
@interface MeetingObservers : NSObject <RealtimeObserver, VideoTileObserver, AudioVideoObserver, DataMessageObserver>
/// Designated initializer.
/// @param bridge Event emitter used to forward SDK callbacks to the JS layer.
/// @param logger Console logger used for diagnostic output.
- (id)initWithBridge:(NativeMobileSDKBridge *) bridge logger:(ConsoleLogger * )logger;
@end

MeetingObservers.m

//
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: MIT-0
//

#import <Foundation/Foundation.h>
#import "MeetingObservers.h"
#import "NativeMobileSDKBridge.h"

@implementation MeetingObservers
{
  // Injected collaborators: the bridge emits events to JavaScript, the logger
  // records diagnostics. Both are set once in the initializer.
  NativeMobileSDKBridge* _bridge;
  ConsoleLogger * _logger;
}

/// Designated initializer.
/// @param bridge Event emitter used to forward SDK callbacks to the JS layer.
/// @param logger Console logger used for diagnostic output.
- (id)initWithBridge: (NativeMobileSDKBridge *) bridge logger:(ConsoleLogger * )logger
{
  // Fix: the original assigned ivars and returned self without ever calling
  // the superclass designated initializer, violating the NSObject init contract.
  if (self = [super init])
  {
    _bridge = bridge;
    _logger = logger;
  }
  return self;
}

#pragma mark - RealtimeObserver

// Forwards each newly joined attendee to JS as an OnAttendeesJoin event.
- (void)attendeesDidJoinWithAttendeeInfo:(NSArray<AttendeeInfo *> * _Nonnull)attendeeInfo
{
  for (id currentAttendeeInfo in attendeeInfo)
  {
    [_bridge sendEventWithName:kEventOnAttendeesJoin body:@{@"attendeeId":[currentAttendeeInfo attendeeId], @"externalUserId":[currentAttendeeInfo externalUserId]}];
    [_logger infoWithMsg:[NSString stringWithFormat:@"Attendee %@ join", [currentAttendeeInfo externalUserId]]];
  }
}

// Forwards each attendee who left normally as an OnAttendeesLeave event.
- (void)attendeesDidLeaveWithAttendeeInfo:(NSArray<AttendeeInfo *> * _Nonnull)attendeeInfo
{
  for (id currentAttendeeInfo in attendeeInfo)
  {
    [_bridge sendEventWithName:kEventOnAttendeesLeave body:@{@"attendeeId":[currentAttendeeInfo attendeeId], @"externalUserId":[currentAttendeeInfo externalUserId]}];
    [_logger infoWithMsg:[NSString stringWithFormat:@"AttendeeQuit(leave) : %@ ", [currentAttendeeInfo externalUserId]]];
  }
}

// Forwards mute notifications. NOTE(review): unlike join/leave, the event body
// is the bare attendeeId string (not a dictionary) — the JS side must expect that.
- (void)attendeesDidMuteWithAttendeeInfo:(NSArray<AttendeeInfo *> * _Nonnull)attendeeInfo
{
  for (id currentAttendeeInfo in attendeeInfo)
  {
    [_bridge sendEventWithName:kEventOnAttendeesMute body:[currentAttendeeInfo attendeeId]];
    [_logger infoWithMsg:[NSString stringWithFormat:@"Attendee %@ mute", [currentAttendeeInfo externalUserId]]];
  }
}

// Forwards unmute notifications; same bare-string body shape as mute above.
- (void)attendeesDidUnmuteWithAttendeeInfo:(NSArray<AttendeeInfo *> * _Nonnull)attendeeInfo
{
  for (id currentAttendeeInfo in attendeeInfo)
  {
    [_bridge sendEventWithName:kEventOnAttendeesUnmute body:[currentAttendeeInfo attendeeId]];
    [_logger infoWithMsg:[NSString stringWithFormat:@"Attendee %@ unmute", [currentAttendeeInfo externalUserId]]];
  }
}

// Logged only; signal-strength changes are not forwarded to JavaScript.
- (void)signalStrengthDidChangeWithSignalUpdates:(NSArray<SignalUpdate *> * _Nonnull)signalUpdates
{
  for (id currentSignalUpdate in signalUpdates)
  {
    [_logger infoWithMsg:[NSString stringWithFormat:@"Attendee %@ signalStrength changed to %lu", [[currentSignalUpdate attendeeInfo] attendeeId], [currentSignalUpdate signalStrength]]];
  }
}

// Logged only; volume changes are not forwarded to JavaScript.
- (void)volumeDidChangeWithVolumeUpdates:(NSArray<VolumeUpdate *> * _Nonnull)volumeUpdates
{
  for (id currentVolumeUpdate in volumeUpdates)
  {
    [_logger infoWithMsg:[NSString stringWithFormat:@"Attendee %@ volumeLevel changed to %ld", [[currentVolumeUpdate attendeeInfo] attendeeId], [currentVolumeUpdate volumeLevel]]];
  }
}

// A dropped attendee is reported to JS with the same OnAttendeesLeave event
// as a normal leave; only the log message distinguishes the two.
- (void)attendeesDidDropWithAttendeeInfo:(NSArray<AttendeeInfo *> * _Nonnull)attendeeInfo
{
  for (id currentAttendeeInfo in attendeeInfo)
  {
    [_bridge sendEventWithName:kEventOnAttendeesLeave body:@{@"attendeeId":[currentAttendeeInfo attendeeId], @"externalUserId":[currentAttendeeInfo externalUserId]}];
    [_logger infoWithMsg:[NSString stringWithFormat:@"AttendeeQuit(drop) : %@ ", [currentAttendeeInfo externalUserId]]];
  }
}

#pragma mark - VideoTileObserver

// Forwards a new video tile (camera or content/screen share) to JS so it can
// bind a native video view to the tileId.
- (void)videoTileDidAddWithTileState:(VideoTileState * _Nonnull)tileState
{
  [_bridge sendEventWithName:kEventOnAddVideoTile body:@{@"tileId":[NSNumber numberWithInt: (int)tileState.tileId], @"isLocal":@(tileState.isLocalTile), @"isScreenShare":@(tileState.isContent), @"attendeeId":tileState.attendeeId, @"pauseState":[NSNumber numberWithInt: (int)tileState.pauseState], @"videoStreamContentHeight":[NSNumber numberWithInt: (int)tileState.videoStreamContentHeight], @"videoStreamContentWidth":[NSNumber numberWithInt: (int)tileState.videoStreamContentWidth]}];
}

- (void)videoTileDidPauseWithTileState:(VideoTileState * _Nonnull)tileState
{
  // Not implemented for demo purposes
}

// Tells JS to unbind and discard the view associated with this tileId.
- (void)videoTileDidRemoveWithTileState:(VideoTileState * _Nonnull)tileState
{
   [_bridge sendEventWithName:kEventOnRemoveVideoTile body:@{@"tileId":[NSNumber numberWithInt: (int)tileState.tileId], @"isLocal":@(tileState.isLocalTile), @"isScreenShare":@(tileState.isContent)}];
}

- (void)videoTileDidResumeWithTileState:(VideoTileState * _Nonnull)tileState
{
  // Not implemented for demo purposes
}

- (void)videoTileSizeDidChangeWithTileState:(VideoTileState * _Nonnull)tileState {
  // Not implemented for demo purposes
}

#pragma mark - AudioVideoObserver

- (void)audioSessionDidCancelReconnect
{
  // Not implemented for demo purposes
}

- (void)audioSessionDidStartConnectingWithReconnecting:(BOOL)reconnecting
{
  // Not implemented for demo purposes
}

// OnMeetingStart is only emitted on the initial connection, not on
// reconnects, so the JS layer doesn't re-run its meeting-start logic.
- (void)audioSessionDidStartWithReconnecting:(BOOL)reconnecting
{
  if (!reconnecting)
  {
    [_logger infoWithMsg:@"Meeting Started!"];
    [_bridge sendEventWithName:kEventOnMeetingStart body:nil];
  }
}

- (void)audioSessionDidStopWithStatusWithSessionStatus:(MeetingSessionStatus * _Nonnull)sessionStatus
{
  // Not implemented for demo purposes
}

- (void)connectionDidBecomePoor
{
  // Not implemented for demo purposes
}

- (void)connectionDidRecover
{
  // Not implemented for demo purposes
}

- (void)videoSessionDidStartConnecting
{
  // Not implemented for demo purposes
}

// Surfaces the "video at capacity, view only" condition to JS as an error;
// all other status codes are ignored.
- (void)videoSessionDidStartWithStatusWithSessionStatus:(MeetingSessionStatus * _Nonnull)sessionStatus
{
  if (sessionStatus.statusCode == sVideoAtCapacityViewOnly)
  {
    [_bridge sendEventWithName:kEventOnError body:kErrorEventOnMaximumConcurrentVideoReached];
  }
}

- (void)videoSessionDidStopWithStatusWithSessionStatus:(MeetingSessionStatus * _Nonnull)sessionStatus
{
  // Not implemented for demo purposes
}

- (void)audioSessionDidDrop
{
  // Not implemented for demo purposes
}

- (void)remoteVideoSourcesDidBecomeAvailableWithSources:(NSArray<RemoteVideoSource *> * _Nonnull)sources {
  // Not implemented for demo purposes
}

- (void)remoteVideoSourcesDidBecomeUnavailableWithSources:(NSArray<RemoteVideoSource *> * _Nonnull)sources {
  // Not implemented for demo purposes
}

#pragma mark - DataMessageObserver

// Forwards a received data message to JS. NOTE(review): dictionary literals
// throw on nil values — this relies on the SDK's nonnull annotations for the
// DataMessage accessors; confirm against the AmazonChimeSDK version in use.
- (void)dataMessageDidReceivedWithDataMessage:(DataMessage *)dataMessage {
  [_bridge sendEventWithName:kEventOnDataMessageReceive body:@{
    @"data":[dataMessage text],
    @"topic":[dataMessage topic],
    @"senderAttendeeId":[dataMessage senderAttendeeId],
    @"senderExternalUserId":[dataMessage senderExternalUserId],
    @"throttled":@(dataMessage.throttled),
    @"timestampMs":@(dataMessage.timestampMs)
  }];
}

@end

NativeMobileSDKBridge.h

#import <Foundation/Foundation.h>
#import <React/RCTBridgeModule.h>
#import <React/RCTEventEmitter.h>
#import <AmazonChimeSDK/AmazonChimeSDK-Swift.h>

// Keys expected in the meeting-info dictionary passed from JavaScript.
// They mirror the field names of the Chime CreateMeeting API response.
#define kMeetingId @"MeetingId"
#define kExternalMeetingId @"ExternalMeetingId"
#define kMediaRegion @"MediaRegion"

// Keys expected in the attendee-info dictionary (CreateAttendee response).
#define kAttendeeId @"AttendeeId"
#define kExternalUserId @"ExternalUserId"
#define kJoinToken @"JoinToken"

// Keys of the nested MediaPlacement dictionary inside the meeting info.
#define kMediaPlacement @"MediaPlacement"
#define kAudioFallbackUrl @"AudioFallbackUrl"
#define kAudioHostUrl @"AudioHostUrl"
#define kTurnControlUrl @"TurnControlUrl"
#define kSignalingUrl @"SignalingUrl"

// React Native module that wraps the Amazon Chime SDK meeting session.
// Exposes start/stop/mute/video methods to JS (via RCT_EXPORT_METHOD in the
// implementation) and emits meeting events through RCTEventEmitter.
@interface NativeMobileSDKBridge : RCTEventEmitter <RCTBridgeModule>
@end

NativeMobileSDKBridge.m

#import "NativeMobileSDKBridge.h"
#import <AVFoundation/AVFoundation.h>
#import "RNVideoViewManager.h"
#import <AmazonChimeSDKMedia/AmazonChimeSDKMedia.h>
#import "MeetingObservers.h"
#import <React/RCTUIManager.h>

@implementation NativeMobileSDKBridge

// Single active meeting session shared by all exported methods.
// NOTE(review): static state means at most one meeting at a time — confirm
// this matches how the JS layer uses the module.
static DefaultMeetingSession *meetingSession;
static ConsoleLogger *logger;

RCT_EXPORT_MODULE();

// This module does no UIKit work during initialization, so React Native may
// create it on a background queue. Overriding this silences the RN
// "requiresMainQueueSetup" warning.
+ (BOOL)requiresMainQueueSetup
{
  return NO;
}

// Every event name passed to sendEventWithName: must be listed here,
// otherwise RCTEventEmitter raises at runtime.
- (NSArray<NSString *> *)supportedEvents
{
  return
  @[
    kEventOnMeetingStart,
    kEventOnMeetingEnd,
    kEventOnAttendeesJoin,
    kEventOnAttendeesLeave,
    kEventOnAttendeesMute,
    kEventOnAttendeesUnmute,
    kEventOnAddVideoTile,
    kEventOnRemoveVideoTile,
    kEventOnDataMessageReceive,
    kEventOnError
  ];
}

# pragma mark: Native Function

/// Starts a meeting from the CreateMeeting/CreateAttendee responses the JS
/// layer obtained from its backend. Any previously running session is stopped
/// and replaced. Emits OnMeetingStart (via MeetingObservers) once audio starts.
RCT_EXPORT_METHOD(startMeeting:(NSDictionary *)meetingInfoDict attendeeInfo:(NSDictionary *)attendeeInfoDict)
{
  // Tear down any existing session before starting a new one.
  if (meetingSession != nil)
  {
    [meetingSession.audioVideo stop];
    meetingSession = nil;
  }

  logger = [[ConsoleLogger alloc] initWithName:@"NativeMobileSDKBridge" level:LogLevelDEFAULT];
  [logger infoWithMsg: [[NSString alloc] initWithFormat:@"Running Amazon Chime SDK (%@)", Versioning.sdkVersion]];

  // Parse meeting join data from payload.
  NSDictionary *mediaPlacementDict = [meetingInfoDict objectForKey:kMediaPlacement];

  // Meeting-level fields (CreateMeeting response).
  NSString *meetingId = [meetingInfoDict objectForKey:kMeetingId];
  NSString *externalMeetingId = [meetingInfoDict objectForKey:kExternalMeetingId];
  NSString *meetingRegion = [meetingInfoDict objectForKey:kMediaRegion];

  // MediaPlacement URLs used by the SDK to connect audio/video/signaling.
  NSString *audioFallbackUrl = [mediaPlacementDict objectForKey:kAudioFallbackUrl];
  NSString *audioHostUrl = [mediaPlacementDict objectForKey:kAudioHostUrl];
  NSString *turnControlUrl = [mediaPlacementDict objectForKey:kTurnControlUrl];
  NSString *signalingUrl = [mediaPlacementDict objectForKey:kSignalingUrl];

  // Attendee-level fields (CreateAttendee response).
  NSString *attendeeId = [attendeeInfoDict objectForKey:kAttendeeId];
  NSString *externalUserId = [attendeeInfoDict objectForKey:kExternalUserId];
  NSString *joinToken = [attendeeInfoDict objectForKey:kJoinToken];

  // Rebuild the SDK configuration objects from the parsed fields.
  MediaPlacement *mediaPlacement = [[MediaPlacement alloc] initWithAudioFallbackUrl:audioFallbackUrl
                                                                       audioHostUrl:audioHostUrl
                                                                       signalingUrl:signalingUrl
                                                                     turnControlUrl:turnControlUrl];

  Meeting *meeting = [[Meeting alloc] initWithExternalMeetingId:externalMeetingId
                                                 mediaPlacement:mediaPlacement
                                                    mediaRegion:meetingRegion
                                                      meetingId:meetingId];

  CreateMeetingResponse *createMeetingResponse = [[CreateMeetingResponse alloc] initWithMeeting:meeting];

  Attendee *attendee = [[Attendee alloc] initWithAttendeeId:attendeeId
                                             externalUserId:externalUserId joinToken:joinToken];

  CreateAttendeeResponse *createAttendeeResponse = [[CreateAttendeeResponse alloc] initWithAttendee:attendee];
  MeetingSessionConfiguration *meetingSessionConfiguration = [[MeetingSessionConfiguration alloc] initWithCreateMeetingResponse:createMeetingResponse
                                                                                                         createAttendeeResponse:createAttendeeResponse];

  meetingSession = [[DefaultMeetingSession alloc] initWithConfiguration:meetingSessionConfiguration
                                                                 logger:logger];
  [self startAudioClient];
}

/// Stops the current meeting (safe to call when none is active — messaging
/// nil is a no-op) and notifies JS via OnMeetingEnd.
RCT_EXPORT_METHOD(stopMeeting)
{
  [meetingSession.audioVideo stop];
  meetingSession = nil;
  [self sendEventWithName:kEventOnMeetingEnd body: nil];
}

/// Mutes or unmutes the local attendee; emits OnError if the SDK call fails.
RCT_EXPORT_METHOD(setMute:(BOOL)isMute)
{
  BOOL success = YES;
  if (isMute)
  {
      success = [meetingSession.audioVideo realtimeLocalMute];
  }
  else
  {
      success = [meetingSession.audioVideo realtimeLocalUnmute];
  }

  if (!success)
  {
    [self sendEventWithName:kEventOnError body:@"Failed to set mute state"];
  }
}

/// Turns the local camera on (requesting permission if needed) or off.
RCT_EXPORT_METHOD(setCameraOn:(BOOL)isOn)
{
  if (isOn)
  {
    [self startVideo];
  }
  else
  {
    [meetingSession.audioVideo stopLocalVideo];
  }
}

/// Binds a native video view (looked up by its React tag) to a tile.
/// UI work must happen on the main queue.
RCT_EXPORT_METHOD(bindVideoView:(NSNumber * _Nonnull)viewIdentifier tileId:(NSNumber * _Nonnull)tileId)
{
  dispatch_async(dispatch_get_main_queue(), ^{
    UIView* view = [self.bridge.uiManager viewForReactTag:viewIdentifier];
    [meetingSession.audioVideo bindVideoViewWithVideoView:(DefaultVideoRenderView*)view tileId:[tileId integerValue]];
  });
}

/// Unbinds whatever view is attached to the given tile.
RCT_EXPORT_METHOD(unbindVideoView:(NSNumber * _Nonnull)tileId)
{
  dispatch_async(dispatch_get_main_queue(), ^{
    [meetingSession.audioVideo unbindVideoViewWithTileId:[tileId integerValue]];
  });
}

#pragma mark: Media Related Function

// Starts local video after resolving the camera permission state.
// - Not determined: ask, then retry on grant.
// - Authorized: start the local video stream.
// - Denied: send the user to the app's Settings page.
-(void)startVideo
{
  AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
  switch (status)
  {
    case AVAuthorizationStatusNotDetermined:
    {
      [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted)
      {
          if (granted)
          { // Access has been granted ..retry starting video
            [self startVideo];
          } else { // Access denied
            [self sendEventWithName:kEventOnError body:@"User denied camera permission"];
          }
      }];
      break;
    }
    case AVAuthorizationStatusAuthorized:
    {
      NSError* error = nil;
      // Fix: check the BOOL return value instead of the error pointer —
      // Cocoa only guarantees the error object is meaningful on failure.
      if (![meetingSession.audioVideo startLocalVideoAndReturnError:&error])
      {
        [self sendEventWithName:kEventOnError body:@"Fail to start local video"];
      }
      break;
    }
    case AVAuthorizationStatusDenied:
    {
      // Fix: UIApplication must be used on the main thread, and the plain
      // openURL: variant is deprecated since iOS 10.
      dispatch_async(dispatch_get_main_queue(), ^{
        NSURL *settingsURL = [NSURL URLWithString:UIApplicationOpenSettingsURLString];
        [[UIApplication sharedApplication] openURL:settingsURL
                                           options:@{}
                                 completionHandler:nil];
      });
      break;
    }
    default:
      break;
  }
}

// Registers one MeetingObservers instance for all observer protocols, then
// kicks off the audio/video session.
-(void)startAudioClient
{
  if (meetingSession == nil)
  {
    [logger errorWithMsg:@"meetingSession is not initialized"];
    return;
  }
  MeetingObservers* observer = [[MeetingObservers alloc] initWithBridge:self logger:logger];
  [meetingSession.audioVideo addRealtimeObserverWithObserver:observer];
  [meetingSession.audioVideo addVideoTileObserverWithObserver:observer];
  [meetingSession.audioVideo addAudioVideoObserverWithObserver:observer];
  [meetingSession.audioVideo addRealtimeDataMessageObserverWithTopic:@"chat" observer:observer];
  [self startAudioVideo];
}

// Starts the session's audio/video. On a permission failure it requests the
// microphone permission and retries; any other failure is surfaced to JS.
-(void)startAudioVideo
{
   NSError* error = nil;
   BOOL started = [meetingSession.audioVideo startAndReturnError:&error];
   if (started && error == nil)
   {
     [logger infoWithMsg:@"RN meeting session was started successfully"];

     [meetingSession.audioVideo startRemoteVideo];
   }
   else
   {
     NSString *errorMsg = [NSString stringWithFormat:@"Failed to start meeting, error: %@", error.description];
     [logger errorWithMsg:errorMsg];

     // Handle missing microphone permission.
     if ([error.domain isEqual:@"AmazonChimeSDK.PermissionError"])
     {
       AVAudioSessionRecordPermission permissionStatus = [[AVAudioSession sharedInstance] recordPermission];
       if (permissionStatus == AVAudioSessionRecordPermissionUndetermined)
       {
         [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted)
         {
           if (granted)
           {
             [logger infoWithMsg:@"Audio permission granted"];
             // Retry after permission is granted
             [self startAudioVideo];
           }
           else
           {
             [logger infoWithMsg:@"Audio permission not granted"];
             [self sendEventWithName:kEventOnMeetingEnd body:nil];
           }
         }];
       }
       else if (permissionStatus == AVAudioSessionRecordPermissionDenied)
       {
         [logger errorWithMsg:@"User did not grant permission, should redirect to Settings"];
         [self sendEventWithName:kEventOnMeetingEnd body:nil];
       }
     }
     else
     {
       // Uncaught error
       [self sendEventWithName:kEventOnError body: errorMsg];
       [self sendEventWithName:kEventOnMeetingEnd body:nil];
     }
   }
}

/// Sends a data message on the given topic. No-op when no meeting is active.
RCT_EXPORT_METHOD(sendDataMessage:(NSString* _Nonnull)topic data:(NSString* _Nonnull)data lifetimeMs:(int)lifetimeMs)
{
  if (meetingSession == nil) {
    return;
  }

  NSError *error = nil;
  // Fix: the original discarded the error (passed error:nil); log failures
  // so dropped messages are at least diagnosable.
  if (![meetingSession.audioVideo realtimeSendDataMessageWithTopic:topic
                                                              data:data
                                                        lifetimeMs:lifetimeMs
                                                             error:&error])
  {
    [logger errorWithMsg:[NSString stringWithFormat:@"Failed to send data message: %@", error.description]];
  }
}

@end

I am trying to integrate screen-sharing and meeting-recording functionality, but I am not familiar with native programming.

0

There are 0 best solutions below