diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index a0cf5b55d3..2ad8b84f85 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -385,7 +385,12 @@ if (is_ios || is_mac) {
       "objc/components/network/RTCNetworkMonitor.mm",
     ]
 
-    configs += [ ":used_from_extension" ]
+    configs += [
+      "..:common_objc",
+      ":used_from_extension",
+    ]
+
+    public_configs = [ ":common_config_objc" ]
 
     frameworks = [ "Network.framework" ]
 
diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
index 9b123d2d05..cac9ab665c 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h
+++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
@@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN
 
-@interface RTCVideoRendererAdapter ()
+@interface RTC_OBJC_TYPE(RTCVideoRendererAdapter) ()
 
 /**
  * The Objective-C video renderer passed to this adapter during construction.
diff --git a/sdk/objc/api/RTCVideoRendererAdapter.h b/sdk/objc/api/RTCVideoRendererAdapter.h
index b0b6f04488..bbb8c6e71c 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter.h
+++ b/sdk/objc/api/RTCVideoRendererAdapter.h
@@ -10,6 +10,8 @@
 
 #import <Foundation/Foundation.h>
 
+#import "RTCMacros.h"
+
 NS_ASSUME_NONNULL_BEGIN
 
 /*
@@ -18,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN
  * adapter adapts calls made to that interface to the RTCVideoRenderer supplied
  * during construction.
  */
-@interface RTCVideoRendererAdapter : NSObject
+@interface RTC_OBJC_TYPE (RTCVideoRendererAdapter): NSObject
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm
index ef02f72f60..d992c64108 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter.mm
+++ b/sdk/objc/api/RTCVideoRendererAdapter.mm
@@ -17,10 +17,9 @@
 
 namespace webrtc {
 
-class VideoRendererAdapter
-    : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+class VideoRendererAdapter : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
  public:
-  VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
+  VideoRendererAdapter(RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter) {
     adapter_ = adapter;
     size_ = CGSizeZero;
   }
@@ -28,9 +27,9 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
     RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
 
-    CGSize current_size = (videoFrame.rotation % 180 == 0)
-                              ? CGSizeMake(videoFrame.width, videoFrame.height)
-                              : CGSizeMake(videoFrame.height, videoFrame.width);
+    CGSize current_size = (videoFrame.rotation % 180 == 0) ? 
+ CGSizeMake(videoFrame.width, videoFrame.height) : + CGSizeMake(videoFrame.height, videoFrame.width); if (!CGSizeEqualToSize(size_, current_size)) { size_ = current_size; @@ -40,12 +39,12 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { } private: - __weak RTCVideoRendererAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter_; CGSize size_; }; -} +} // namespace webrtc -@implementation RTCVideoRendererAdapter { +@implementation RTC_OBJC_TYPE (RTCVideoRendererAdapter) { std::unique_ptr _adapter; } @@ -60,7 +59,7 @@ - (instancetype)initWithNativeRenderer:(id)vide return self; } -- (rtc::VideoSinkInterface *)nativeVideoRenderer { +- (rtc::VideoSinkInterface*)nativeVideoRenderer { return _adapter.get(); } diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h index 4eb91b93c7..73c1a4e26a 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioDeviceModule () +@interface RTC_OBJC_TYPE(RTCAudioDeviceModule) () - (instancetype)initWithNativeModule:(rtc::scoped_refptr )module workerThread:(rtc::Thread *)workerThread; diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h index 1a9e339bd6..b02cecfd0b 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -39,8 +39,8 @@ RTC_OBJC_EXPORT // Executes low-level API's in sequence to switch the device // Use outputDevice / inputDevice property unless you need to know if setting the device is // successful. -- (BOOL)trySetOutputDevice:(nullable RTCIODevice *)device; -- (BOOL)trySetInputDevice:(nullable RTCIODevice *)device; +- (BOOL)trySetOutputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; +- (BOOL)trySetInputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm index 5c116fae53..c88de392d7 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -77,7 +77,7 @@ - (instancetype)initWithNativeModule:(rtc::scoped_refptrBlockingCall([self] { NSArray *devices = [self _outputDevices]; @@ -92,11 +92,11 @@ - (RTCIODevice *)outputDevice { }); } -- (void)setOutputDevice: (RTCIODevice *)device { +- (void)setOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { [self trySetOutputDevice: device]; } -- (BOOL)trySetOutputDevice: (RTCIODevice *)device { +- (BOOL)trySetOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { return _workerThread->BlockingCall([self, device] { @@ -108,7 +108,7 @@ - (BOOL)trySetOutputDevice: (RTCIODevice *)device { } if (device != nil) { - index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { return (*stop = [e.deviceId isEqualToString:device.deviceId]); }]; if (index == NSNotFound) { @@ -129,7 +129,7 @@ - (BOOL)trySetOutputDevice: (RTCIODevice *)device { }); } -- (RTCIODevice *)inputDevice { +- (RTC_OBJC_TYPE(RTCIODevice) *)inputDevice { return _workerThread->BlockingCall([self] { @@ -145,11 +145,11 @@ - (RTCIODevice *)inputDevice { }); } 
-- (void)setInputDevice: (RTCIODevice *)device { +- (void)setInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { [self trySetInputDevice: device]; } -- (BOOL)trySetInputDevice: (RTCIODevice *)device { +- (BOOL)trySetInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { return _workerThread->BlockingCall([self, device] { @@ -161,7 +161,7 @@ - (BOOL)trySetInputDevice: (RTCIODevice *)device { } if (device != nil) { - index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { return (*stop = [e.deviceId isEqualToString:device.deviceId]); }]; if (index == NSNotFound) { @@ -261,7 +261,7 @@ - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler _native->PlayoutDeviceName(i, name, guid); NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; - RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; [result addObject: device]; } } @@ -283,7 +283,7 @@ - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler _native->RecordingDeviceName(i, name, guid); NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; - RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; [result addObject: device]; } } diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 87deaf8aa8..40a6f5362c 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -28,12 +28,12 @@ class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { private: os_unfair_lock *lock_; - __weak RTCAudioTrack *audio_track_; + __weak RTC_OBJC_TYPE(RTCAudioTrack) *audio_track_; int64_t total_frames_ = 0; bool attached_ = false; public: - AudioSinkConverter(RTCAudioTrack *audioTrack, os_unfair_lock *lock) { + AudioSinkConverter(RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack, os_unfair_lock *lock) { RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; audio_track_ = audioTrack; lock_ = lock; @@ -274,7 +274,7 @@ - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { NSArray *renderers = [_renderers allObjects]; os_unfair_lock_unlock(&_lock); - for (id renderer in renderers) { + for (id renderer in renderers) { [renderer renderSampleBuffer:sampleBuffer]; } } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index 7f8ae739e0..3e9c768a6f 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -34,16 +34,16 @@ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} NSData *data_; }; -} +} // namespace // A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated // objects. 
-@interface RTCWrappedEncodedImageBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer): NSObject @property(nonatomic) rtc::scoped_refptr buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer; @end -@implementation RTCWrappedEncodedImageBuffer +@implementation RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer) @synthesize buffer = _buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer { @@ -59,7 +59,7 @@ @implementation RTC_OBJC_TYPE (RTCEncodedImage) (Private) - (rtc::scoped_refptr)encodedData { - RTCWrappedEncodedImageBuffer *wrappedBuffer = + RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; } @@ -68,7 +68,7 @@ - (void)setEncodedData:(rtc::scoped_refptr) return objc_setAssociatedObject( self, @selector(encodedData), - [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer], + [[RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) alloc] initWithEncodedImageBuffer:buffer], OBJC_ASSOCIATION_RETAIN_NONATOMIC); } diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h index 0eb09b83a4..e736c993e1 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice+Private.h +++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIODevice () +@interface RTC_OBJC_TYPE(RTCIODevice) () - (instancetype)initWithType:(RTCIODeviceType)type deviceId:(NSString *)deviceId diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm index 27e1255e8e..b3738f71fe 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice.mm +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm @@ -19,7 +19,7 @@ NSString *const kDefaultDeviceId = @"default"; -@implementation RTCIODevice +@implementation RTC_OBJC_TYPE(RTCIODevice) @synthesize type = _type; @synthesize deviceId = _deviceId; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 439490d370..5c82750d20 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -66,7 +66,7 @@ RTC_OBJC_EXPORT audioProcessingModule: (nullable id)audioProcessingModule; -@property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioDeviceModule) *audioDeviceModule; - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 5cb4f38c05..7b0557ba25 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -65,7 +65,7 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _workerThread; std::unique_ptr _signalingThread; rtc::scoped_refptr _nativeAudioDeviceModule; - RTCDefaultAudioProcessingModule *_defaultAudioProcessingModule; + RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *_defaultAudioProcessingModule; BOOL _hasStartedAecDump; } @@ -133,16 +133,16 @@ - (instancetype)init { - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { - webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + 
webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities: capabilities]; } - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { - webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities: capabilities]; } - (instancetype) @@ -164,10 +164,10 @@ - (instancetype)init { } rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; - if ([audioProcessingModule isKindOfClass:[RTCDefaultAudioProcessingModule class]]) { - _defaultAudioProcessingModule = (RTCDefaultAudioProcessingModule *)audioProcessingModule; + if ([audioProcessingModule isKindOfClass:[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) class]]) { + _defaultAudioProcessingModule = (RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *)audioProcessingModule; } else { - _defaultAudioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _defaultAudioProcessingModule = [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init]; } NSLog(@"AudioProcessingModule: %@", _defaultAudioProcessingModule); @@ -273,8 +273,9 @@ - (instancetype)initWithNativeAudioEncoderFactory: bypassVoiceProcessing == YES); }); - _audioDeviceModule = [[RTCAudioDeviceModule alloc] initWithNativeModule: _nativeAudioDeviceModule - workerThread: _workerThread.get()]; + _audioDeviceModule = + [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc] initWithNativeModule:_nativeAudioDeviceModule + workerThread:_workerThread.get()]; media_deps.adm = _nativeAudioDeviceModule; media_deps.task_queue_factory = dependencies.task_queue_factory.get(); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h index 070a0e74a5..4d7025bf93 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h @@ -12,9 +12,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder (DefaultComponents) +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 522e520e12..a2f633e1a4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -22,10 +22,10 @@ #import "sdk/objc/native/api/audio_device_module.h" #endif -@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents) +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ 
(RTCPeerConnectionFactoryBuilder *)defaultBuilder { - RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder { + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory(); [builder setAudioEncoderFactory:audioEncoderFactory]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index f0b0de156a..a46839b6b3 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -25,9 +25,9 @@ class AudioProcessing; NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder : NSObject +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) : NSObject -+ (RTCPeerConnectionFactoryBuilder *)builder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder; - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 0981fb3879..4cb12b0a59 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -18,7 +18,7 @@ #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" -@implementation RTCPeerConnectionFactoryBuilder { +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) { std::unique_ptr _videoEncoderFactory; std::unique_ptr _videoDecoderFactory; rtc::scoped_refptr _audioEncoderFactory; @@ -27,8 +27,8 @@ @implementation RTCPeerConnectionFactoryBuilder { rtc::scoped_refptr _audioProcessingModule; } -+ (RTCPeerConnectionFactoryBuilder *)builder { - return [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder { + return [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; } - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm index 32664e7c9f..5d5abc1511 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm @@ -39,8 +39,8 @@ - (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nati NSMutableArray *result = [NSMutableArray array]; for (auto &element : _nativeCapabilities.codecs) { - RTCRtpCodecCapability *object = - [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability:element]; + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = + [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeCodecCapability:element]; [result addObject:object]; } diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm index f310bf6829..35d21054b0 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -54,11 +54,11 @@ - (void)setName:(NSString *)name { } - (RTCRtpMediaType)kind { - return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeCodecCapability.kind]; + return [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:_nativeCodecCapability.kind]; } - 
(void)setKind:(RTCRtpMediaType)kind { - _nativeCodecCapability.kind = [RTCRtpReceiver nativeMediaTypeForMediaType:kind]; + _nativeCodecCapability.kind = [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:kind]; } - (NSNumber *)clockRate { diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index acb1b8032a..b7cc37c2f8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -71,7 +71,7 @@ - (void)setCodecPreferences:(NSArray *)c std::vector objects; - for (RTCRtpCodecCapability *object in codecPreferences) { + for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *object in codecPreferences) { objects.push_back(object.nativeCodecCapability); } @@ -90,7 +90,7 @@ - (void)setCodecPreferences:(NSArray *)c std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); for (auto & element : capabilities) { - RTCRtpCodecCapability *object = [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability: element]; + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeCodecCapability: element]; [result addObject: object]; } diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index df294d2f3e..546ec80a61 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -53,7 +53,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } - (void)dealloc { - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer); } } @@ -85,18 +85,17 @@ - (void)addRenderer:(id)renderer { } // Make sure we don't have this renderer yet. - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { if (adapter.videoRenderer == renderer) { RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; return; } } // Create a wrapper that provides a native pointer for us. 
- RTCVideoRendererAdapter* adapter = - [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCVideoRendererAdapter) alloc] initWithNativeRenderer:renderer]; [_adapters addObject:adapter]; - self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, - rtc::VideoSinkWants()); + self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, rtc::VideoSinkWants()); } - (void)removeRenderer:(id)renderer { @@ -105,9 +104,8 @@ - (void)removeRenderer:(id)renderer { return; } __block NSUInteger indexToRemove = NSNotFound; - [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, - NSUInteger idx, - BOOL *stop) { + [_adapters enumerateObjectsUsingBlock:^( + RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter, NSUInteger idx, BOOL * stop) { if (adapter.videoRenderer == renderer) { indexToRemove = idx; *stop = YES; @@ -117,8 +115,7 @@ - (void)removeRenderer:(id)renderer { RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; return; } - RTCVideoRendererAdapter *adapterToRemove = - [_adapters objectAtIndex:indexToRemove]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer); [_adapters removeObjectAtIndex:indexToRemove]; } diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h index def20eac3c..3669831fca 100644 --- a/sdk/objc/base/RTCAudioRenderer.h +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -23,11 +23,9 @@ NS_ASSUME_NONNULL_BEGIN -RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE -(RTCAudioRenderer) +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) - - (void)renderSampleBuffer : (CMSampleBufferRef)sampleBuffer - NS_SWIFT_NAME(render(sampleBuffer:)); +- (void)renderSampleBuffer: (CMSampleBufferRef)sampleBuffer NS_SWIFT_NAME(render(sampleBuffer:)); @end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h index a9dc3d8400..9995b58abb 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ -22,21 +22,21 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioCustomProcessingAdapter () +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) () // Thread safe set/get with os_unfair_lock. -@property(nonatomic, weak, nullable) id +@property(nonatomic, weak, nullable) id audioCustomProcessingDelegate; // Direct read access without lock. 
-@property(nonatomic, readonly, weak, nullable) id +@property(nonatomic, readonly, weak, nullable) id rawAudioCustomProcessingDelegate; @property(nonatomic, readonly) std::unique_ptr nativeAudioCustomProcessingModule; - (instancetype)initWithDelegate: - (nullable id)audioCustomProcessingDelegate; + (nullable id)audioCustomProcessingDelegate; @end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h index 24239eac2d..3230c19323 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioCustomProcessingAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm index c8d1dfe4f6..c0f297c786 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -31,7 +31,7 @@ int sample_rate_hz_; int num_channels_; - AudioCustomProcessingAdapter(RTCAudioCustomProcessingAdapter *adapter, os_unfair_lock *lock) { + AudioCustomProcessingAdapter(RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter, os_unfair_lock *lock) { RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; adapter_ = adapter; @@ -45,14 +45,14 @@ RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; os_unfair_lock_lock(lock_); - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; [delegate audioProcessingRelease]; os_unfair_lock_unlock(lock_); } void Initialize(int sample_rate_hz, int num_channels) override { os_unfair_lock_lock(lock_); - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; is_initialized_ = true; sample_rate_hz_ = sample_rate_hz; @@ -68,9 +68,9 @@ void Process(AudioBuffer *audio_buffer) override { return; } - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; if (delegate != nil) { - RTCAudioBuffer *audioBuffer = [[RTCAudioBuffer alloc] initWithNativeType:audio_buffer]; + RTC_OBJC_TYPE(RTCAudioBuffer) *audioBuffer = [[RTC_OBJC_TYPE(RTCAudioBuffer) alloc] initWithNativeType:audio_buffer]; [delegate audioProcessingProcess:audioBuffer]; } os_unfair_lock_unlock(lock_); @@ -79,12 +79,12 @@ void Process(AudioBuffer *audio_buffer) override { std::string ToString() const override { return "AudioCustomProcessingAdapter"; } private: - __weak RTCAudioCustomProcessingAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter_; os_unfair_lock *lock_; }; } // namespace webrtc -@implementation RTCAudioCustomProcessingAdapter { +@implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { webrtc::AudioCustomProcessingAdapter *_adapter; os_unfair_lock _lock; } @@ -109,14 +109,14 @@ - (void)dealloc { #pragma mark - Getter & Setter for audioCustomProcessingDelegate -- (nullable id)audioCustomProcessingDelegate { +- (nullable id)audioCustomProcessingDelegate { os_unfair_lock_lock(&_lock); - id delegate = _rawAudioCustomProcessingDelegate; + id delegate = 
_rawAudioCustomProcessingDelegate; os_unfair_lock_unlock(&_lock); return delegate; } -- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { +- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { os_unfair_lock_lock(&_lock); if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) { [_rawAudioCustomProcessingDelegate audioProcessingRelease]; diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h index 6a2fec9433..a8e4981fbc 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h @@ -36,7 +36,7 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate) -- (void)applyConfig: (RTCAudioProcessingConfig *)config; +- (void)applyConfig: (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; // TODO: Implement... diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h index 917d584d48..2047b3f797 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -22,18 +22,17 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCAudioProcessingConfig); -@protocol RTC_OBJC_TYPE -(RTCAudioCustomProcessingDelegate); +@protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject -- (instancetype)initWithConfig: (nullable RTCAudioProcessingConfig *)config +- (instancetype)initWithConfig: (nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; -- (void)applyConfig:(RTCAudioProcessingConfig *)config; +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; // Dynamically update delegates at runtime diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm index 3875dec533..2f592cefa4 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -23,8 +23,8 @@ @implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) { rtc::scoped_refptr _nativeAudioProcessingModule; // Custom processing adapters... 
- RTCAudioCustomProcessingAdapter *_capturePostProcessingAdapter; - RTCAudioCustomProcessingAdapter *_renderPreProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_capturePostProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_renderPreProcessingAdapter; } - (instancetype)init { @@ -33,7 +33,7 @@ - (instancetype)init { renderPreProcessingDelegate:nil]; } -- (instancetype)initWithConfig:(nullable RTCAudioProcessingConfig *)config +- (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate renderPreProcessingDelegate:(nullable id) @@ -48,12 +48,12 @@ - (instancetype)initWithConfig:(nullable RTCAudioProcessingConfig *)config } _capturePostProcessingAdapter = - [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:capturePostProcessingDelegate]; + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:capturePostProcessingDelegate]; builder.SetCapturePostProcessing( _capturePostProcessingAdapter.nativeAudioCustomProcessingModule); _renderPreProcessingAdapter = - [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:renderPreProcessingDelegate]; + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:renderPreProcessingDelegate]; builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule); _nativeAudioProcessingModule = builder.Create(); @@ -83,7 +83,7 @@ - (void)setRenderPreProcessingDelegate: #pragma mark - RTCAudioProcessingModule protocol -- (void)applyConfig:(RTCAudioProcessingConfig *)config { +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); } diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 6a75f01479..1799c11415 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate * methods on the AudioSessionObserver. 
 */
-@interface RTCNativeAudioSessionDelegateAdapter : NSObject
+@interface RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) : NSObject
 
 - (instancetype)init NS_UNAVAILABLE;
 
diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
index daddf314a4..f652ad1e5f 100644
--- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
+++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm
@@ -14,7 +14,7 @@
 
 #import "base/RTCLogging.h"
 
-@implementation RTCNativeAudioSessionDelegateAdapter {
+@implementation RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) {
   webrtc::AudioSessionObserver *_observer;
 }
 
diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h
index 7e293a1a56..30aed69d29 100644
--- a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h
+++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h
@@ -30,7 +30,7 @@ RTC_OBJC_EXPORT
 -(void)didSourceCaptureError;
 @end
 
-@interface RTCDesktopCapturer ()
+@interface RTC_OBJC_TYPE(RTCDesktopCapturer) ()
 
 @property(nonatomic, readonly)std::shared_ptr nativeCapturer;
 
diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h
index 160c00d208..b63912acf0 100644
--- a/sdk/objc/components/capturer/RTCDesktopCapturer.h
+++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h
@@ -23,18 +23,18 @@ NS_ASSUME_NONNULL_BEGIN
 
-@class RTCDesktopCapturer;
+@class RTC_OBJC_TYPE(RTCDesktopCapturer);
 
 RTC_OBJC_EXPORT
 @protocol RTC_OBJC_TYPE (RTCDesktopCapturerDelegate)
 
--(void)didSourceCaptureStart:(RTCDesktopCapturer *) capturer;
+-(void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer;
 
--(void)didSourceCapturePaused:(RTCDesktopCapturer *) capturer;
+-(void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer;
 
--(void)didSourceCaptureStop:(RTCDesktopCapturer *) capturer;
+-(void)didSourceCaptureStop:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer;
 
--(void)didSourceCaptureError:(RTCDesktopCapturer *) capturer;
+-(void)didSourceCaptureError:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer;
 
 @end
 
 RTC_OBJC_EXPORT
@@ -42,9 +42,9 @@ RTC_OBJC_EXPORT
 // RTCVideoCapturerDelegate (usually RTCVideoSource).
@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) -@property(nonatomic, readonly) RTCDesktopSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSource) *source; -- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; - (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm index a7d5c60eb2..a1948684d3 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer.mm +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -32,7 +32,7 @@ @implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { @synthesize nativeCapturer = _nativeCapturer; @synthesize source = _source; -- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { if (self = [super initWithDelegate:captureDelegate]) { webrtc::DesktopType captureType = webrtc::kScreen; if(source.sourceType == RTCDesktopSourceTypeWindow) { diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h index fc3b080ad2..eb1e76ddbb 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h @@ -23,7 +23,7 @@ namespace webrtc { NS_ASSUME_NONNULL_BEGIN -@interface RTCDesktopMediaList () +@interface RTC_OBJC_TYPE(RTCDesktopMediaList) () @property(nonatomic, readonly)std::shared_ptr nativeMediaList; diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm index 7aa7dca1be..2bd6c1da0e 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList.mm +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -19,9 +19,9 @@ #import "RTCDesktopSource+Private.h" #import "RTCDesktopMediaList+Private.h" -@implementation RTCDesktopMediaList { +@implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { RTCDesktopSourceType _sourceType; - NSMutableArray* _sources; + NSMutableArray* _sources; __weak id _delegate; } @@ -45,24 +45,24 @@ - (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateT return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail); } --(NSArray*) getSources { +-(NSArray*) getSources { _sources = [NSMutableArray array]; int sourceCount = _nativeMediaList->GetSourceCount(); for (int i = 0; i < sourceCount; i++) { webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i); - [_sources addObject:[[RTCDesktopSource alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; + [_sources addObject:[[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; } return _sources; } -(void)mediaSourceAdded:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [[RTCDesktopSource alloc] initWithNativeSource:source sourceType:_sourceType]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:source sourceType:_sourceType]; [_sources addObject:desktopSource]; 
[_delegate didDesktopSourceAdded:desktopSource]; } -(void)mediaSourceRemoved:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [_sources removeObject:desktopSource]; [_delegate didDesktopSourceRemoved:desktopSource]; @@ -70,7 +70,7 @@ -(void)mediaSourceRemoved:(webrtc::MediaSource *) source { } -(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [desktopSource setName:source->name().c_str()]; [_delegate didDesktopSourceNameChanged:desktopSource]; @@ -78,16 +78,16 @@ -(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { } -(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [desktopSource setThumbnail:source->thumbnail()]; [_delegate didDesktopSourceThumbnailChanged:desktopSource]; } } --(RTCDesktopSource *)getSourceById:(webrtc::MediaSource *) source { +-(RTC_OBJC_TYPE(RTCDesktopSource) *)getSourceById:(webrtc::MediaSource *) source { NSEnumerator *enumerator = [_sources objectEnumerator]; - RTCDesktopSource *object; + RTC_OBJC_TYPE(RTCDesktopSource) *object; while ((object = enumerator.nextObject) != nil) { if(object.nativeMediaSource == source) { return object; diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h index f5a0d14e0f..3f4c4ef25f 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDesktopSource () +@interface RTC_OBJC_TYPE(RTCDesktopSource) () - (instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource sourceType:(RTCDesktopSourceType) sourceType; diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm index 3f5f23894b..e1bdc6893a 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource.mm +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -19,7 +19,7 @@ #import "RTCDesktopSource.h" #import "RTCDesktopSource+Private.h" -@implementation RTCDesktopSource { +@implementation RTC_OBJC_TYPE(RTCDesktopSource) { NSString *_sourceId; NSString *_name; NSImage *_thumbnail; diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h index b5c786be18..f3761f7ba3 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor+Private.h +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -9,16 +9,18 @@ */ #import "RTCNetworkMonitor.h" +#import "RTCMacros.h" #include "sdk/objc/native/src/network_monitor_observer.h" -@interface RTCNetworkMonitor () +@interface RTC_OBJC_TYPE (RTCNetworkMonitor) +() -/** `observer` is a raw pointer and should be kept alive - * for this object's lifetime. - */ -- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer - NS_DESIGNATED_INITIALIZER; + /** `observer` is a raw pointer and should be kept alive + * for this object's lifetime. 
+ */ + - (instancetype)initWithObserver + : (webrtc::NetworkMonitorObserver *)observer NS_DESIGNATED_INITIALIZER; /** Stops the receiver from posting updates to `observer`. */ - (void)stop; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.h b/sdk/objc/components/network/RTCNetworkMonitor.h index 21d22f5463..4b0cb4baf0 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -10,12 +10,14 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /** Listens for NWPathMonitor updates and forwards the results to a C++ * observer. */ -@interface RTCNetworkMonitor : NSObject +@interface RTC_OBJC_TYPE (RTCNetworkMonitor): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 7e75b2b4c0..2e42ab5290 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -46,7 +46,7 @@ } // namespace -@implementation RTCNetworkMonitor { +@implementation RTC_OBJC_TYPE (RTCNetworkMonitor) { webrtc::NetworkMonitorObserver *_observer; nw_path_monitor_t _pathMonitor; dispatch_queue_t _monitorQueue; @@ -63,12 +63,12 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { return nil; } RTCLog(@"NW path monitor created."); - __weak RTCNetworkMonitor *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { if (weakSelf == nil) { return; } - RTCNetworkMonitor *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; RTCLog(@"NW path monitor: updated."); nw_path_status_t status = nw_path_get_status(path); if (status == nw_path_status_invalid) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h index e5987fe22a..c4e2724042 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h @@ -13,5 +13,5 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLI420Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLI420Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index f4c76fa313..963f36c62a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -70,7 +70,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLI420Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLI420Renderer) { // Textures. id _yTexture; id _uTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index d30b83037f..330533bef0 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -8,5 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#import "RTCMacros.h" + // Deprecated: Use RTCMTLVideoView instead -@compatibility_alias RTCMTLNSVideoView RTCMTLVideoView; +@compatibility_alias RTC_OBJC_TYPE(RTCMTLNSVideoView) RTC_OBJC_TYPE(RTCMTLVideoView); diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h index 866b7ea17e..125612a269 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -13,6 +13,6 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLNV12Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLNV12Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 7b037c6dbc..c4000b1b1d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -60,7 +60,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLNV12Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLNV12Renderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _yTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h index 9db422cd22..5e355a8504 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -11,12 +11,13 @@ #import #import "RTCMTLRenderer.h" +#import "RTCMacros.h" /** @abstract RGB/BGR renderer. * @discussion This renderer handles both kCVPixelFormatType_32BGRA and * kCVPixelFormatType_32ARGB. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRGBRenderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE (RTCMTLRGBRenderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index e5dc4ef80a..6ca4a4000d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -30,12 +30,12 @@ } Vertex; typedef struct { - float4 position[[position]]; + float4 position [[position]]; float2 texcoord; } VertexIO; - vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], - uint vid[[vertex_id]]) { + vertex VertexIO vertexPassthrough(constant Vertex * verticies [[buffer(0)]], + uint vid [[vertex_id]]) { VertexIO out; constant Vertex &v = verticies[vid]; out.position = float4(float2(v.position), 0.0, 1.0); @@ -43,9 +43,9 @@ vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], return out; } - fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], - texture2d texture[[texture(0)]], - constant bool &isARGB[[buffer(0)]]) { + fragment half4 fragmentColorConversion(VertexIO in [[stage_in]], + texture2d texture [[texture(0)]], + constant bool &isARGB [[buffer(0)]]) { constexpr sampler s(address::clamp_to_edge, filter::linear); half4 out = texture.sample(s, in.texcoord); @@ -56,7 +56,7 @@ fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], return out; }); -@implementation RTCMTLRGBRenderer { +@implementation RTC_OBJC_TYPE (RTCMTLRGBRenderer) { // Textures. 
CVMetalTextureCacheRef _textureCache; id _texture; @@ -73,8 +73,8 @@ - (BOOL)addRenderingDestination:(__kindof MTKView *)view { } - (BOOL)initializeTextureCache { - CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], - nil, &_textureCache); + CVReturn status = CVMetalTextureCacheCreate( + kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache); if (status != kCVReturnSuccess) { RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status); return NO; @@ -130,9 +130,15 @@ - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { return NO; } - CVReturn result = CVMetalTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat, - width, height, 0, &textureOut); + CVReturn result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _textureCache, + pixelBuffer, + nil, + mtlPixelFormat, + width, + height, + 0, + &textureOut); if (result == kCVReturnSuccess) { gpuTexture = CVMetalTextureGetTexture(textureOut); } diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index 916d4d4430..f6a82db56a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMTLRenderer (Private) +@interface RTC_OBJC_TYPE(RTCMTLRenderer) (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index aa31545973..6bbca3d985 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN /** * Protocol defining ability to render RTCVideoFrame in Metal enabled views. */ -@protocol RTCMTLRenderer +@protocol RTC_OBJC_TYPE(RTCMTLRenderer) /** * Method to be implemented to perform actual rendering of the provided frame. @@ -49,7 +49,7 @@ NS_ASSUME_NONNULL_BEGIN * Implementation of RTCMTLRenderer protocol. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRenderer : NSObject +@interface RTC_OBJC_TYPE(RTCMTLRenderer) : NSObject /** @abstract A wrapped RTCVideoRotation, or nil. @discussion When not nil, the rotation of the actual frame is ignored when rendering. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 410590a7b1..ca3fcc3e51 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -87,7 +87,7 @@ static inline void getCubeVertexData(int cropX, // In future we might use triple buffering method if it improves performance. static const NSInteger kMaxInflightBuffers = 1; -@implementation RTCMTLRenderer { +@implementation RTC_OBJC_TYPE(RTCMTLRenderer) { __kindof MTKView *_view; // Controller. 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index 8b2ec1aaa3..d4d98a0bf4 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -22,17 +22,12 @@ #import "RTCMTLNV12Renderer.h" #import "RTCMTLRGBRenderer.h" -// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime. -// Linking errors occur when compiling for architectures that don't support Metal. -#define MTKViewClass NSClassFromString(@"MTKView") -#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") -#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") -#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") - -@interface RTC_OBJC_TYPE (RTCMTLVideoView) -() @property(nonatomic) RTCMTLI420Renderer *rendererI420; -@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; -@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +#import "RTCMTLRenderer+Private.h" + +@interface RTC_OBJC_TYPE (RTCMTLVideoView) () +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLI420Renderer) *rendererI420; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLNV12Renderer) * rendererNV12; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLRGBRenderer) * rendererRGB; @property(nonatomic) MTKView *metalView; @property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @@ -96,19 +91,19 @@ - (void)setVideoContentMode:(UIViewContentMode)mode { #pragma mark - Private + (MTKView *)createMetalView:(CGRect)frame { - return [[MTKViewClass alloc] initWithFrame:frame]; + return [[MTKView alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer *)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLNV12Renderer) *)createNV12Renderer { + return [[RTC_OBJC_TYPE(RTCMTLNV12Renderer) alloc] init]; } -+ (RTCMTLI420Renderer *)createI420Renderer { - return [[RTCMTLI420RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLI420Renderer) *)createI420Renderer { + return [[RTC_OBJC_TYPE(RTCMTLI420Renderer) alloc] init]; } -+ (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLRGBRenderer) *)createRGBRenderer { + return [[RTC_OBJC_TYPE(RTCMTLRGBRenderer) alloc] init]; } - (void)configure { @@ -159,7 +154,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { return; } - RTCMTLRenderer *renderer; + RTC_OBJC_TYPE(RTCMTLRenderer) * renderer; if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 71a073ab21..b00cf8047d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. 
*/ -@interface RTCDefaultShader : NSObject +@interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index 51dca3223d..b0c111293f 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -69,7 +69,7 @@ " 1.0);\n" " }\n"; -@implementation RTCDefaultShader { +@implementation RTC_OBJC_TYPE(RTCDefaultShader) { GLuint _vertexBuffer; GLuint _vertexArray; // Store current rotation and only upload new vertex data when rotation changes. diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h index b78501e9e6..1c5b64fdfc 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -10,11 +10,13 @@ #import +#import "RTCMacros.h" + // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen // refreshes, which should be 30fps. We wrap the display link in order to avoid // a retain cycle since CADisplayLink takes a strong reference onto its target. // The timer is paused by default. -@interface RTCDisplayLinkTimer : NSObject +@interface RTC_OBJC_TYPE (RTCDisplayLinkTimer): NSObject @property(nonatomic) BOOL isPaused; diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m index 906bb898d6..f4cf03304d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -12,7 +12,7 @@ #import -@implementation RTCDisplayLinkTimer { +@implementation RTC_OBJC_TYPE (RTCDisplayLinkTimer) { CADisplayLink *_displayLink; void (^_timerHandler)(void); } @@ -21,17 +21,15 @@ - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { NSParameterAssert(timerHandler); if (self = [super init]) { _timerHandler = timerHandler; - _displayLink = - [CADisplayLink displayLinkWithTarget:self - selector:@selector(displayLinkDidFire:)]; + _displayLink = [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; _displayLink.paused = YES; #if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 _displayLink.preferredFramesPerSecond = 30; #else [_displayLink setFrameInterval:2]; #endif - [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] - forMode:NSRunLoopCommonModes]; + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; } return self; } diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 89e62d2ce7..0a00494d2d 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -42,14 +42,14 @@ @interface RTC_OBJC_TYPE (RTCEAGLVideoView) @end @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { - RTCDisplayLinkTimer *_timer; + RTC_OBJC_TYPE(RTCDisplayLinkTimer) * _timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. 
by // setNeedsDisplay) BOOL _isDirty; id _shader; - RTCNV12TextureCache *_nv12TextureCache; - RTCI420TextureCache *_i420TextureCache; + RTC_OBJC_TYPE(RTCNV12TextureCache) *_nv12TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *_i420TextureCache; // As timestamps should be unique between frames, will store last // drawn frame timestamp instead of the whole frame to reduce memory usage. int64_t _lastDrawnFrameTimeStampNs; @@ -61,11 +61,11 @@ @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { @synthesize rotationOverride = _rotationOverride; - (instancetype)initWithFrame:(CGRect)frame { - return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithCoder:(NSCoder *)aDecoder { - return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; + return [self initWithCoder:aDecoder shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { @@ -90,8 +90,7 @@ - (instancetype)initWithCoder:(NSCoder *)aDecoder } - (BOOL)configure { - EAGLContext *glContext = - [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; if (!glContext) { glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; } @@ -102,8 +101,7 @@ - (BOOL)configure { _glContext = glContext; // GLKView manages a framebuffer for us. - _glkView = [[GLKView alloc] initWithFrame:CGRectZero - context:_glContext]; + _glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext]; _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone; _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone; @@ -115,8 +113,7 @@ - (BOOL)configure { // Listen to application state in order to clean up OpenGL before app goes // away. - NSNotificationCenter *notificationCenter = - [NSNotificationCenter defaultCenter]; + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; [notificationCenter addObserver:self selector:@selector(willResignActive) name:UIApplicationWillResignActiveNotification @@ -130,7 +127,7 @@ - (BOOL)configure { // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. 
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; - _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf displayLinkTimerDidFire]; }]; @@ -141,14 +138,13 @@ - (BOOL)configure { } - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - _glkView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + _glkView.multipleTouchEnabled = multipleTouchEnabled; } - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - UIApplicationState appState = - [UIApplication sharedApplication].applicationState; + UIApplicationState appState = [UIApplication sharedApplication].applicationState; if (appState == UIApplicationStateActive) { [self teardownGL]; } @@ -189,14 +185,14 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { return; } RTCVideoRotation rotation = frame.rotation; - if(_rotationOverride != nil) { - [_rotationOverride getValue: &rotation]; + if (_rotationOverride != nil) { + [_rotationOverride getValue:&rotation]; } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { - _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + _nv12TextureCache = [[RTC_OBJC_TYPE(RTCNV12TextureCache) alloc] initWithContext:_glContext]; } if (_nv12TextureCache) { [_nv12TextureCache uploadFrameToTextures:frame]; @@ -211,7 +207,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } else { if (!_i420TextureCache) { - _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext]; + _i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:_glContext]; } [_i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 9fdcc5a695..2c2319d043 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -11,7 +11,7 @@ #import "RTCOpenGLDefines.h" #import "base/RTCVideoFrame.h" -@interface RTCI420TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCI420TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 5dccd4bf6a..0ed19a842c 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -28,7 +28,7 @@ static const GLsizei kNumTexturesPerSet = 3; static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; -@implementation RTCI420TextureCache { +@implementation RTC_OBJC_TYPE(RTCI420TextureCache) { BOOL _hasUnpackRowLength; GLint _currentTextureSet; // Handles for OpenGL constructs. 
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m index 168c73126f..97957faf24 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -29,7 +29,7 @@ @interface RTC_OBJC_TYPE (RTCNSGLVideoView) // from the display link callback so atomicity is required. @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@property(atomic, strong) RTCI420TextureCache *i420TextureCache; +@property(atomic, strong) RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache; - (void)drawFrame; @end @@ -57,7 +57,7 @@ @implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { @synthesize i420TextureCache = _i420TextureCache; - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { - return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame pixelFormat:format shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(NSRect)frame @@ -140,9 +140,9 @@ - (void)drawFrame { // TODO(magjed): Add support for NV12 texture cache on OS X. frame = [frame newI420VideoFrame]; if (!self.i420TextureCache) { - self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context]; + self.i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:context]; } - RTCI420TextureCache *i420TextureCache = self.i420TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache = self.i420TextureCache; if (i420TextureCache) { [i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index f202b836b5..420490b1ab 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCNV12TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCNV12TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uvTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index a520ac45b4..096767be55 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -14,7 +14,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -@implementation RTCNV12TextureCache { +@implementation RTC_OBJC_TYPE(RTCNV12TextureCache) { CVOpenGLESTextureCacheRef _textureCache; CVOpenGLESTextureRef _yTextureRef; CVOpenGLESTextureRef _uvTextureRef; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 61da5f4514..3cc92382e6 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -55,12 +55,12 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTCVideoEncoderVP9 scalabilityModes]]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] 
initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTCVideoEncoderAV1 scalabilityModes]]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; [result addObject:av1Info]; #endif diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm index e2ca5867c0..55916435a1 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -28,7 +28,7 @@ - (instancetype)initWithPrimary:(id)prima } - (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - return [RTCVideoEncoderSimulcast simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; + return [RTC_OBJC_TYPE(RTCVideoEncoderSimulcast) simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; } - (NSArray *)supportedCodecs { diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index a7260ab802..d5cf6fd563 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -20,7 +20,7 @@ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { - RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter = [[RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) alloc] init]; rtc::scoped_refptr objc_video_track_source = rtc::make_ref_counted(adapter); rtc::scoped_refptr video_source = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index 605cf402d4..69f04164e7 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -25,7 +25,7 @@ #include "sdk/objc/base/RTCMacros.h" #include "voice_processing_audio_unit.h" -RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)); namespace webrtc { @@ -279,7 +279,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, bool is_interrupted_; // Audio interruption observer instance. - RTCNativeAudioSessionDelegateAdapter* audio_session_observer_ + RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* audio_session_observer_ RTC_GUARDED_BY(thread_); // Set to true if we've activated the audio session. 
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 1689aa39e1..3211ce69d5 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -110,7 +110,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = [[RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) alloc] initWithObserver:this]; } AudioDeviceIOS::~AudioDeviceIOS() { diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index fcfe7a6e8b..88f6f19f99 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/include/audio_device.h" #include "rtc_base/thread.h" -@class ObjCAudioDeviceDelegate; +@class RTC_OBJC_TYPE(ObjCAudioDeviceDelegate); namespace webrtc { @@ -267,7 +267,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { rtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - ObjCAudioDeviceDelegate* audio_device_delegate_; + RTC_OBJC_TYPE(ObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index d629fae20f..5fb72d8a5c 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -77,7 +77,7 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] + audio_device_delegate_ = [[RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) alloc] initWithAudioDeviceModule:rtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 3af079dad9..0b546f269c 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,7 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate : NSObject +@interface RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) : NSObject - (instancetype)initWithAudioDeviceModule: (rtc::scoped_refptr)audioDeviceModule diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index 156d6326a4..f4c8cfb71a 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -55,7 +55,7 @@ } // namespace -@implementation ObjCAudioDeviceDelegate { +@implementation RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) { rtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h index 709e9dfbe5..c5440d587b 100644 --- a/sdk/objc/native/src/objc_network_monitor.h +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -59,7 +59,7 @@ class ObjCNetworkMonitor : public rtc::NetworkMonitorInterface, std::map adapter_type_by_name_ RTC_GUARDED_BY(thread_); rtc::scoped_refptr safety_flag_; - RTCNetworkMonitor* network_monitor_ = nil; + RTC_OBJC_TYPE(RTCNetworkMonitor) * network_monitor_ = nil; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index 535548c64c..e0785e6d0b 100644 --- 
a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -39,7 +39,7 @@ thread_ = rtc::Thread::Current(); RTC_DCHECK_RUN_ON(thread_); safety_flag_->SetAlive(); - network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + network_monitor_ = [[RTC_OBJC_TYPE(RTCNetworkMonitor) alloc] initWithObserver:this]; if (network_monitor_ == nil) { RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; } diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 19a3d6db43..5fe39baade 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,7 +19,7 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) : NSObject @end namespace webrtc { @@ -28,7 +28,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { public: ObjCVideoTrackSource(); explicit ObjCVideoTrackSource(bool is_screencast); - explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter); + explicit ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter); bool is_screencast() const override; @@ -50,7 +50,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { rtc::VideoBroadcaster broadcaster_; rtc::TimestampAligner timestamp_aligner_; - RTCObjCVideoSourceAdapter* adapter_; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter_; bool is_screencast_; }; diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 7937e90505..401db1d111 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -17,11 +17,11 @@ #include "api/video/i420_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCObjCVideoSourceAdapter () +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) () @property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource; @end -@implementation RTCObjCVideoSourceAdapter +@implementation RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) @synthesize objCVideoTrackSource = _objCVideoTrackSource; @@ -40,7 +40,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter) : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index f152eeec91..159025803e 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -32,8 +32,8 @@ @interface RTC_OBJC_TYPE (RTCMTLVideoView) + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; -+ (id)createNV12Renderer; -+ (id)createI420Renderer; ++ (id)createNV12Renderer; ++ (id)createI420Renderer; - (void)drawInMTKView:(id)view; @end @@ -91,7 +91,7 @@ - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { } - (id)rendererMockWithSuccessfulSetup:(BOOL)success { - id rendererMock = OCMClassMock([RTCMTLRenderer class]); + id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]); OCMStub([rendererMock addRenderingDestination:[OCMArg 
any]]).andReturn(success); return rendererMock; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 5ba5a52a53..c4dda5aef1 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -46,7 +46,7 @@ - (void)testBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); @@ -63,7 +63,7 @@ - (void)testDefaultComponentsBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) defaultBuilder]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil);
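Taken together, the hunks above apply one mechanical change: every remaining bare Objective-C class name (renderers, texture caches, timers, adapters, delegates) is wrapped in RTC_OBJC_TYPE(...) so the exported symbol can be prefixed at build time. A minimal sketch of the naming machinery this relies on follows; the real definitions live in sdk/objc/base/RTCMacros.h and may differ in detail (the #ifndef guard below is only for the sketch).

    // Two-level concatenation so the prefix macro is expanded before token pasting.
    #ifndef RTC_OBJC_TYPE_PREFIX
    #define RTC_OBJC_TYPE_PREFIX            // empty by default: names are unchanged
    #endif
    #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
    #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
    #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

    // With the empty default prefix:
    //   RTC_OBJC_TYPE(RTCDefaultShader)  ->  RTCDefaultShader
    // With a prefix such as MyApp supplied by the embedding build:
    //   RTC_OBJC_TYPE(RTCDefaultShader)  ->  MyAppRTCDefaultShader
    // which is what lets two copies of the SDK coexist in one process without
    // Objective-C class-registration collisions.

Classes that were still referenced by their bare names (RTCDefaultShader, RTCI420TextureCache, RTCDisplayLinkTimer, and so on) would otherwise keep their unprefixed symbols and defeat that renaming; closing that gap is what these hunks do.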
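The C++ headers touched here (audio_device_ios.h, objc_audio_device.h, objc_network_monitor.h, objc_video_track_source.h) forward-declare the now-prefixed Objective-C classes instead of importing their headers. The sketch below shows that idiom under the assumption that RTC_FWD_DECL_OBJC_CLASS expands to @class under Objective-C(++) and to an opaque struct typedef under plain C++, as sdk/objc/base/RTCMacros.h is believed to do; the holder type is hypothetical and only illustrates why the pointer member compiles in both languages.

    // Assumed shape of the forward-declaration macro (RTC_OBJC_TYPE as sketched above).
    #if defined(__OBJC__)
    #define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname
    #else
    #define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname
    #endif

    RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter));

    namespace webrtc {
    // Hypothetical holder: pure C++ code can store and pass the pointer, while only
    // Objective-C++ (.mm) files that import the class header can actually message it.
    struct AudioSessionObserverHolder {
      RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* observer = nullptr;
    };
    }  // namespace webrtc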
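The RTCDisplayLinkTimer hunks keep the existing rationale intact: CADisplayLink retains its target, so a view that passed itself as the target would never deallocate; the timer wrapper takes that strong reference instead, and callers reach the view only through a weakly captured block. A small illustrative consumer is sketched below; MyRenderView and startRedrawTimer are hypothetical, while initWithTimerHandler: and isPaused come from the header shown in the patch.

    #import <UIKit/UIKit.h>
    #import "RTCDisplayLinkTimer.h"  // declares RTC_OBJC_TYPE(RTCDisplayLinkTimer)

    // Hypothetical consumer, for illustration only.
    @interface MyRenderView : UIView
    @end

    @implementation MyRenderView {
      RTC_OBJC_TYPE(RTCDisplayLinkTimer) *_timer;  // the view owns the timer
    }

    - (void)startRedrawTimer {
      __weak MyRenderView *weakSelf = self;
      _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{
        // The block captures the view weakly, so CADisplayLink's strong reference
        // stops at the timer wrapper and the view can still be deallocated.
        [weakSelf setNeedsDisplay];
      }];
      _timer.isPaused = NO;  // the timer starts paused, per the header comment
    }
    @end

The reflowed -configure hunk in RTCEAGLVideoView uses exactly this shape, with displayLinkTimerDidFire as the redraw entry point.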
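In RTCMTLVideoView the patch also drops the NSClassFromString indirection and messages the prefixed renderer classes directly, and -drawInMTKView: now types its local as RTC_OBJC_TYPE(RTCMTLRenderer) *. The per-frame selection that local feeds into is sketched below as a method-body excerpt: the lazily created rendererNV12 / rendererRGB / rendererI420 properties and the create* helpers are taken from the hunks, the concrete renderers are assumed to share the RTCMTLRenderer base class, and the exact pixel-format branching is an assumption since the diff elides it.

    // Assumed renderer dispatch inside -drawInMTKView:, where videoFrame is the
    // frame captured for this draw call.
    RTC_OBJC_TYPE(RTCMTLRenderer) *renderer = nil;
    if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
      RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
          (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
      const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
      if (pixelFormat == kCVPixelFormatType_32BGRA ||
          pixelFormat == kCVPixelFormatType_32ARGB) {
        if (!self.rendererRGB) {
          self.rendererRGB = [[self class] createRGBRenderer];
        }
        renderer = self.rendererRGB;
      } else {
        // Bi-planar YUV (NV12-style) pixel buffers go through the NV12 renderer.
        if (!self.rendererNV12) {
          self.rendererNV12 = [[self class] createNV12Renderer];
        }
        renderer = self.rendererNV12;
      }
    } else {
      // Anything else is drawn via the I420 path.
      if (!self.rendererI420) {
        self.rendererI420 = [[self class] createI420Renderer];
      }
      renderer = self.rendererI420;
    }
    // `renderer` then draws videoFrame into the MTKView (elided here).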
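The reflowed -configure hunk in RTCEAGLVideoView only changes whitespace, but the behavior it preserves is a deliberate fallback: try an OpenGL ES 3 context first, drop to ES 2, and treat failure of both as "configuration failed". Extracted into a standalone helper for clarity (the function name is hypothetical):

    #import <OpenGLES/EAGL.h>

    // Prefer ES3, fall back to ES2; may still return nil on hardware or
    // simulators without OpenGL ES support.
    static EAGLContext *CreateBestEffortGLContext(void) {
      EAGLContext *glContext =
          [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
      if (!glContext) {
        glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
      }
      return glContext;
    }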
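Finally, the unit-test hunks show the knock-on effect for test code: OCMock class mocks must target the wrapped names too, otherwise they would reference the unprefixed class, which may not exist under a non-empty RTC_OBJC_TYPE_PREFIX. The shape from RTCMTLVideoView_xctest.m, with the stubbed return hard-coded to YES for brevity:

    id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]);
    OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(YES);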