andersc f9f448b32d ObjC: Include additional files in umbrella header.
RTCAudioSession and RTCAudioSessionConfiguration allow users to handle
audio manually and are used by the AppRTCMobile example.

RTCVideoFrameBuffer exposes a protocol that users can implement to
create their own frame buffer formats, as long as they can be converted
into i420.

RTCVideoCapturer and RTCVideoViewShading are imported by other headers
already included by the umbrella header, so they were always accessible
to users. Added them to the umbrella header to make it explicit.

BUG=webrtc:7351, webrtc:8027

Review-Url: https://codereview.webrtc.org/2994253002
Cr-Commit-Position: refs/heads/master@{#19379}
2017-08-17 09:31:55 +00:00

140 lines
4.0 KiB
Plaintext

/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
@implementation RTCVideoFrame {
  RTCVideoRotation _rotation;
  int64_t _timeStampNs;
}

@synthesize buffer = _buffer;
@synthesize timeStamp;

#pragma mark - Private

// Returns the wrapped buffer typed as an I420 buffer when it conforms to the
// RTCI420Buffer protocol, or nil otherwise. Because messaging nil in
// Objective-C returns zero/NULL for scalar and pointer return types, the
// plane/stride accessors below can forward to this helper unconditionally —
// replacing six copies of the same conformance-check-and-cast pattern.
- (id<RTCI420Buffer>)i420Buffer {
  if ([_buffer conformsToProtocol:@protocol(RTCI420Buffer)]) {
    return (id<RTCI420Buffer>)_buffer;
  }
  return nil;
}

#pragma mark - Accessors

- (int)width {
  return _buffer.width;
}

- (int)height {
  return _buffer.height;
}

- (RTCVideoRotation)rotation {
  return _rotation;
}

// Y/U/V plane pointers; NULL when the underlying buffer is not I420
// (nil-messaging on the helper's result yields NULL automatically).
- (const uint8_t *)dataY {
  return [self i420Buffer].dataY;
}

- (const uint8_t *)dataU {
  return [self i420Buffer].dataU;
}

- (const uint8_t *)dataV {
  return [self i420Buffer].dataV;
}

// Per-plane strides; 0 when the underlying buffer is not I420.
- (int)strideY {
  return [self i420Buffer].strideY;
}

- (int)strideU {
  return [self i420Buffer].strideU;
}

- (int)strideV {
  return [self i420Buffer].strideV;
}

- (int64_t)timeStampNs {
  return _timeStampNs;
}

// Returns the backing CVPixelBuffer when this frame wraps one, else NULL.
- (CVPixelBufferRef)nativeHandle {
  if ([_buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    return ((RTCCVPixelBuffer *)_buffer).pixelBuffer;
  }
  return nullptr;
}

// Returns a new frame whose buffer has been converted to I420, preserving
// the original rotation and capture timestamp.
- (RTCVideoFrame *)newI420VideoFrame {
  return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420]
                                      rotation:_rotation
                                   timeStampNs:_timeStampNs];
}

#pragma mark - Initializers

/** Convenience initializer: wraps a CVPixelBuffer with no crop/scale applied. */
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                           rotation:(RTCVideoRotation)rotation
                        timeStampNs:(int64_t)timeStampNs {
  return [self initWithBuffer:[[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]
                     rotation:rotation
                  timeStampNs:timeStampNs];
}

/**
 * Convenience initializer: wraps a CVPixelBuffer together with crop/scale
 * metadata (the crop rectangle is applied first, then scaled to
 * scaledWidth x scaledHeight when the buffer is converted).
 */
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
                        scaledWidth:(int)scaledWidth
                       scaledHeight:(int)scaledHeight
                          cropWidth:(int)cropWidth
                         cropHeight:(int)cropHeight
                              cropX:(int)cropX
                              cropY:(int)cropY
                           rotation:(RTCVideoRotation)rotation
                        timeStampNs:(int64_t)timeStampNs {
  RTCCVPixelBuffer *rtcPixelBuffer =
      [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer
                                       adaptedWidth:scaledWidth
                                      adaptedHeight:scaledHeight
                                          cropWidth:cropWidth
                                         cropHeight:cropHeight
                                              cropX:cropX
                                              cropY:cropY];
  return [self initWithBuffer:rtcPixelBuffer rotation:rotation timeStampNs:timeStampNs];
}

/** Designated initializer: wraps an arbitrary frame buffer implementation. */
- (instancetype)initWithBuffer:(id<RTCVideoFrameBuffer>)buffer
                      rotation:(RTCVideoRotation)rotation
                   timeStampNs:(int64_t)timeStampNs {
  if (self = [super init]) {
    _buffer = buffer;
    _rotation = rotation;
    _timeStampNs = timeStampNs;
  }
  return self;
}

@end