Move Objective-C video renderers to webrtc/api/objc.
BUG=
R=tkchin@webrtc.org

Review URL: https://codereview.webrtc.org/1542473003 .

Patch from Jon Hjelle <hjon@andyet.net>.

Cr-Commit-Position: refs/heads/master@{#11209}

parent 81028796bc
commit e799badacc
@@ -8,6 +8,15 @@
import("../build/webrtc.gni")

config("ios_config") {
  libs = [
    "CoreGraphics.framework",
    "GLKit.framework",
    "OpenGLES.framework",
    "QuartzCore.framework",
  ]
}

if (is_ios) {
  source_set("rtc_api_objc") {
    deps = [
@@ -32,6 +41,8 @@ if (is_ios) {
      "objc/RTCMediaConstraints+Private.h",
      "objc/RTCMediaConstraints.h",
      "objc/RTCMediaConstraints.mm",
      "objc/RTCOpenGLVideoRenderer.h",
      "objc/RTCOpenGLVideoRenderer.mm",
      "objc/RTCSessionDescription+Private.h",
      "objc/RTCSessionDescription.h",
      "objc/RTCSessionDescription.mm",
@@ -41,7 +52,22 @@ if (is_ios) {
      "objc/RTCVideoFrame+Private.h",
      "objc/RTCVideoFrame.h",
      "objc/RTCVideoFrame.mm",
      "objc/RTCVideoRenderer.h",
      "objc/WebRTC-Prefix.pch",
    ]

    if (is_ios) {
      sources += [
        "objc/RTCEAGLVideoView.h",
        "objc/RTCEAGLVideoView.m",
      ]
    }

    if (is_mac) {
      sources += [
        "objc/RTCNSGLVideoView.h",
        "objc/RTCNSGLVideoView.m",
      ]
    }
  }
}
@@ -31,6 +31,8 @@
        'objc/RTCMediaStreamTrack+Private.h',
        'objc/RTCMediaStreamTrack.h',
        'objc/RTCMediaStreamTrack.mm',
        'objc/RTCOpenGLVideoRenderer.h',
        'objc/RTCOpenGLVideoRenderer.mm',
        'objc/RTCSessionDescription+Private.h',
        'objc/RTCSessionDescription.h',
        'objc/RTCSessionDescription.mm',
@@ -40,6 +42,31 @@
        'objc/RTCVideoFrame+Private.h',
        'objc/RTCVideoFrame.h',
        'objc/RTCVideoFrame.mm',
        'objc/RTCVideoRenderer.h',
      ],
      'conditions': [
        ['OS=="ios"', {
          'sources': [
            'objc/RTCEAGLVideoView.h',
            'objc/RTCEAGLVideoView.m',
          ],
          'all_dependent_settings': {
            'xcode_settings': {
              'OTHER_LDFLAGS': [
                '-framework CoreGraphics',
                '-framework GLKit',
                '-framework OpenGLES',
                '-framework QuartzCore',
              ]
            }
          }
        }],
        ['OS=="mac"', {
          'sources': [
            'objc/RTCNSGLVideoView.h',
            'objc/RTCNSGLVideoView.m',
          ],
        }],
      ],
      'xcode_settings': {
        'CLANG_ENABLE_OBJC_ARC': 'YES',
webrtc/api/objc/RTCEAGLVideoView.h (new file, 35 lines)
@@ -0,0 +1,35 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "RTCVideoRenderer.h"

NS_ASSUME_NONNULL_BEGIN

@class RTCEAGLVideoView;
@protocol RTCEAGLVideoViewDelegate

- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size;

@end

/**
 * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
 * bounds using OpenGLES 2.0.
 */
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>

@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;

@end

NS_ASSUME_NONNULL_END
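The header above is the whole public surface of the iOS renderer view. As a rough usage sketch (not part of this change; the controller and property names are hypothetical), an application adds the view to its hierarchy, adopts RTCEAGLVideoViewDelegate, and treats the view as an ordinary id<RTCVideoRenderer> sink:

#import <UIKit/UIKit.h>
#import "RTCEAGLVideoView.h"

// Hypothetical host view controller, shown only to illustrate the API above.
@interface MyVideoViewController : UIViewController <RTCEAGLVideoViewDelegate>
@property(nonatomic, strong) RTCEAGLVideoView *remoteVideoView;
@end

@implementation MyVideoViewController

- (void)viewDidLoad {
  [super viewDidLoad];
  // The view conforms to RTCVideoRenderer, so frames can be pushed to it with
  // -renderFrame: once it is installed in the view hierarchy.
  self.remoteVideoView =
      [[RTCEAGLVideoView alloc] initWithFrame:self.view.bounds];
  self.remoteVideoView.delegate = self;
  [self.view addSubview:self.remoteVideoView];
}

#pragma mark - RTCEAGLVideoViewDelegate

- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size {
  // React to the new aspect ratio; the layout policy is application-specific.
  NSLog(@"Remote video size changed to %@", NSStringFromCGSize(size));
}

@end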
webrtc/api/objc/RTCEAGLVideoView.m (new file, 259 lines)
@@ -0,0 +1,259 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCEAGLVideoView.h"

#import <GLKit/GLKit.h>

#import "RTCVideoFrame.h"
#import "RTCOpenGLVideoRenderer.h"

// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
// refreshes, which should be 30fps. We wrap the display link in order to avoid
// a retain cycle since CADisplayLink takes a strong reference onto its target.
// The timer is paused by default.
@interface RTCDisplayLinkTimer : NSObject

@property(nonatomic) BOOL isPaused;

- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler;
- (void)invalidate;

@end

@implementation RTCDisplayLinkTimer {
  CADisplayLink *_displayLink;
  void (^_timerHandler)(void);
}

- (instancetype)initWithTimerHandler:(void (^)(void))timerHandler {
  NSParameterAssert(timerHandler);
  if (self = [super init]) {
    _timerHandler = timerHandler;
    _displayLink =
        [CADisplayLink displayLinkWithTarget:self
                                    selector:@selector(displayLinkDidFire:)];
    _displayLink.paused = YES;
    // Set to half of screen refresh, which should be 30fps.
    [_displayLink setFrameInterval:2];
    [_displayLink addToRunLoop:[NSRunLoop currentRunLoop]
                       forMode:NSRunLoopCommonModes];
  }
  return self;
}

- (void)dealloc {
  [self invalidate];
}

- (BOOL)isPaused {
  return _displayLink.paused;
}

- (void)setIsPaused:(BOOL)isPaused {
  _displayLink.paused = isPaused;
}

- (void)invalidate {
  [_displayLink invalidate];
}

- (void)displayLinkDidFire:(CADisplayLink *)displayLink {
  _timerHandler();
}

@end

// RTCEAGLVideoView wraps a GLKView which is setup with
// enableSetNeedsDisplay = NO for the purpose of gaining control of
// exactly when to call -[GLKView display]. This need for extra
// control is required to avoid triggering method calls on GLKView
// that results in attempting to bind the underlying render buffer
// when the drawable size would be empty which would result in the
// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
// the method that will trigger the binding of the render
// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
// is disabled for the reasons above, the RTCEAGLVideoView maintains
// its own |isDirty| flag.

@interface RTCEAGLVideoView () <GLKViewDelegate>
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@property(nonatomic, readonly) GLKView *glkView;
@property(nonatomic, readonly) RTCOpenGLVideoRenderer *glRenderer;
@end

@implementation RTCEAGLVideoView {
  RTCDisplayLinkTimer *_timer;
  // This flag should only be set and read on the main thread (e.g. by
  // setNeedsDisplay)
  BOOL _isDirty;
}

@synthesize delegate = _delegate;
@synthesize videoFrame = _videoFrame;
@synthesize glkView = _glkView;
@synthesize glRenderer = _glRenderer;

- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
    [self configure];
  }
  return self;
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
  if (self = [super initWithCoder:aDecoder]) {
    [self configure];
  }
  return self;
}

- (void)configure {
  EAGLContext *glContext =
      [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
  if (!glContext) {
    glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
  }
  _glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:glContext];

  // GLKView manages a framebuffer for us.
  _glkView = [[GLKView alloc] initWithFrame:CGRectZero
                                    context:glContext];
  _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
  _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
  _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
  _glkView.drawableMultisample = GLKViewDrawableMultisampleNone;
  _glkView.delegate = self;
  _glkView.layer.masksToBounds = YES;
  _glkView.enableSetNeedsDisplay = NO;
  [self addSubview:_glkView];

  // Listen to application state in order to clean up OpenGL before app goes
  // away.
  NSNotificationCenter *notificationCenter =
      [NSNotificationCenter defaultCenter];
  [notificationCenter addObserver:self
                         selector:@selector(willResignActive)
                             name:UIApplicationWillResignActiveNotification
                           object:nil];
  [notificationCenter addObserver:self
                         selector:@selector(didBecomeActive)
                             name:UIApplicationDidBecomeActiveNotification
                           object:nil];

  // Frames are received on a separate thread, so we poll for current frame
  // using a refresh rate proportional to screen refresh frequency. This
  // occurs on the main thread.
  __weak RTCEAGLVideoView *weakSelf = self;
  _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{
    RTCEAGLVideoView *strongSelf = weakSelf;
    [strongSelf displayLinkTimerDidFire];
  }];
  [self setupGL];
}

- (void)dealloc {
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  UIApplicationState appState =
      [UIApplication sharedApplication].applicationState;
  if (appState == UIApplicationStateActive) {
    [self teardownGL];
  }
  [_timer invalidate];
}

#pragma mark - UIView

- (void)setNeedsDisplay {
  [super setNeedsDisplay];
  _isDirty = YES;
}

- (void)setNeedsDisplayInRect:(CGRect)rect {
  [super setNeedsDisplayInRect:rect];
  _isDirty = YES;
}

- (void)layoutSubviews {
  [super layoutSubviews];
  _glkView.frame = self.bounds;
}

#pragma mark - GLKViewDelegate

// This method is called when the GLKView's content is dirty and needs to be
// redrawn. This occurs on main thread.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
  // The renderer will draw the frame to the framebuffer corresponding to the
  // one used by |view|.
  [_glRenderer drawFrame:self.videoFrame];
}

#pragma mark - RTCVideoRenderer

// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
  __weak RTCEAGLVideoView *weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    RTCEAGLVideoView *strongSelf = weakSelf;
    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
  });
}

- (void)renderFrame:(RTCVideoFrame *)frame {
  self.videoFrame = frame;
}

#pragma mark - Private

- (void)displayLinkTimerDidFire {
  // Don't render unless video frame have changed or the view content
  // has explicitly been marked dirty.
  if (!_isDirty && _glRenderer.lastDrawnFrame == self.videoFrame) {
    return;
  }

  // Always reset isDirty at this point, even if -[GLKView display]
  // won't be called in the case the drawable size is empty.
  _isDirty = NO;

  // Only call -[GLKView display] if the drawable size is
  // non-empty. Calling display will make the GLKView setup its
  // render buffer if necessary, but that will fail with error
  // GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT if size is empty.
  if (self.bounds.size.width > 0 && self.bounds.size.height > 0) {
    [_glkView display];
  }
}

- (void)setupGL {
  self.videoFrame = nil;
  [_glRenderer setupGL];
  _timer.isPaused = NO;
}

- (void)teardownGL {
  self.videoFrame = nil;
  _timer.isPaused = YES;
  [_glkView deleteDrawable];
  [_glRenderer teardownGL];
}

- (void)didBecomeActive {
  [self setupGL];
}

- (void)willResignActive {
  [self teardownGL];
}

@end
webrtc/api/objc/RTCNSGLVideoView.h (new file, 34 lines)
@@ -0,0 +1,34 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#if TARGET_OS_IPHONE
#error "This file targets OSX."
#endif

#import <AppKit/NSOpenGLView.h>

#import "RTCVideoRenderer.h"

NS_ASSUME_NONNULL_BEGIN

@class RTCNSGLVideoView;
@protocol RTCNSGLVideoViewDelegate

- (void)videoView:(RTCNSGLVideoView *)videoView didChangeVideoSize:(CGSize)size;

@end

@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>

@property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;

@end

NS_ASSUME_NONNULL_END
webrtc/api/objc/RTCNSGLVideoView.m (new file, 141 lines)
@@ -0,0 +1,141 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCNSGLVideoView.h"

#import <CoreVideo/CVDisplayLink.h>
#import <OpenGL/gl3.h>
#import "RTCVideoFrame.h"
#import "RTCOpenGLVideoRenderer.h"

@interface RTCNSGLVideoView ()
// |videoFrame| is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
@property(atomic, strong) RTCVideoFrame *videoFrame;
@property(atomic, strong) RTCOpenGLVideoRenderer *glRenderer;
- (void)drawFrame;
@end

static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
                                   const CVTimeStamp *now,
                                   const CVTimeStamp *outputTime,
                                   CVOptionFlags flagsIn,
                                   CVOptionFlags *flagsOut,
                                   void *displayLinkContext) {
  RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext;
  [view drawFrame];
  return kCVReturnSuccess;
}

@implementation RTCNSGLVideoView {
  CVDisplayLinkRef _displayLink;
}

@synthesize delegate = _delegate;
@synthesize videoFrame = _videoFrame;
@synthesize glRenderer = _glRenderer;

- (void)dealloc {
  [self teardownDisplayLink];
}

- (void)drawRect:(NSRect)rect {
  [self drawFrame];
}

- (void)reshape {
  [super reshape];
  NSRect frame = [self frame];
  CGLLockContext([[self openGLContext] CGLContextObj]);
  glViewport(0, 0, frame.size.width, frame.size.height);
  CGLUnlockContext([[self openGLContext] CGLContextObj]);
}

- (void)lockFocus {
  NSOpenGLContext *context = [self openGLContext];
  [super lockFocus];
  if ([context view] != self) {
    [context setView:self];
  }
  [context makeCurrentContext];
}

- (void)prepareOpenGL {
  [super prepareOpenGL];
  if (!self.glRenderer) {
    self.glRenderer =
        [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
  }
  [self.glRenderer setupGL];
  [self setupDisplayLink];
}

- (void)clearGLContext {
  [self.glRenderer teardownGL];
  self.glRenderer = nil;
  [super clearGLContext];
}

#pragma mark - RTCVideoRenderer

// These methods may be called on non-main thread.
- (void)setSize:(CGSize)size {
  dispatch_async(dispatch_get_main_queue(), ^{
    [self.delegate videoView:self didChangeVideoSize:size];
  });
}

- (void)renderFrame:(RTCVideoFrame *)frame {
  self.videoFrame = frame;
}

#pragma mark - Private

- (void)drawFrame {
  RTCVideoFrame *videoFrame = self.videoFrame;
  if (self.glRenderer.lastDrawnFrame != videoFrame) {
    // This method may be called from CVDisplayLink callback which isn't on the
    // main thread so we have to lock the GL context before drawing.
    CGLLockContext([[self openGLContext] CGLContextObj]);
    [self.glRenderer drawFrame:videoFrame];
    CGLUnlockContext([[self openGLContext] CGLContextObj]);
  }
}

- (void)setupDisplayLink {
  if (_displayLink) {
    return;
  }
  // Synchronize buffer swaps with vertical refresh rate.
  GLint swapInt = 1;
  [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];

  // Create display link.
  CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
  CVDisplayLinkSetOutputCallback(_displayLink,
                                 &OnDisplayLinkFired,
                                 (__bridge void *)self);
  // Set the display link for the current renderer.
  CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
  CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
  CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
      _displayLink, cglContext, cglPixelFormat);
  CVDisplayLinkStart(_displayLink);
}

- (void)teardownDisplayLink {
  if (!_displayLink) {
    return;
  }
  CVDisplayLinkRelease(_displayLink);
  _displayLink = NULL;
}

@end
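For completeness, a minimal OS X embedding sketch (hypothetical, not part of this change): RTCNSGLVideoView is an NSOpenGLView subclass, so the caller supplies the pixel format. A core-profile format is assumed here because the renderer below advertises OpenGL 3.2 support on desktop; the helper name and attribute list are illustrative only.

#import <AppKit/AppKit.h>
#import "RTCNSGLVideoView.h"

// Hypothetical helper that creates the view with an OpenGL 3.2 core profile.
static RTCNSGLVideoView *CreateRemoteVideoView(NSRect frame) {
  NSOpenGLPixelFormatAttribute attributes[] = {
    NSOpenGLPFADoubleBuffer,
    NSOpenGLPFAOpenGLProfile, NSOpenGLProfileVersion3_2Core,
    0
  };
  NSOpenGLPixelFormat *pixelFormat =
      [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
  return [[RTCNSGLVideoView alloc] initWithFrame:frame pixelFormat:pixelFormat];
}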
webrtc/api/objc/RTCOpenGLVideoRenderer.h (new file, 58 lines)
@@ -0,0 +1,58 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <GLKit/GLKit.h>
#else
#import <AppKit/NSOpenGL.h>
#endif

NS_ASSUME_NONNULL_BEGIN

@class RTCVideoFrame;

// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
// framebuffer creation and management should be handled elsewhere using the
// same context used to initialize this class.
@interface RTCOpenGLVideoRenderer : NSObject

// The last successfully drawn frame. Used to avoid drawing frames unnecessarily
// hence saving battery life by reducing load.
@property(nonatomic, readonly) RTCVideoFrame *lastDrawnFrame;

#if TARGET_OS_IPHONE
- (instancetype)initWithContext:(EAGLContext *)context
    NS_DESIGNATED_INITIALIZER;
#else
- (instancetype)initWithContext:(NSOpenGLContext *)context
    NS_DESIGNATED_INITIALIZER;
#endif

// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
// called before this function will succeed.
- (BOOL)drawFrame:(RTCVideoFrame *)frame;

// The following methods are used to manage OpenGL resources. On iOS
// applications should release resources when placed in background for use in
// the foreground application. In fact, attempting to call OpenGLES commands
// while in background will result in application termination.

// Sets up the OpenGL state needed for rendering.
- (void)setupGL;
// Tears down the OpenGL state created by |setupGL|.
- (void)teardownGL;

- (instancetype)init NS_UNAVAILABLE;

@end

NS_ASSUME_NONNULL_END
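To make the lifecycle spelled out in the header comments concrete, here is a hedged sketch (the helper name and variables are hypothetical): -setupGL must precede -drawFrame:, the caller owns the bound framebuffer (in practice a GLKView or NSOpenGLView provides it), and -teardownGL releases the GL state again, e.g. before an iOS app is backgrounded.

#import <GLKit/GLKit.h>
#import "RTCOpenGLVideoRenderer.h"
#import "RTCVideoFrame.h"

// Hypothetical helper illustrating the setupGL / drawFrame: / teardownGL contract.
static void RenderOneFrame(RTCVideoFrame *videoFrame) {
  EAGLContext *context =
      [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
  RTCOpenGLVideoRenderer *renderer =
      [[RTCOpenGLVideoRenderer alloc] initWithContext:context];
  [renderer setupGL];  // Must be called before -drawFrame: will succeed.
  // A framebuffer must already be bound on |context|; a GLKView normally owns it.
  if (![renderer drawFrame:videoFrame]) {
    // NO means setupGL was skipped or |videoFrame| equals lastDrawnFrame.
  }
  [renderer teardownGL];  // Release GL resources, e.g. on resigning active.
}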
webrtc/api/objc/RTCOpenGLVideoRenderer.mm (new file, 485 lines)
@@ -0,0 +1,485 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCOpenGLVideoRenderer.h"

#include <string.h>

#include "webrtc/base/scoped_ptr.h"

#if TARGET_OS_IPHONE
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif

#import "RTCVideoFrame.h"

// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
// anticipation of that happening in the future.

#if TARGET_OS_IPHONE
#define RTC_PIXEL_FORMAT GL_LUMINANCE
#define SHADER_VERSION
#define VERTEX_SHADER_IN "attribute"
#define VERTEX_SHADER_OUT "varying"
#define FRAGMENT_SHADER_IN "varying"
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"
#else
#define RTC_PIXEL_FORMAT GL_RED
#define SHADER_VERSION "#version 150\n"
#define VERTEX_SHADER_IN "in"
#define VERTEX_SHADER_OUT "out"
#define FRAGMENT_SHADER_IN "in"
#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n"
#define FRAGMENT_SHADER_COLOR "fragColor"
#define FRAGMENT_SHADER_TEXTURE "texture"
#endif

// Vertex shader doesn't do anything except pass coordinates through.
static const char kVertexShaderSource[] =
  SHADER_VERSION
  VERTEX_SHADER_IN " vec2 position;\n"
  VERTEX_SHADER_IN " vec2 texcoord;\n"
  VERTEX_SHADER_OUT " vec2 v_texcoord;\n"
  "void main() {\n"
  " gl_Position = vec4(position.x, position.y, 0.0, 1.0);\n"
  " v_texcoord = texcoord;\n"
  "}\n";

// Fragment shader converts YUV values from input textures into a final RGB
// pixel. The conversion formula is from http://www.fourcc.org/fccyvrgb.php.
static const char kFragmentShaderSource[] =
  SHADER_VERSION
  "precision highp float;"
  FRAGMENT_SHADER_IN " vec2 v_texcoord;\n"
  "uniform lowp sampler2D s_textureY;\n"
  "uniform lowp sampler2D s_textureU;\n"
  "uniform lowp sampler2D s_textureV;\n"
  FRAGMENT_SHADER_OUT
  "void main() {\n"
  " float y, u, v, r, g, b;\n"
  " y = " FRAGMENT_SHADER_TEXTURE "(s_textureY, v_texcoord).r;\n"
  " u = " FRAGMENT_SHADER_TEXTURE "(s_textureU, v_texcoord).r;\n"
  " v = " FRAGMENT_SHADER_TEXTURE "(s_textureV, v_texcoord).r;\n"
  " u = u - 0.5;\n"
  " v = v - 0.5;\n"
  " r = y + 1.403 * v;\n"
  " g = y - 0.344 * u - 0.714 * v;\n"
  " b = y + 1.770 * u;\n"
  " " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
  " }\n";

// Compiles a shader of the given |type| with GLSL source |source| and returns
// the shader handle or 0 on error.
GLuint CreateShader(GLenum type, const GLchar *source) {
  GLuint shader = glCreateShader(type);
  if (!shader) {
    return 0;
  }
  glShaderSource(shader, 1, &source, NULL);
  glCompileShader(shader);
  GLint compileStatus = GL_FALSE;
  glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
  if (compileStatus == GL_FALSE) {
    glDeleteShader(shader);
    shader = 0;
  }
  return shader;
}

// Links a shader program with the given vertex and fragment shaders and
// returns the program handle or 0 on error.
GLuint CreateProgram(GLuint vertexShader, GLuint fragmentShader) {
  if (vertexShader == 0 || fragmentShader == 0) {
    return 0;
  }
  GLuint program = glCreateProgram();
  if (!program) {
    return 0;
  }
  glAttachShader(program, vertexShader);
  glAttachShader(program, fragmentShader);
  glLinkProgram(program);
  GLint linkStatus = GL_FALSE;
  glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
  if (linkStatus == GL_FALSE) {
    glDeleteProgram(program);
    program = 0;
  }
  return program;
}

// When modelview and projection matrices are identity (default) the world is
// contained in the square around origin with unit size 2. Drawing to these
// coordinates is equivalent to drawing to the entire screen. The texture is
// stretched over that square using texture coordinates (u, v) that range
// from (0, 0) to (1, 1) inclusive. Texture coordinates are flipped vertically
// here because the incoming frame has origin in upper left hand corner but
// OpenGL expects origin in bottom left corner.
const GLfloat gVertices[] = {
  // X, Y, U, V.
  -1, -1, 0, 1,  // Bottom left.
  1, -1, 1, 1,   // Bottom right.
  1, 1, 1, 0,    // Top right.
  -1, 1, 0, 0,   // Top left.
};

// |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
// of 3 textures are used here, one for each of the Y, U and V planes. Having
// two sets alleviates CPU blockage in the event that the GPU is asked to render
// to a texture that is already in use.
static const GLsizei kNumTextureSets = 2;
static const GLsizei kNumTextures = 3 * kNumTextureSets;

@implementation RTCOpenGLVideoRenderer {
#if TARGET_OS_IPHONE
  EAGLContext *_context;
#else
  NSOpenGLContext *_context;
#endif
  BOOL _isInitialized;
  NSUInteger _currentTextureSet;
  // Handles for OpenGL constructs.
  GLuint _textures[kNumTextures];
  GLuint _program;
#if !TARGET_OS_IPHONE
  GLuint _vertexArray;
#endif
  GLuint _vertexBuffer;
  GLint _position;
  GLint _texcoord;
  GLint _ySampler;
  GLint _uSampler;
  GLint _vSampler;
  // Used to create a non-padded plane for GPU upload when we receive padded
  // frames.
  rtc::scoped_ptr<uint8_t[]> _planeBuffer;
}

@synthesize lastDrawnFrame = _lastDrawnFrame;

+ (void)initialize {
  // Disable dithering for performance.
  glDisable(GL_DITHER);
}

#if TARGET_OS_IPHONE
- (instancetype)initWithContext:(EAGLContext *)context {
#else
- (instancetype)initWithContext:(NSOpenGLContext *)context {
#endif
  NSAssert(context != nil, @"context cannot be nil");
  if (self = [super init]) {
    _context = context;
  }
  return self;
}

- (BOOL)drawFrame:(RTCVideoFrame *)frame {
  if (!_isInitialized) {
    return NO;
  }
  if (_lastDrawnFrame == frame) {
    return NO;
  }
  [self ensureGLContext];
  glClear(GL_COLOR_BUFFER_BIT);
  if (frame) {
    if (![self updateTextureSizesForFrame:frame] ||
        ![self updateTextureDataForFrame:frame]) {
      return NO;
    }
#if !TARGET_OS_IPHONE
    glBindVertexArray(_vertexArray);
#endif
    glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
  }
#if !TARGET_OS_IPHONE
  [_context flushBuffer];
#endif
  _lastDrawnFrame = frame;
  return YES;
}

- (void)setupGL {
  if (_isInitialized) {
    return;
  }
  [self ensureGLContext];
  if (![self setupProgram]) {
    return;
  }
  if (![self setupTextures]) {
    return;
  }
  if (![self setupVertices]) {
    return;
  }
  glUseProgram(_program);
  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
  _isInitialized = YES;
}

- (void)teardownGL {
  if (!_isInitialized) {
    return;
  }
  [self ensureGLContext];
  glDeleteProgram(_program);
  _program = 0;
  glDeleteTextures(kNumTextures, _textures);
  glDeleteBuffers(1, &_vertexBuffer);
  _vertexBuffer = 0;
#if !TARGET_OS_IPHONE
  glDeleteVertexArrays(1, &_vertexArray);
#endif
  _isInitialized = NO;
}

#pragma mark - Private

- (void)ensureGLContext {
  NSAssert(_context, @"context shouldn't be nil");
#if TARGET_OS_IPHONE
  if ([EAGLContext currentContext] != _context) {
    [EAGLContext setCurrentContext:_context];
  }
#else
  if ([NSOpenGLContext currentContext] != _context) {
    [_context makeCurrentContext];
  }
#endif
}

- (BOOL)setupProgram {
  NSAssert(!_program, @"program already set up");
  GLuint vertexShader = CreateShader(GL_VERTEX_SHADER, kVertexShaderSource);
  NSAssert(vertexShader, @"failed to create vertex shader");
  GLuint fragmentShader =
      CreateShader(GL_FRAGMENT_SHADER, kFragmentShaderSource);
  NSAssert(fragmentShader, @"failed to create fragment shader");
  _program = CreateProgram(vertexShader, fragmentShader);
  // Shaders are created only to generate program.
  if (vertexShader) {
    glDeleteShader(vertexShader);
  }
  if (fragmentShader) {
    glDeleteShader(fragmentShader);
  }
  if (!_program) {
    return NO;
  }
  _position = glGetAttribLocation(_program, "position");
  _texcoord = glGetAttribLocation(_program, "texcoord");
  _ySampler = glGetUniformLocation(_program, "s_textureY");
  _uSampler = glGetUniformLocation(_program, "s_textureU");
  _vSampler = glGetUniformLocation(_program, "s_textureV");
  if (_position < 0 || _texcoord < 0 || _ySampler < 0 || _uSampler < 0 ||
      _vSampler < 0) {
    return NO;
  }
  return YES;
}

- (BOOL)setupTextures {
  glGenTextures(kNumTextures, _textures);
  // Set parameters for each of the textures we created.
  for (GLsizei i = 0; i < kNumTextures; i++) {
    glActiveTexture(GL_TEXTURE0 + i);
    glBindTexture(GL_TEXTURE_2D, _textures[i]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  }
  return YES;
}

- (BOOL)updateTextureSizesForFrame:(RTCVideoFrame *)frame {
  if (frame.height == _lastDrawnFrame.height &&
      frame.width == _lastDrawnFrame.width &&
      frame.chromaWidth == _lastDrawnFrame.chromaWidth &&
      frame.chromaHeight == _lastDrawnFrame.chromaHeight) {
    return YES;
  }
  GLsizei lumaWidth = frame.width;
  GLsizei lumaHeight = frame.height;
  GLsizei chromaWidth = frame.chromaWidth;
  GLsizei chromaHeight = frame.chromaHeight;
  for (GLint i = 0; i < kNumTextureSets; i++) {
    glActiveTexture(GL_TEXTURE0 + i * 3);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 RTC_PIXEL_FORMAT,
                 lumaWidth,
                 lumaHeight,
                 0,
                 RTC_PIXEL_FORMAT,
                 GL_UNSIGNED_BYTE,
                 0);
    glActiveTexture(GL_TEXTURE0 + i * 3 + 1);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 RTC_PIXEL_FORMAT,
                 chromaWidth,
                 chromaHeight,
                 0,
                 RTC_PIXEL_FORMAT,
                 GL_UNSIGNED_BYTE,
                 0);
    glActiveTexture(GL_TEXTURE0 + i * 3 + 2);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 RTC_PIXEL_FORMAT,
                 chromaWidth,
                 chromaHeight,
                 0,
                 RTC_PIXEL_FORMAT,
                 GL_UNSIGNED_BYTE,
                 0);
  }
  if ((NSUInteger)frame.yPitch != frame.width ||
      (NSUInteger)frame.uPitch != frame.chromaWidth ||
      (NSUInteger)frame.vPitch != frame.chromaWidth) {
    _planeBuffer.reset(new uint8_t[frame.width * frame.height]);
  } else {
    _planeBuffer.reset();
  }
  return YES;
}

- (void)uploadPlane:(const uint8_t *)plane
            sampler:(GLint)sampler
             offset:(NSUInteger)offset
              width:(size_t)width
             height:(size_t)height
             stride:(int32_t)stride {
  glActiveTexture(GL_TEXTURE0 + offset);
  // When setting texture sampler uniforms, the texture index is used not
  // the texture handle.
  glUniform1i(sampler, offset);
#if TARGET_OS_IPHONE
  BOOL hasUnpackRowLength = _context.API == kEAGLRenderingAPIOpenGLES3;
#else
  BOOL hasUnpackRowLength = YES;
#endif
  const uint8_t *uploadPlane = plane;
  if ((size_t)stride != width) {
    if (hasUnpackRowLength) {
      // GLES3 allows us to specify stride.
      glPixelStorei(GL_UNPACK_ROW_LENGTH, stride);
      glTexImage2D(GL_TEXTURE_2D,
                   0,
                   RTC_PIXEL_FORMAT,
                   width,
                   height,
                   0,
                   RTC_PIXEL_FORMAT,
                   GL_UNSIGNED_BYTE,
                   uploadPlane);
      glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
      return;
    } else {
      // Make an unpadded copy and upload that instead. Quick profiling showed
      // that this is faster than uploading row by row using glTexSubImage2D.
      uint8_t *unpaddedPlane = _planeBuffer.get();
      for (size_t y = 0; y < height; ++y) {
        memcpy(unpaddedPlane + y * width, plane + y * stride, width);
      }
      uploadPlane = unpaddedPlane;
    }
  }
  glTexImage2D(GL_TEXTURE_2D,
               0,
               RTC_PIXEL_FORMAT,
               width,
               height,
               0,
               RTC_PIXEL_FORMAT,
               GL_UNSIGNED_BYTE,
               uploadPlane);
}

- (BOOL)updateTextureDataForFrame:(RTCVideoFrame *)frame {
  NSUInteger textureOffset = _currentTextureSet * 3;
  NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");

  [self uploadPlane:frame.yPlane
            sampler:_ySampler
             offset:textureOffset
              width:frame.width
             height:frame.height
             stride:frame.yPitch];

  [self uploadPlane:frame.uPlane
            sampler:_uSampler
             offset:textureOffset + 1
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.uPitch];

  [self uploadPlane:frame.vPlane
            sampler:_vSampler
             offset:textureOffset + 2
              width:frame.chromaWidth
             height:frame.chromaHeight
             stride:frame.vPitch];

  _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
  return YES;
}

- (BOOL)setupVertices {
#if !TARGET_OS_IPHONE
  NSAssert(!_vertexArray, @"vertex array already set up");
  glGenVertexArrays(1, &_vertexArray);
  if (!_vertexArray) {
    return NO;
  }
  glBindVertexArray(_vertexArray);
#endif
  NSAssert(!_vertexBuffer, @"vertex buffer already set up");
  glGenBuffers(1, &_vertexBuffer);
  if (!_vertexBuffer) {
#if !TARGET_OS_IPHONE
    glDeleteVertexArrays(1, &_vertexArray);
    _vertexArray = 0;
#endif
    return NO;
  }
  glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
  glBufferData(GL_ARRAY_BUFFER, sizeof(gVertices), gVertices, GL_DYNAMIC_DRAW);

  // Read position attribute from |gVertices| with size of 2 and stride of 4
  // beginning at the start of the array. The last argument indicates offset
  // of data within |gVertices| as supplied to the vertex buffer.
  glVertexAttribPointer(
      _position, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void *)0);
  glEnableVertexAttribArray(_position);

  // Read texcoord attribute from |gVertices| with size of 2 and stride of 4
  // beginning at the first texcoord in the array. The last argument indicates
  // offset of data within |gVertices| as supplied to the vertex buffer.
  glVertexAttribPointer(_texcoord,
                        2,
                        GL_FLOAT,
                        GL_FALSE,
                        4 * sizeof(GLfloat),
                        (void *)(2 * sizeof(GLfloat)));
  glEnableVertexAttribArray(_texcoord);

  return YES;
}

@end
webrtc/api/objc/RTCVideoRenderer.h (new file, 30 lines)
@@ -0,0 +1,30 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

NS_ASSUME_NONNULL_BEGIN

@class RTCVideoFrame;

@protocol RTCVideoRenderer <NSObject>

/** The size of the frame. */
- (void)setSize:(CGSize)size;

/** The frame to be displayed. */
- (void)renderFrame:(RTCVideoFrame *)frame;

@end

NS_ASSUME_NONNULL_END
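Because RTCVideoRenderer is a plain protocol, sinks other than the two views can adopt it. A minimal hypothetical conformer (not part of this change) that simply counts delivered frames:

#import <Foundation/Foundation.h>
#import "RTCVideoFrame.h"
#import "RTCVideoRenderer.h"

// Hypothetical renderer used only to illustrate conforming to RTCVideoRenderer.
@interface FrameCountingRenderer : NSObject <RTCVideoRenderer>
@property(nonatomic, readonly) NSUInteger frameCount;
@end

@implementation FrameCountingRenderer

- (void)setSize:(CGSize)size {
  // The incoming frame size changed; as with the views above, this may be
  // called off the main thread.
}

- (void)renderFrame:(RTCVideoFrame *)frame {
  // Frames arrive from a worker thread; a real sink would hand |frame| to its
  // drawing or encoding machinery here.
  _frameCount++;
}

@end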