webrtc_m130/webrtc/api/videosource.h

/*
* Copyright 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_VIDEOSOURCE_H_
#define WEBRTC_API_VIDEOSOURCE_H_
#include <list>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/api/videosourceinterface.h"
#include "webrtc/api/videotrackrenderers.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videocommon.h"
// VideoSource implements VideoSourceInterface. It owns a
// cricket::VideoCapturer and makes sure the camera is started at a resolution
// that honors the constraints.
// The state is set depending on the result of starting the capturer.
// If the constraints can't be met or the capturer fails to start, the state
// transitions to kEnded; otherwise it transitions to kLive.
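//
// A minimal restatement of those outcomes in terms of the SourceState values
// declared in mediastreaminterface.h (a sketch of the contract described
// above, not additional behavior):
//
//   constraints satisfiable and capturer starts   -> state() == kLive
//   constraints unmet or capturer fails to start  -> state() == kEnded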
namespace cricket {
class ChannelManager;
}  // namespace cricket

namespace webrtc {

class MediaConstraintsInterface;

class VideoSource : public Notifier<VideoSourceInterface>,
                    public sigslot::has_slots<> {
 public:
  // Creates an instance of VideoSource.
  // VideoSource takes ownership of |capturer|.
  // |constraints| can be NULL; in that case the camera is opened using a
  // default resolution.
  static rtc::scoped_refptr<VideoSource> Create(
      cricket::ChannelManager* channel_manager,
      cricket::VideoCapturer* capturer,
      const webrtc::MediaConstraintsInterface* constraints,
      bool remote);
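
  // Example of creating a source (a sketch, not part of the API surface;
  // |channel_manager| and |capturer| are assumed to be supplied by the
  // caller, e.g. by PeerConnectionFactory internals):
  //
  //   rtc::scoped_refptr<VideoSource> source = VideoSource::Create(
  //       channel_manager, capturer, nullptr /* constraints */,
  //       false /* remote */);
  //   if (source->state() == MediaSourceInterface::kEnded) {
  //     // Constraints could not be met or the capturer failed to start.
  //   }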

  SourceState state() const override { return state_; }
  bool remote() const override { return remote_; }

  virtual const cricket::VideoOptions* options() const { return &options_; }
  virtual cricket::VideoCapturer* GetVideoCapturer() {
    return video_capturer_.get();
  }

  void Stop() override;
  void Restart() override;

  // |output| will be served video frames as long as the underlying capturer
  // is running.
  virtual void AddSink(rtc::VideoSinkInterface<cricket::VideoFrame>* output);
  virtual void RemoveSink(
      rtc::VideoSinkInterface<cricket::VideoFrame>* output);
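
  // Example sink (a sketch; FrameCounter is a hypothetical class, and a real
  // sink would typically hand frames to a renderer):
  //
  //   class FrameCounter
  //       : public rtc::VideoSinkInterface<cricket::VideoFrame> {
  //    public:
  //     void OnFrame(const cricket::VideoFrame& frame) override { ++count_; }
  //     int count() const { return count_; }
  //    private:
  //     int count_ = 0;
  //   };
  //
  //   FrameCounter counter;
  //   source->AddSink(&counter);
  //   // ... while capture runs, counter.count() increases ...
  //   source->RemoveSink(&counter);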

 protected:
  VideoSource(cricket::ChannelManager* channel_manager,
              cricket::VideoCapturer* capturer,
              bool remote);
  virtual ~VideoSource();
  void Initialize(const webrtc::MediaConstraintsInterface* constraints);

 private:
  void OnStateChange(cricket::VideoCapturer* capturer,
                     cricket::CaptureState capture_state);
  void SetState(SourceState new_state);

  cricket::ChannelManager* channel_manager_;
  rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
  rtc::scoped_ptr<cricket::VideoRenderer> frame_input_;
  std::list<rtc::VideoSinkInterface<cricket::VideoFrame>*> sinks_;
  cricket::VideoFormat format_;
  cricket::VideoOptions options_;
  SourceState state_;
  const bool remote_;
};
} // namespace webrtc
#endif // WEBRTC_API_VIDEOSOURCE_H_