diff --git a/talk/media/webrtc/webrtcvideoengine.cc b/talk/media/webrtc/webrtcvideoengine.cc
index 7afe5de6b..ef8f703eb 100644
--- a/talk/media/webrtc/webrtcvideoengine.cc
+++ b/talk/media/webrtc/webrtcvideoengine.cc
@@ -1178,8 +1178,13 @@ bool WebRtcVideoEngine::CanSendCodec(const VideoCodec& requested,
     out->preference = requested.preference;
     out->params = requested.params;
     out->framerate = rtc::_min(requested.framerate, local_max->framerate);
+#ifdef ECOVATE_NO_SCALING
+    out->width = requested.width;
+    out->height = requested.height;
+#else
     out->width = 0;
-    out->height = 0;
+    out->height = 0;
+#endif
     out->params = requested.params;
     out->feedback_params = requested.feedback_params;
@@ -1570,7 +1575,11 @@ WebRtcVideoMediaChannel::WebRtcVideoMediaChannel(
       send_fec_type_(-1),
       sending_(false),
       ratio_w_(0),
-      ratio_h_(0) {
+      ratio_h_(0),
+      listener_(ecovate::listener) {
+  if (listener_) {
+    listener_->incrementReferenceCount();
+  }
   engine->RegisterChannel(this);
 }
@@ -1604,6 +1613,10 @@ WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
   if (worker_thread()) {
     worker_thread()->Clear(this);
   }
+
+  if (listener_) {
+    listener_->decrementReferenceCount();
+  }
 }
 
 bool WebRtcVideoMediaChannel::SetRecvCodecs(
@@ -2148,6 +2161,9 @@ bool WebRtcVideoMediaChannel::StartSend() {
       success = false;
     }
   }
+  if (success && listener_) {
+    listener_->onSendMedia();
+  }
   return success;
 }
diff --git a/talk/media/webrtc/webrtcvideoengine.h b/talk/media/webrtc/webrtcvideoengine.h
index f467b9757..f8b4432dd 100644
--- a/talk/media/webrtc/webrtcvideoengine.h
+++ b/talk/media/webrtc/webrtcvideoengine.h
@@ -45,6 +45,24 @@
 #error "Bogus include."
 #endif
 
+namespace ecovate {
+
+struct Listener {
+  virtual ~Listener() { }
+
+  virtual void incrementReferenceCount() = 0;
+
+  virtual void decrementReferenceCount() = 0;
+
+  virtual void onSendQueueEmpty() = 0;
+
+  virtual void onSendMedia() = 0;
+};
+
+extern Listener* listener;
+
+} // namespace ecovate
+
 namespace webrtc {
 class VideoCaptureModule;
 class VideoDecoder;
@@ -460,6 +478,8 @@ class WebRtcVideoMediaChannel : public rtc::MessageHandler,
   // aspect ratio
   int ratio_w_;
   int ratio_h_;
+
+  ecovate::Listener* listener_;
 };
 
 }  // namespace cricket
diff --git a/talk/session/media/srtpfilter.h b/talk/session/media/srtpfilter.h
index 51600236a..dbdc08e8b 100644
--- a/talk/session/media/srtpfilter.h
+++ b/talk/session/media/srtpfilter.h
@@ -227,12 +227,16 @@ class SrtpSession {
   sigslot::repeater3 SignalSrtpError;
 
+  // This was previously private, but because it's not thread-safe, we
+  // need to make it public and call it from application code prior to
+  // opening several WebRTC peer connections in parallel:
+  static bool Init();
+
  private:
   bool SetKey(int type, const std::string& cs, const uint8* key, int len);
   // Returns send stream current packet index from srtp db.
   bool GetSendStreamPacketIndex(void* data, int in_len, int64* index);
-  static bool Init();
   void HandleEvent(const srtp_event_data_t* ev);
   static void HandleEventThunk(srtp_event_data_t* ev);
diff --git a/webrtc/modules/pacing/include/paced_sender.h b/webrtc/modules/pacing/include/paced_sender.h
index ddd8e53b7..86b3ba26c 100644
--- a/webrtc/modules/pacing/include/paced_sender.h
+++ b/webrtc/modules/pacing/include/paced_sender.h
@@ -19,6 +19,24 @@
 #include "webrtc/system_wrappers/interface/thread_annotations.h"
 #include "webrtc/typedefs.h"
 
+namespace ecovate {
+
+struct Listener {
+  virtual ~Listener() { }
+
+  virtual void incrementReferenceCount() = 0;
+
+  virtual void decrementReferenceCount() = 0;
+
+  virtual void onSendQueueEmpty() = 0;
+
+  virtual void onSendMedia() = 0;
+};
+
+extern Listener* listener;
+
+} // namespace ecovate
+
 namespace webrtc {
 class Clock;
 class CriticalSectionWrapper;
@@ -157,6 +175,8 @@ class PacedSender : public Module {
       GUARDED_BY(critsect_);
   scoped_ptr<paced_sender::PacketList> low_priority_packets_
       GUARDED_BY(critsect_);
+
+  ecovate::Listener* listener_;
 };
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_PACED_SENDER_H_
diff --git a/webrtc/modules/pacing/paced_sender.cc b/webrtc/modules/pacing/paced_sender.cc
index 6204a9a06..e6df46bda 100644
--- a/webrtc/modules/pacing/paced_sender.cc
+++ b/webrtc/modules/pacing/paced_sender.cc
@@ -20,6 +20,12 @@
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace_event.h"
 
+namespace ecovate {
+
+Listener* listener = 0;
+
+} // namespace ecovate
+
 namespace {
 // Time limit in milliseconds between packet bursts.
 const int kMinPacketLimitMs = 5;
@@ -146,11 +152,21 @@ PacedSender::PacedSender(Clock* clock,
       capture_time_ms_last_sent_(0),
       high_priority_packets_(new paced_sender::PacketList),
       normal_priority_packets_(new paced_sender::PacketList),
-      low_priority_packets_(new paced_sender::PacketList) {
+      low_priority_packets_(new paced_sender::PacketList),
+      listener_(ecovate::listener)
+{
+  if (listener_) {
+    listener_->incrementReferenceCount();
+  }
+
   UpdateBytesPerInterval(kMinPacketLimitMs);
 }
 
-PacedSender::~PacedSender() {}
+PacedSender::~PacedSender() {
+  if (listener_) {
+    listener_->decrementReferenceCount();
+  }
+}
 
 void PacedSender::Pause() {
   CriticalSectionScoped cs(critsect_.get());
@@ -372,6 +388,9 @@ bool PacedSender::ShouldSendNextPacket(paced_sender::PacketList** packet_list) {
     *packet_list = low_priority_packets_.get();
     return true;
   }
+  if (listener_) {
+    listener_->onSendQueueEmpty();
+  }
   return false;
 }
diff --git a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 4901edff3..f7d5b09f3 100644
--- a/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -185,6 +185,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
   config_->rc_target_bitrate = inst->startBitrate;  // in kbit/s
   temporal_layers_->ConfigureBitrates(inst->startBitrate, inst->maxBitrate,
                                       inst->maxFramerate, config_);
+#ifndef ECOVATE_USE_VP8_DEFAULTS
   // setting the time base of the codec
   config_->g_timebase.num = 1;
   config_->g_timebase.den = 90000;
@@ -252,6 +253,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
   } else {
     config_->kf_mode = VPX_KF_DISABLED;
   }
+#endif  // not ECOVATE_USE_VP8_DEFAULTS
   switch (inst->codecSpecific.VP8.complexity) {
     case kComplexityHigh:
       cpu_speed_ = -5;
diff --git a/webrtc/modules/video_coding/main/source/receiver.cc b/webrtc/modules/video_coding/main/source/receiver.cc
index e179423a7..1dacbcdc3 100644
--- a/webrtc/modules/video_coding/main/source/receiver.cc
+++ b/webrtc/modules/video_coding/main/source/receiver.cc
@@ -197,6 +197,7 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
       timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
     }
   }
+  return frame;
 }
diff --git a/webrtc/modules/video_coding/main/source/video_sender.cc b/webrtc/modules/video_coding/main/source/video_sender.cc
index 38ecc5479..f8d1d5d7c 100644
--- a/webrtc/modules/video_coding/main/source/video_sender.cc
+++ b/webrtc/modules/video_coding/main/source/video_sender.cc
@@ -359,9 +359,12 @@ int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
   if (_nextFrameTypes[0] == kFrameEmpty) {
     return VCM_OK;
   }
+#ifndef ECOVATE_NO_FRAME_DROP
   if (_mediaOpt.DropFrame()) {
     return VCM_OK;
   }
+#endif
+
   _mediaOpt.UpdateContentData(contentMetrics);
   int32_t ret =
       _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
diff --git a/webrtc/modules/video_processing/main/source/spatial_resampler.cc b/webrtc/modules/video_processing/main/source/spatial_resampler.cc
index fd90c8f76..d12a2aa70 100644
--- a/webrtc/modules/video_processing/main/source/spatial_resampler.cc
+++ b/webrtc/modules/video_processing/main/source/spatial_resampler.cc
@@ -14,7 +14,11 @@ namespace webrtc {
 
 VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
+#ifdef ECOVATE_NO_SCALING
+    : resampling_mode_(kNoRescaling),
+#else
     : resampling_mode_(kFastRescaling),
+#endif
       target_width_(0),
       target_height_(0),
       scaler_() {}
diff --git a/webrtc/modules/video_render/video_render_frames.cc b/webrtc/modules/video_render/video_render_frames.cc
index d790877e3..60b949f25 100644
--- a/webrtc/modules/video_render/video_render_frames.cc
+++ b/webrtc/modules/video_render/video_render_frames.cc
@@ -105,7 +105,12 @@ I420VideoFrame* VideoRenderFrames::FrameToRender() {
   FrameList::iterator iter = incoming_frames_.begin();
   while(iter != incoming_frames_.end()) {
     I420VideoFrame* oldest_frame_in_list = *iter;
-    if (oldest_frame_in_list->render_time_ms() <=
+#ifdef ECOVATE_NO_VIDEO_DELAY
+    bool delay = false;
+#else
+    bool delay = true;
+#endif
+    if ((! delay) || oldest_frame_in_list->render_time_ms() <=
         TickTime::MillisecondTimestamp() + render_delay_ms_) {
       // This is the oldest one so far and it's OK to render.
       if (render_frame) {
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc
index 231dcfb3b..6bc703676 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video_engine/vie_capturer.cc
@@ -346,6 +346,9 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
+#ifdef ECOVATE_NO_FRAME_DROP
+  DeliverI420Frame(&video_frame);
+#else
   if (video_frame.native_handle() != NULL) {
     captured_frame_.reset(video_frame.CloneFrame());
  } else {
@@ -356,6 +359,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
   capture_event_.Set();
   overuse_detector_->FrameCaptured(captured_frame_->width(),
                                    captured_frame_->height());
+#endif
 }
 
 void ViECapturer::OnCaptureDelayChanged(const int32_t id,
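Usage note (not part of the patch): the ecovate::Listener hook above is picked up by assigning the ecovate::listener global before the first PacedSender or WebRtcVideoMediaChannel is constructed, since both constructors copy the pointer once at construction time. Below is a minimal application-side sketch; AppListener, InstallEcovateListener, and the atomic counter are illustrative assumptions, not part of the patch.

// Illustrative listener implementation. Only ecovate::Listener and the
// ecovate::listener global come from the patch; everything else is a sketch.
#include <atomic>
#include <cstdio>

#include "webrtc/modules/pacing/include/paced_sender.h"  // declares ecovate::Listener

class AppListener : public ecovate::Listener {
 public:
  // Called from the PacedSender and WebRtcVideoMediaChannel constructors and
  // destructors; an atomic counter is assumed here in case those objects are
  // created and destroyed on different threads.
  virtual void incrementReferenceCount() { refs_.fetch_add(1); }
  virtual void decrementReferenceCount() { refs_.fetch_sub(1); }

  // Called from PacedSender::ShouldSendNextPacket when all of the pacer's
  // packet queues are empty, and from WebRtcVideoMediaChannel::StartSend
  // when it succeeds.
  virtual void onSendQueueEmpty() { std::printf("send queue empty\n"); }
  virtual void onSendMedia() { std::printf("started sending media\n"); }

 private:
  std::atomic<int> refs_{0};
};

// Install the hook once, before the first peer connection (and therefore the
// first pacer or video channel) is created.
void InstallEcovateListener() {
  static AppListener app_listener;
  ecovate::listener = &app_listener;
}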
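A second note: the srtpfilter.h hunk makes SrtpSession::Init() public precisely so the application can run it once, on a single thread, before opening several peer connections in parallel (the comment added in the header states it is not thread-safe). A minimal sketch of that start-up call follows, assuming the patched header; the wrapper function name is hypothetical.

// One-time SRTP initialization, per the comment added to srtpfilter.h: call
// this from application code, on one thread, before opening WebRTC peer
// connections in parallel. Only cricket::SrtpSession::Init() comes from the
// patch; the wrapper is illustrative.
#include "talk/session/media/srtpfilter.h"

bool InitializeSrtpOnce() {
  return cricket::SrtpSession::Init();
}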