      1 /*
      2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
      3  *
      4  *  Use of this source code is governed by a BSD-style license
      5  *  that can be found in the LICENSE file in the root of the source
      6  *  tree. An additional intellectual property rights grant can be found
      7  *  in the file PATENTS.  All contributing project authors may
      8  *  be found in the AUTHORS file in the root of the source tree.
      9  */
     10 
     11 #include "webrtc/voice_engine/channel.h"
     12 
     13 #include "webrtc/base/timeutils.h"
     14 #include "webrtc/common.h"
     15 #include "webrtc/modules/audio_device/include/audio_device.h"
     16 #include "webrtc/modules/audio_processing/include/audio_processing.h"
     17 #include "webrtc/modules/interface/module_common_types.h"
     18 #include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
     19 #include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
     20 #include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
     21 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
     22 #include "webrtc/modules/utility/interface/audio_frame_operations.h"
     23 #include "webrtc/modules/utility/interface/process_thread.h"
     24 #include "webrtc/modules/utility/interface/rtp_dump.h"
     25 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
     26 #include "webrtc/system_wrappers/interface/logging.h"
     27 #include "webrtc/system_wrappers/interface/trace.h"
     28 #include "webrtc/video_engine/include/vie_network.h"
     29 #include "webrtc/voice_engine/include/voe_base.h"
     30 #include "webrtc/voice_engine/include/voe_external_media.h"
     31 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
     32 #include "webrtc/voice_engine/output_mixer.h"
     33 #include "webrtc/voice_engine/statistics.h"
     34 #include "webrtc/voice_engine/transmit_mixer.h"
     35 #include "webrtc/voice_engine/utility.h"
     36 
     37 #if defined(_WIN32)
     38 #include <Qos.h>
     39 #endif
     40 
     41 namespace webrtc {
     42 namespace voe {
     43 
     44 // Extend the default RTCP statistics struct with max_jitter, defined as the
     45 // maximum jitter value seen in an RTCP report block.
     46 struct ChannelStatistics : public RtcpStatistics {
     47   ChannelStatistics() : rtcp(), max_jitter(0) {}
     48 
     49   RtcpStatistics rtcp;
     50   uint32_t max_jitter;
     51 };
     52 
     53 // Statistics callback, called at each generation of a new RTCP report block.
     54 class StatisticsProxy : public RtcpStatisticsCallback {
     55  public:
     56   StatisticsProxy(uint32_t ssrc)
     57    : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
     58      ssrc_(ssrc) {}
     59   virtual ~StatisticsProxy() {}
     60 
     61   virtual void StatisticsUpdated(const RtcpStatistics& statistics,
     62                                  uint32_t ssrc) OVERRIDE {
     63     if (ssrc != ssrc_)
     64       return;
     65 
     66     CriticalSectionScoped cs(stats_lock_.get());
     67     stats_.rtcp = statistics;
     68     if (statistics.jitter > stats_.max_jitter) {
     69       stats_.max_jitter = statistics.jitter;
     70     }
     71   }
     72 
     73   void ResetStatistics() {
     74     CriticalSectionScoped cs(stats_lock_.get());
     75     stats_ = ChannelStatistics();
     76   }
     77 
     78   ChannelStatistics GetStats() {
     79     CriticalSectionScoped cs(stats_lock_.get());
     80     return stats_;
     81   }
     82 
     83  private:
     84   // StatisticsUpdated calls are triggered from threads in the RTP module,
     85   // while GetStats calls can be triggered from the public voice engine API,
     86   // hence synchronization is needed.
     87   scoped_ptr<CriticalSectionWrapper> stats_lock_;
     88   const uint32_t ssrc_;
     89   ChannelStatistics stats_;
     90 };
     91 
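        // Adapter that forwards BitrateObserver::OnNetworkChanged() callbacks
        // (presumably issued by this channel's bitrate controller) to the owning
        // Channel.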
     92 class VoEBitrateObserver : public BitrateObserver {
     93  public:
     94   explicit VoEBitrateObserver(Channel* owner)
     95       : owner_(owner) {}
     96   virtual ~VoEBitrateObserver() {}
     97 
     98   // Implements BitrateObserver.
     99   virtual void OnNetworkChanged(const uint32_t bitrate_bps,
    100                                 const uint8_t fraction_lost,
    101                                 const uint32_t rtt) OVERRIDE {
    102     // |fraction_lost| has a scale of 0 - 255.
    103     owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
    104   }
    105 
    106  private:
    107   Channel* owner_;
    108 };
    109 
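        // AudioPacketizationCallback implementation: the ACM calls SendData() each
        // time an encoded audio frame is ready, and the frame is handed to the
        // RTP/RTCP module for packetization and transmission.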
    110 int32_t
    111 Channel::SendData(FrameType frameType,
    112                   uint8_t   payloadType,
    113                   uint32_t  timeStamp,
    114                   const uint8_t*  payloadData,
    115                   uint16_t  payloadSize,
    116                   const RTPFragmentationHeader* fragmentation)
    117 {
    118     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    119                  "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
    120                  " payloadSize=%u, fragmentation=0x%x)",
    121                  frameType, payloadType, timeStamp, payloadSize, fragmentation);
    122 
    123     if (_includeAudioLevelIndication)
    124     {
    125         // Store current audio level in the RTP/RTCP module.
    126         // The level will be used in combination with voice-activity state
    127         // (frameType) to add an RTP header extension.
    128         _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
    129     }
    130 
    131     // Push data from ACM to RTP/RTCP-module to deliver audio frame for
    132     // packetization.
    133     // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
    134     if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
    135                                         payloadType,
    136                                         timeStamp,
    137                                         // Leaving the time when this frame was
    138                                         // received from the capture device as
    139                                         // undefined for voice for now.
    140                                         -1,
    141                                         payloadData,
    142                                         payloadSize,
    143                                         fragmentation) == -1)
    144     {
    145         _engineStatisticsPtr->SetLastError(
    146             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
    147             "Channel::SendData() failed to send data to RTP/RTCP module");
    148         return -1;
    149     }
    150 
    151     _lastLocalTimeStamp = timeStamp;
    152     _lastPayloadType = payloadType;
    153 
    154     return 0;
    155 }
    156 
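        // VAD callback from the ACM (registered via RegisterVADCallback() in
        // Init()); records whether the last encoded frame was classified as speech.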
    157 int32_t
    158 Channel::InFrameType(int16_t frameType)
    159 {
    160     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    161                  "Channel::InFrameType(frameType=%d)", frameType);
    162 
    163     CriticalSectionScoped cs(&_callbackCritSect);
    164     // 1 indicates speech
    165     _sendFrameType = (frameType == 1) ? 1 : 0;
    166     return 0;
    167 }
    168 
    169 int32_t
    170 Channel::OnRxVadDetected(int vadDecision)
    171 {
    172     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
    173                  "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
    174 
    175     CriticalSectionScoped cs(&_callbackCritSect);
    176     if (_rxVadObserverPtr)
    177     {
    178         _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
    179     }
    180 
    181     return 0;
    182 }
    183 
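        // webrtc::Transport implementation. SendPacket() and SendRTCPPacket() are
        // invoked by the RTP/RTCP module (registered as its outgoing transport in
        // the constructor) and forward the packets to the registered external or
        // internal transport.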
    184 int
    185 Channel::SendPacket(int channel, const void *data, int len)
    186 {
    187     channel = VoEChannelId(channel);
    188     assert(channel == _channelId);
    189 
    190     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    191                  "Channel::SendPacket(channel=%d, len=%d)", channel, len);
    192 
    193     CriticalSectionScoped cs(&_callbackCritSect);
    194 
    195     if (_transportPtr == NULL)
    196     {
    197         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
    198                      "Channel::SendPacket() failed to send RTP packet due to"
    199                      " invalid transport object");
    200         return -1;
    201     }
    202 
    203     uint8_t* bufferToSendPtr = (uint8_t*)data;
    204     int32_t bufferLength = len;
    205 
    206     // Dump the RTP packet to a file (if RTP dump is enabled).
    207     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    208     {
    209         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
    210                      VoEId(_instanceId,_channelId),
    211                      "Channel::SendPacket() RTP dump to output file failed");
    212     }
    213 
    214     int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
    215                                       bufferLength);
    216     if (n < 0) {
    217       std::string transport_name =
    218           _externalTransport ? "external transport" : "WebRtc sockets";
    219       WEBRTC_TRACE(kTraceError, kTraceVoice,
    220                    VoEId(_instanceId,_channelId),
    221                    "Channel::SendPacket() RTP transmission using %s failed",
    222                    transport_name.c_str());
    223       return -1;
    224     }
    225     return n;
    226 }
    227 
    228 int
    229 Channel::SendRTCPPacket(int channel, const void *data, int len)
    230 {
    231     channel = VoEChannelId(channel);
    232     assert(channel == _channelId);
    233 
    234     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    235                  "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
    236 
    237     CriticalSectionScoped cs(&_callbackCritSect);
    238     if (_transportPtr == NULL)
    239     {
    240         WEBRTC_TRACE(kTraceError, kTraceVoice,
    241                      VoEId(_instanceId,_channelId),
    242                      "Channel::SendRTCPPacket() failed to send RTCP packet"
    243                      " due to invalid transport object");
    244         return -1;
    245     }
    246 
    247     uint8_t* bufferToSendPtr = (uint8_t*)data;
    248     int32_t bufferLength = len;
    249 
    250     // Dump the RTCP packet to a file (if RTP dump is enabled).
    251     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
    252     {
    253         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
    254                      VoEId(_instanceId,_channelId),
    255                      "Channel::SendRTCPPacket() RTCP dump to output file failed");
    256     }
    257 
    258     int n = _transportPtr->SendRTCPPacket(channel,
    259                                           bufferToSendPtr,
    260                                           bufferLength);
    261     if (n < 0) {
    262       std::string transport_name =
    263           _externalTransport ? "external transport" : "WebRtc sockets";
    264       WEBRTC_TRACE(kTraceInfo, kTraceVoice,
    265                    VoEId(_instanceId,_channelId),
    266                    "Channel::SendRTCPPacket() transmission using %s failed",
    267                    transport_name.c_str());
    268       return -1;
    269     }
    270     return n;
    271 }
    272 
    273 void
    274 Channel::OnPlayTelephoneEvent(int32_t id,
    275                               uint8_t event,
    276                               uint16_t lengthMs,
    277                               uint8_t volume)
    278 {
    279     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    280                  "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
    281                  " volume=%u)", id, event, lengthMs, volume);
    282 
    283     if (!_playOutbandDtmfEvent || (event > 15))
    284     {
    285         // Ignore callback since feedback is disabled or event is not a
    286         // Dtmf tone event.
    287         return;
    288     }
    289 
    290     assert(_outputMixerPtr != NULL);
    291 
    292     // Start playing out the Dtmf tone (if playout is enabled).
    293     // Reduce the tone length by 80 ms to reduce the risk of echo.
    294     _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
    295 }
    296 
    297 void
    298 Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
    299 {
    300     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    301                  "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
    302                  id, ssrc);
    303 
    304     // Update the remote SSRC so that the NTP timing used for A/V sync is kept current.
    305     _rtpRtcpModule->SetRemoteSSRC(ssrc);
    306 }
    307 
    308 void Channel::OnIncomingCSRCChanged(int32_t id,
    309                                     uint32_t CSRC,
    310                                     bool added)
    311 {
    312     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    313                  "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
    314                  id, CSRC, added);
    315 }
    316 
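        // Clears both the per-SSRC receive statistics kept by the RTP module and the
        // locally accumulated statistics (including max_jitter) in the proxy.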
    317 void Channel::ResetStatistics(uint32_t ssrc) {
    318   StreamStatistician* statistician =
    319       rtp_receive_statistics_->GetStatistician(ssrc);
    320   if (statistician) {
    321     statistician->ResetStatistics();
    322   }
    323   statistics_proxy_->ResetStatistics();
    324 }
    325 
    326 void
    327 Channel::OnApplicationDataReceived(int32_t id,
    328                                    uint8_t subType,
    329                                    uint32_t name,
    330                                    uint16_t length,
    331                                    const uint8_t* data)
    332 {
    333     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    334                  "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
    335                  " name=%u, length=%u)",
    336                  id, subType, name, length);
    337 
    338     int32_t channel = VoEChannelId(id);
    339     assert(channel == _channelId);
    340 
    341     if (_rtcpObserver)
    342     {
    343         CriticalSectionScoped cs(&_callbackCritSect);
    344 
    345         if (_rtcpObserverPtr)
    346         {
    347             _rtcpObserverPtr->OnApplicationDataReceived(channel,
    348                                                         subType,
    349                                                         name,
    350                                                         data,
    351                                                         length);
    352         }
    353     }
    354 }
    355 
    356 int32_t
    357 Channel::OnInitializeDecoder(
    358     int32_t id,
    359     int8_t payloadType,
    360     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    361     int frequency,
    362     uint8_t channels,
    363     uint32_t rate)
    364 {
    365     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    366                  "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
    367                  "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
    368                  id, payloadType, payloadName, frequency, channels, rate);
    369 
    370     assert(VoEChannelId(id) == _channelId);
    371 
    372     CodecInst receiveCodec = {0};
    373     CodecInst dummyCodec = {0};
    374 
    375     receiveCodec.pltype = payloadType;
    376     receiveCodec.plfreq = frequency;
    377     receiveCodec.channels = channels;
    378     receiveCodec.rate = rate;
    379     strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
    380 
    381     audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
    382     receiveCodec.pacsize = dummyCodec.pacsize;
    383 
    384     // Register the new codec to the ACM
    385     if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1)
    386     {
    387         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
    388                      VoEId(_instanceId, _channelId),
    389                      "Channel::OnInitializeDecoder() invalid codec ("
    390                      "pt=%d, name=%s) received - 1", payloadType, payloadName);
    391         _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    392         return -1;
    393     }
    394 
    395     return 0;
    396 }
    397 
    398 void
    399 Channel::OnPacketTimeout(int32_t id)
    400 {
    401     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    402                  "Channel::OnPacketTimeout(id=%d)", id);
    403 
    404     CriticalSectionScoped cs(_callbackCritSectPtr);
    405     if (_voiceEngineObserverPtr)
    406     {
    407         if (channel_state_.Get().receiving || _externalTransport)
    408         {
    409             int32_t channel = VoEChannelId(id);
    410             assert(channel == _channelId);
    411             // Ensure that next OnReceivedPacket() callback will trigger
    412             // a VE_PACKET_RECEIPT_RESTARTED callback.
    413             _rtpPacketTimedOut = true;
    414             // Deliver callback to the observer
    415             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
    416                          VoEId(_instanceId,_channelId),
    417                          "Channel::OnPacketTimeout() => "
    418                          "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
    419             _voiceEngineObserverPtr->CallbackOnError(channel,
    420                                                      VE_RECEIVE_PACKET_TIMEOUT);
    421         }
    422     }
    423 }
    424 
    425 void
    426 Channel::OnReceivedPacket(int32_t id,
    427                           RtpRtcpPacketType packetType)
    428 {
    429     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    430                  "Channel::OnReceivedPacket(id=%d, packetType=%d)",
    431                  id, packetType);
    432 
    433     assert(VoEChannelId(id) == _channelId);
    434 
    435     // Notify only for the case when we have restarted an RTP session.
    436     if (_rtpPacketTimedOut && (kPacketRtp == packetType))
    437     {
    438         CriticalSectionScoped cs(_callbackCritSectPtr);
    439         if (_voiceEngineObserverPtr)
    440         {
    441             int32_t channel = VoEChannelId(id);
    442             assert(channel == _channelId);
    443             // Reset timeout mechanism
    444             _rtpPacketTimedOut = false;
    445             // Deliver callback to the observer
    446             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
    447                          VoEId(_instanceId,_channelId),
    448                          "Channel::OnReceivedPacket() =>"
    449                          " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
    450             _voiceEngineObserverPtr->CallbackOnError(
    451                 channel,
    452                 VE_PACKET_RECEIPT_RESTARTED);
    453         }
    454     }
    455 }
    456 
    457 void
    458 Channel::OnPeriodicDeadOrAlive(int32_t id,
    459                                RTPAliveType alive)
    460 {
    461     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    462                  "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
    463 
    464     {
    465         CriticalSectionScoped cs(&_callbackCritSect);
    466         if (!_connectionObserver)
    467             return;
    468     }
    469 
    470     int32_t channel = VoEChannelId(id);
    471     assert(channel == _channelId);
    472 
    473     // Use Alive as default to limit risk of false Dead detections
    474     bool isAlive(true);
    475 
    476     // Always mark the connection as Dead when the module reports kRtpDead
    477     if (kRtpDead == alive)
    478     {
    479         isAlive = false;
    480     }
    481 
    482     // It is possible that the connection is alive even if no RTP packet has
    483     // been received for a long time since the other side might use VAD/DTX
    484     // and a low SID-packet update rate.
    485     if ((kRtpNoRtp == alive) && channel_state_.Get().playing)
    486     {
    487         // Detect Alive for all NetEQ states except for the case when we are
    488         // in PLC_CNG state.
    489         // PLC_CNG <=> background noise only due to long expand or error.
    490         // Note that the case where the other side stops sending during CNG
    491         // state will be detected as Alive. Dead is not set until after
    492         // missing RTCP packets for at least twelve seconds (handled
    493         // internally by the RTP/RTCP module).
    494         isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
    495     }
    496 
    497     // Send callback to the registered observer
    498     if (_connectionObserver)
    499     {
    500         CriticalSectionScoped cs(&_callbackCritSect);
    501         if (_connectionObserverPtr)
    502         {
    503             _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
    504         }
    505     }
    506 }
    507 
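        // RtpData callback: the RTP receiver delivers each parsed payload here. The
        // payload is pushed into the ACM for decoding, the playout delay estimate is
        // updated, and any missing sequence numbers reported by the ACM's NACK list
        // are handed to ResendPackets(), which presumably issues a NACK request.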
    508 int32_t
    509 Channel::OnReceivedPayloadData(const uint8_t* payloadData,
    510                                uint16_t payloadSize,
    511                                const WebRtcRTPHeader* rtpHeader)
    512 {
    513     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    514                  "Channel::OnReceivedPayloadData(payloadSize=%d,"
    515                  " payloadType=%u, audioChannel=%u)",
    516                  payloadSize,
    517                  rtpHeader->header.payloadType,
    518                  rtpHeader->type.Audio.channel);
    519 
    520     if (!channel_state_.Get().playing)
    521     {
    522         // Avoid inserting into NetEQ when we are not playing. Count the
    523         // packet as discarded.
    524         WEBRTC_TRACE(kTraceStream, kTraceVoice,
    525                      VoEId(_instanceId, _channelId),
    526                      "received packet is discarded since playing is not"
    527                      " activated");
    528         _numberOfDiscardedPackets++;
    529         return 0;
    530     }
    531 
    532     // Push the incoming payload (parsed and ready for decoding) into the ACM
    533     if (audio_coding_->IncomingPacket(payloadData,
    534                                       payloadSize,
    535                                       *rtpHeader) != 0)
    536     {
    537         _engineStatisticsPtr->SetLastError(
    538             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
    539             "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    540         return -1;
    541     }
    542 
    543     // Update the packet delay.
    544     UpdatePacketDelay(rtpHeader->header.timestamp,
    545                       rtpHeader->header.sequenceNumber);
    546 
    547     uint16_t round_trip_time = 0;
    548     _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
    549                         NULL, NULL, NULL);
    550 
    551     std::vector<uint16_t> nack_list = audio_coding_->GetNackList(
    552         round_trip_time);
    553     if (!nack_list.empty()) {
    554       // Can't use nack_list.data() since it's not supported by all
    555       // compilers.
    556       ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
    557     }
    558     return 0;
    559 }
    560 
    561 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
    562                                 int rtp_packet_length) {
    563   RTPHeader header;
    564   if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    565     WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
    566                  "IncomingPacket invalid RTP header");
    567     return false;
    568   }
    569   header.payload_type_frequency =
    570       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
    571   if (header.payload_type_frequency < 0)
    572     return false;
    573   return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
    574 }
    575 
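        // Called by the output mixer to pull 10 ms of decoded audio for this
        // channel. The frame is optionally processed by the receive-side APM, scaled
        // and panned, mixed with file playout, passed to external media processing
        // and file recording, and stamped with the elapsed/NTP times used for A/V
        // sync.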
    576 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
    577 {
    578     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    579                  "Channel::GetAudioFrame(id=%d)", id);
    580 
    581     // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
    582     if (audio_coding_->PlayoutData10Ms(audioFrame.sample_rate_hz_,
    583                                        &audioFrame) == -1)
    584     {
    585         WEBRTC_TRACE(kTraceError, kTraceVoice,
    586                      VoEId(_instanceId,_channelId),
    587                      "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    588         // In all likelihood, the audio in this frame is garbage. We return an
    589         // error so that the audio mixer module doesn't add it to the mix. As
    590         // a result, it won't be played out and the actions skipped here are
    591         // irrelevant.
    592         return -1;
    593     }
    594 
    595     if (_RxVadDetection)
    596     {
    597         UpdateRxVadDetection(audioFrame);
    598     }
    599 
    600     // Convert module ID to internal VoE channel ID
    601     audioFrame.id_ = VoEChannelId(audioFrame.id_);
    602     // Store speech type for dead-or-alive detection
    603     _outputSpeechType = audioFrame.speech_type_;
    604 
    605     ChannelState::State state = channel_state_.Get();
    606 
    607     if (state.rx_apm_is_enabled) {
    608       int err = rx_audioproc_->ProcessStream(&audioFrame);
    609       if (err) {
    610         LOG(LS_ERROR) << "ProcessStream() error: " << err;
    611         assert(false);
    612       }
    613     }
    614 
    615     float output_gain = 1.0f;
    616     float left_pan = 1.0f;
    617     float right_pan = 1.0f;
    618     {
    619       CriticalSectionScoped cs(&volume_settings_critsect_);
    620       output_gain = _outputGain;
    621       left_pan = _panLeft;
    622       right_pan = _panRight;
    623     }
    624 
    625     // Output volume scaling
    626     if (output_gain < 0.99f || output_gain > 1.01f)
    627     {
    628         AudioFrameOperations::ScaleWithSat(output_gain, audioFrame);
    629     }
    630 
    631     // Scale left and/or right channel(s) if stereo and master balance is
    632     // active
    633 
    634     if (left_pan != 1.0f || right_pan != 1.0f)
    635     {
    636         if (audioFrame.num_channels_ == 1)
    637         {
    638             // Emulate stereo mode since panning is active.
    639             // The mono signal is copied to both left and right channels here.
    640             AudioFrameOperations::MonoToStereo(&audioFrame);
    641         }
    642         // For true stereo mode (when we are receiving a stereo signal), no
    643         // action is needed.
    644 
    645         // Do the panning operation (the audio frame contains stereo at this
    646         // stage)
    647         AudioFrameOperations::Scale(left_pan, right_pan, audioFrame);
    648     }
    649 
    650     // Mix decoded PCM output with file if file mixing is enabled
    651     if (state.output_file_playing)
    652     {
    653         MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
    654     }
    655 
    656     // External media
    657     if (_outputExternalMedia)
    658     {
    659         CriticalSectionScoped cs(&_callbackCritSect);
    660         const bool isStereo = (audioFrame.num_channels_ == 2);
    661         if (_outputExternalMediaCallbackPtr)
    662         {
    663             _outputExternalMediaCallbackPtr->Process(
    664                 _channelId,
    665                 kPlaybackPerChannel,
    666                 (int16_t*)audioFrame.data_,
    667                 audioFrame.samples_per_channel_,
    668                 audioFrame.sample_rate_hz_,
    669                 isStereo);
    670         }
    671     }
    672 
    673     // Record playout if enabled
    674     {
    675         CriticalSectionScoped cs(&_fileCritSect);
    676 
    677         if (_outputFileRecording && _outputFileRecorderPtr)
    678         {
    679             _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
    680         }
    681     }
    682 
    683     // Measure audio level (0-9)
    684     _outputAudioLevel.ComputeLevel(audioFrame);
    685 
    686     if (capture_start_rtp_time_stamp_ < 0 && audioFrame.timestamp_ != 0) {
    687       // The first frame with a valid rtp timestamp.
    688       capture_start_rtp_time_stamp_ = audioFrame.timestamp_;
    689     }
    690 
    691     if (capture_start_rtp_time_stamp_ >= 0) {
    692       // audioFrame.timestamp_ should be valid from now on.
    693 
    694       // Compute elapsed time.
    695       int64_t unwrap_timestamp =
    696           rtp_ts_wraparound_handler_->Unwrap(audioFrame.timestamp_);
    697       audioFrame.elapsed_time_ms_ =
    698           (unwrap_timestamp - capture_start_rtp_time_stamp_) /
    699           (GetPlayoutFrequency() / 1000);
    700 
    701       {
    702         CriticalSectionScoped lock(ts_stats_lock_.get());
    703         // Compute ntp time.
    704         audioFrame.ntp_time_ms_ = ntp_estimator_.Estimate(
    705             audioFrame.timestamp_);
    706         // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
    707         if (audioFrame.ntp_time_ms_ > 0) {
    708           // Compute |capture_start_ntp_time_ms_| so that
    709           // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
    710           capture_start_ntp_time_ms_ =
    711               audioFrame.ntp_time_ms_ - audioFrame.elapsed_time_ms_;
    712         }
    713       }
    714     }
    715 
    716     return 0;
    717 }
    718 
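        // Returns the highest sample rate this channel needs from the mixer: the
        // larger of the ACM playout and receive frequencies, raised further if an
        // output file is being played at a higher rate.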
    719 int32_t
    720 Channel::NeededFrequency(int32_t id)
    721 {
    722     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    723                  "Channel::NeededFrequency(id=%d)", id);
    724 
    725     int highestNeeded = 0;
    726 
    727     // Determine highest needed receive frequency
    728     int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
    729 
    730     // Return the bigger of playout and receive frequency in the ACM.
    731     if (audio_coding_->PlayoutFrequency() > receiveFrequency)
    732     {
    733         highestNeeded = audio_coding_->PlayoutFrequency();
    734     }
    735     else
    736     {
    737         highestNeeded = receiveFrequency;
    738     }
    739 
    740     // Special case: if we're playing a file on the playout side we take
    741     // that frequency into consideration as well.
    742     // This is not needed on the sending side, since the codec will
    743     // limit the spectrum anyway.
    744     if (channel_state_.Get().output_file_playing)
    745     {
    746         CriticalSectionScoped cs(&_fileCritSect);
    747         if (_outputFilePlayerPtr)
    748         {
    749             if (_outputFilePlayerPtr->Frequency() > highestNeeded)
    750             {
    751                 highestNeeded = _outputFilePlayerPtr->Frequency();
    752             }
    753         }
    754     }
    755 
    756     return highestNeeded;
    757 }
    758 
    759 int32_t
    760 Channel::CreateChannel(Channel*& channel,
    761                        int32_t channelId,
    762                        uint32_t instanceId,
    763                        const Config& config)
    764 {
    765     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
    766                  "Channel::CreateChannel(channelId=%d, instanceId=%d)",
    767                  channelId, instanceId);
    768 
    769     channel = new Channel(channelId, instanceId, config);
    770     if (channel == NULL)
    771     {
    772         WEBRTC_TRACE(kTraceMemory, kTraceVoice,
    773                      VoEId(instanceId,channelId),
    774                      "Channel::CreateChannel() unable to allocate memory for"
    775                      " channel");
    776         return -1;
    777     }
    778     return 0;
    779 }
    780 
    781 void
    782 Channel::PlayNotification(int32_t id, uint32_t durationMs)
    783 {
    784     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    785                  "Channel::PlayNotification(id=%d, durationMs=%d)",
    786                  id, durationMs);
    787 
    788     // Not implemented yet.
    789 }
    790 
    791 void
    792 Channel::RecordNotification(int32_t id, uint32_t durationMs)
    793 {
    794     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    795                  "Channel::RecordNotification(id=%d, durationMs=%d)",
    796                  id, durationMs);
    797 
    798     // Not implemented yet.
    799 }
    800 
    801 void
    802 Channel::PlayFileEnded(int32_t id)
    803 {
    804     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    805                  "Channel::PlayFileEnded(id=%d)", id);
    806 
    807     if (id == _inputFilePlayerId)
    808     {
    809         channel_state_.SetInputFilePlaying(false);
    810         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
    811                      VoEId(_instanceId,_channelId),
    812                      "Channel::PlayFileEnded() => input file player module is"
    813                      " shut down");
    814     }
    815     else if (id == _outputFilePlayerId)
    816     {
    817         channel_state_.SetOutputFilePlaying(false);
    818         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
    819                      VoEId(_instanceId,_channelId),
    820                      "Channel::PlayFileEnded() => output file player module is"
    821                      " shut down");
    822     }
    823 }
    824 
    825 void
    826 Channel::RecordFileEnded(int32_t id)
    827 {
    828     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    829                  "Channel::RecordFileEnded(id=%d)", id);
    830 
    831     assert(id == _outputFileRecorderId);
    832 
    833     CriticalSectionScoped cs(&_fileCritSect);
    834 
    835     _outputFileRecording = false;
    836     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
    837                  VoEId(_instanceId,_channelId),
    838                  "Channel::RecordFileEnded() => output file recorder module is"
    839                  " shut down");
    840 }
    841 
    842 Channel::Channel(int32_t channelId,
    843                  uint32_t instanceId,
    844                  const Config& config) :
    845     _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    846     _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    847     volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
    848     _instanceId(instanceId),
    849     _channelId(channelId),
    850     rtp_header_parser_(RtpHeaderParser::Create()),
    851     rtp_payload_registry_(
    852         new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
    853     rtp_receive_statistics_(ReceiveStatistics::Create(
    854         Clock::GetRealTimeClock())),
    855     rtp_receiver_(RtpReceiver::CreateAudioReceiver(
    856         VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
    857         this, this, rtp_payload_registry_.get())),
    858     telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
    859     audio_coding_(AudioCodingModule::Create(
    860         VoEModuleId(instanceId, channelId))),
    861     _rtpDumpIn(*RtpDump::CreateRtpDump()),
    862     _rtpDumpOut(*RtpDump::CreateRtpDump()),
    863     _outputAudioLevel(),
    864     _externalTransport(false),
    865     _audioLevel_dBov(0),
    866     _inputFilePlayerPtr(NULL),
    867     _outputFilePlayerPtr(NULL),
    868     _outputFileRecorderPtr(NULL),
    869     // Avoid conflicts with other channels by adding 1024 - 1026;
    870     // we won't use as many as 1024 channels.
    871     _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
    872     _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
    873     _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
    874     _outputFileRecording(false),
    875     _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
    876     _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
    877     _outputExternalMedia(false),
    878     _inputExternalMediaCallbackPtr(NULL),
    879     _outputExternalMediaCallbackPtr(NULL),
    880     _timeStamp(0), // This is just an offset; the RTP module will add its own random offset.
    881     _sendTelephoneEventPayloadType(106),
    882     ntp_estimator_(Clock::GetRealTimeClock()),
    883     jitter_buffer_playout_timestamp_(0),
    884     playout_timestamp_rtp_(0),
    885     playout_timestamp_rtcp_(0),
    886     playout_delay_ms_(0),
    887     _numberOfDiscardedPackets(0),
    888     send_sequence_number_(0),
    889     ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
    890     rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
    891     capture_start_rtp_time_stamp_(-1),
    892     capture_start_ntp_time_ms_(-1),
    893     _engineStatisticsPtr(NULL),
    894     _outputMixerPtr(NULL),
    895     _transmitMixerPtr(NULL),
    896     _moduleProcessThreadPtr(NULL),
    897     _audioDeviceModulePtr(NULL),
    898     _voiceEngineObserverPtr(NULL),
    899     _callbackCritSectPtr(NULL),
    900     _transportPtr(NULL),
    901     _rxVadObserverPtr(NULL),
    902     _oldVadDecision(-1),
    903     _sendFrameType(0),
    904     _rtcpObserverPtr(NULL),
    905     _externalPlayout(false),
    906     _externalMixing(false),
    907     _mixFileWithMicrophone(false),
    908     _rtcpObserver(false),
    909     _mute(false),
    910     _panLeft(1.0f),
    911     _panRight(1.0f),
    912     _outputGain(1.0f),
    913     _playOutbandDtmfEvent(false),
    914     _playInbandDtmfEvent(false),
    915     _lastLocalTimeStamp(0),
    916     _lastPayloadType(0),
    917     _includeAudioLevelIndication(false),
    918     _rtpPacketTimedOut(false),
    919     _rtpPacketTimeOutIsEnabled(false),
    920     _rtpTimeOutSeconds(0),
    921     _connectionObserver(false),
    922     _connectionObserverPtr(NULL),
    923     _outputSpeechType(AudioFrame::kNormalSpeech),
    924     vie_network_(NULL),
    925     video_channel_(-1),
    926     _average_jitter_buffer_delay_us(0),
    927     least_required_delay_ms_(0),
    928     _previousTimestamp(0),
    929     _recPacketDelayMs(20),
    930     _RxVadDetection(false),
    931     _rxAgcIsEnabled(false),
    932     _rxNsIsEnabled(false),
    933     restored_packet_in_use_(false),
    934     bitrate_controller_(
    935         BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
    936                                                    true)),
    937     rtcp_bandwidth_observer_(
    938         bitrate_controller_->CreateRtcpBandwidthObserver()),
    939     send_bitrate_observer_(new VoEBitrateObserver(this)),
    940     network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock()))
    941 {
    942     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
    943                  "Channel::Channel() - ctor");
    944     _inbandDtmfQueue.ResetDtmf();
    945     _inbandDtmfGenerator.Init();
    946     _outputAudioLevel.Clear();
    947 
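            // Wire up the RTP/RTCP module: this channel acts as its outgoing
            // transport, RTCP feedback sink and audio-message callback, and the
            // shared receive statistics and RTCP bandwidth observer are attached.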
    948     RtpRtcp::Configuration configuration;
    949     configuration.id = VoEModuleId(instanceId, channelId);
    950     configuration.audio = true;
    951     configuration.outgoing_transport = this;
    952     configuration.rtcp_feedback = this;
    953     configuration.audio_messages = this;
    954     configuration.receive_statistics = rtp_receive_statistics_.get();
    955     configuration.bandwidth_callback = rtcp_bandwidth_observer_.get();
    956 
    957     _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
    958 
    959     statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
    960     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
    961         statistics_proxy_.get());
    962 
    963     Config audioproc_config;
    964     audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
    965     rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
    966 }
    967 
    968 Channel::~Channel()
    969 {
    970     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
    971     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
    972                  "Channel::~Channel() - dtor");
    973 
    974     if (_outputExternalMedia)
    975     {
    976         DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
    977     }
    978     if (channel_state_.Get().input_external_media)
    979     {
    980         DeRegisterExternalMediaProcessing(kRecordingPerChannel);
    981     }
    982     StopSend();
    983     StopPlayout();
    984 
    985     {
    986         CriticalSectionScoped cs(&_fileCritSect);
    987         if (_inputFilePlayerPtr)
    988         {
    989             _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    990             _inputFilePlayerPtr->StopPlayingFile();
    991             FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
    992             _inputFilePlayerPtr = NULL;
    993         }
    994         if (_outputFilePlayerPtr)
    995         {
    996             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
    997             _outputFilePlayerPtr->StopPlayingFile();
    998             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
    999             _outputFilePlayerPtr = NULL;
   1000         }
   1001         if (_outputFileRecorderPtr)
   1002         {
   1003             _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
   1004             _outputFileRecorderPtr->StopRecording();
   1005             FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   1006             _outputFileRecorderPtr = NULL;
   1007         }
   1008     }
   1009 
   1010     // The order to safely shutdown modules in a channel is:
   1011     // 1. De-register callbacks in modules
   1012     // 2. De-register modules in process thread
   1013     // 3. Destroy modules
   1014     if (audio_coding_->RegisterTransportCallback(NULL) == -1)
   1015     {
   1016         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1017                      VoEId(_instanceId,_channelId),
   1018                      "~Channel() failed to de-register transport callback"
   1019                      " (Audio coding module)");
   1020     }
   1021     if (audio_coding_->RegisterVADCallback(NULL) == -1)
   1022     {
   1023         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1024                      VoEId(_instanceId,_channelId),
   1025                      "~Channel() failed to de-register VAD callback"
   1026                      " (Audio coding module)");
   1027     }
   1028     // De-register modules in process thread
   1029     if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
   1030     {
   1031         WEBRTC_TRACE(kTraceInfo, kTraceVoice,
   1032                      VoEId(_instanceId,_channelId),
   1033                      "~Channel() failed to deregister RTP/RTCP module");
   1034     }
   1035     // End of modules shutdown
   1036 
   1037     // Delete other objects
   1038     if (vie_network_) {
   1039       vie_network_->Release();
   1040       vie_network_ = NULL;
   1041     }
   1042     RtpDump::DestroyRtpDump(&_rtpDumpIn);
   1043     RtpDump::DestroyRtpDump(&_rtpDumpOut);
   1044     delete &_callbackCritSect;
   1045     delete &_fileCritSect;
   1046     delete &volume_settings_critsect_;
   1047 }
   1048 
   1049 int32_t
   1050 Channel::Init()
   1051 {
   1052     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1053                  "Channel::Init()");
   1054 
   1055     channel_state_.Reset();
   1056 
   1057     // --- Initial sanity
   1058 
   1059     if ((_engineStatisticsPtr == NULL) ||
   1060         (_moduleProcessThreadPtr == NULL))
   1061     {
   1062         WEBRTC_TRACE(kTraceError, kTraceVoice,
   1063                      VoEId(_instanceId,_channelId),
   1064                      "Channel::Init() must call SetEngineInformation() first");
   1065         return -1;
   1066     }
   1067 
   1068     // --- Add modules to process thread (for periodic scheduling)
   1069 
   1070     const bool processThreadFail =
   1071         (_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0);
   1073     if (processThreadFail)
   1074     {
   1075         _engineStatisticsPtr->SetLastError(
   1076             VE_CANNOT_INIT_CHANNEL, kTraceError,
   1077             "Channel::Init() modules not registered");
   1078         return -1;
   1079     }
   1080     // --- ACM initialization
   1081 
   1082     if ((audio_coding_->InitializeReceiver() == -1) ||
   1083 #ifdef WEBRTC_CODEC_AVT
   1084         // out-of-band Dtmf tones are played out by default
   1085         (audio_coding_->SetDtmfPlayoutStatus(true) == -1) ||
   1086 #endif
   1087         (audio_coding_->InitializeSender() == -1))
   1088     {
   1089         _engineStatisticsPtr->SetLastError(
   1090             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1091             "Channel::Init() unable to initialize the ACM - 1");
   1092         return -1;
   1093     }
   1094 
   1095     // --- RTP/RTCP module initialization
   1096 
   1097     // Ensure that RTCP is enabled by default for the created channel.
   1098     // Note that the module will keep generating RTCP until it is explicitly
   1099     // disabled by the user.
   1100     // After StopListen (when no sockets exist), RTCP packets will no longer
   1101     // be transmitted since the Transport object will then be invalid.
   1102     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
   1103     // RTCP is enabled by default.
   1104     if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
   1105     {
   1106         _engineStatisticsPtr->SetLastError(
   1107             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   1108             "Channel::Init() RTP/RTCP module not initialized");
   1109         return -1;
   1110     }
   1111 
   1112     // --- Register all permanent callbacks
   1113     const bool fail =
   1114         (audio_coding_->RegisterTransportCallback(this) == -1) ||
   1115         (audio_coding_->RegisterVADCallback(this) == -1);
   1116 
   1117     if (fail)
   1118     {
   1119         _engineStatisticsPtr->SetLastError(
   1120             VE_CANNOT_INIT_CHANNEL, kTraceError,
   1121             "Channel::Init() callbacks not registered");
   1122         return -1;
   1123     }
   1124 
   1125     // --- Register all supported codecs to the receiving side of the
   1126     // RTP/RTCP module
   1127 
   1128     CodecInst codec;
   1129     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
   1130 
   1131     for (int idx = 0; idx < nSupportedCodecs; idx++)
   1132     {
   1133         // Open up the RTP/RTCP receiver for all supported codecs
   1134         if ((audio_coding_->Codec(idx, &codec) == -1) ||
   1135             (rtp_receiver_->RegisterReceivePayload(
   1136                 codec.plname,
   1137                 codec.pltype,
   1138                 codec.plfreq,
   1139                 codec.channels,
   1140                 (codec.rate < 0) ? 0 : codec.rate) == -1))
   1141         {
   1142             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1143                          VoEId(_instanceId,_channelId),
   1144                          "Channel::Init() unable to register %s (%d/%d/%d/%d) "
   1145                          "to RTP/RTCP receiver",
   1146                          codec.plname, codec.pltype, codec.plfreq,
   1147                          codec.channels, codec.rate);
   1148         }
   1149         else
   1150         {
   1151             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
   1152                          VoEId(_instanceId,_channelId),
   1153                          "Channel::Init() %s (%d/%d/%d/%d) has been added to "
   1154                          "the RTP/RTCP receiver",
   1155                          codec.plname, codec.pltype, codec.plfreq,
   1156                          codec.channels, codec.rate);
   1157         }
   1158 
   1159         // Ensure that PCMU is used as default codec on the sending side
   1160         if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
   1161         {
   1162             SetSendCodec(codec);
   1163         }
   1164 
   1165         // Register default PT for outband 'telephone-event'
   1166         if (!STR_CASE_CMP(codec.plname, "telephone-event"))
   1167         {
   1168             if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
   1169                 (audio_coding_->RegisterReceiveCodec(codec) == -1))
   1170             {
   1171                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1172                              VoEId(_instanceId,_channelId),
   1173                              "Channel::Init() failed to register outband "
   1174                              "'telephone-event' (%d/%d) correctly",
   1175                              codec.pltype, codec.plfreq);
   1176             }
   1177         }
   1178 
   1179         if (!STR_CASE_CMP(codec.plname, "CN"))
   1180         {
   1181             if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
   1182                 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
   1183                 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
   1184             {
   1185                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1186                              VoEId(_instanceId,_channelId),
   1187                              "Channel::Init() failed to register CN (%d/%d) "
   1188                              "correctly - 1",
   1189                              codec.pltype, codec.plfreq);
   1190             }
   1191         }
   1192 #ifdef WEBRTC_CODEC_RED
   1193         // Register RED to the receiving side of the ACM.
   1194         // We will not receive an OnInitializeDecoder() callback for RED.
   1195         if (!STR_CASE_CMP(codec.plname, "RED"))
   1196         {
   1197             if (audio_coding_->RegisterReceiveCodec(codec) == -1)
   1198             {
   1199                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1200                              VoEId(_instanceId,_channelId),
   1201                              "Channel::Init() failed to register RED (%d/%d) "
   1202                              "correctly",
   1203                              codec.pltype, codec.plfreq);
   1204             }
   1205         }
   1206 #endif
   1207     }
   1208 
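            // Configure defaults for the receive-side audio processing module
            // (noise suppression level and AGC mode) used by the RX NS/AGC features.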
   1209     if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
   1210       LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
   1211       return -1;
   1212     }
   1213     if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
   1214       LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
   1215       return -1;
   1216     }
   1217 
   1218     return 0;
   1219 }
   1220 
   1221 int32_t
   1222 Channel::SetEngineInformation(Statistics& engineStatistics,
   1223                               OutputMixer& outputMixer,
   1224                               voe::TransmitMixer& transmitMixer,
   1225                               ProcessThread& moduleProcessThread,
   1226                               AudioDeviceModule& audioDeviceModule,
   1227                               VoiceEngineObserver* voiceEngineObserver,
   1228                               CriticalSectionWrapper* callbackCritSect)
   1229 {
   1230     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1231                  "Channel::SetEngineInformation()");
   1232     _engineStatisticsPtr = &engineStatistics;
   1233     _outputMixerPtr = &outputMixer;
   1234     _transmitMixerPtr = &transmitMixer;
   1235     _moduleProcessThreadPtr = &moduleProcessThread;
   1236     _audioDeviceModulePtr = &audioDeviceModule;
   1237     _voiceEngineObserverPtr = voiceEngineObserver;
   1238     _callbackCritSectPtr = callbackCritSect;
   1239     return 0;
   1240 }
   1241 
   1242 int32_t
   1243 Channel::UpdateLocalTimeStamp()
   1244 {
   1245 
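            // Advance the local RTP timestamp by the number of samples in the most
            // recently processed audio frame.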
   1246     _timeStamp += _audioFrame.samples_per_channel_;
   1247     return 0;
   1248 }
   1249 
   1250 int32_t
   1251 Channel::StartPlayout()
   1252 {
   1253     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1254                  "Channel::StartPlayout()");
   1255     if (channel_state_.Get().playing)
   1256     {
   1257         return 0;
   1258     }
   1259 
   1260     if (!_externalMixing) {
   1261         // Add this participant as a candidate for mixing.
   1262         if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
   1263         {
   1264             _engineStatisticsPtr->SetLastError(
   1265                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
   1266                 "StartPlayout() failed to add participant to mixer");
   1267             return -1;
   1268         }
   1269     }
   1270 
   1271     channel_state_.SetPlaying(true);
   1272     if (RegisterFilePlayingToMixer() != 0)
   1273         return -1;
   1274 
   1275     return 0;
   1276 }
   1277 
   1278 int32_t
   1279 Channel::StopPlayout()
   1280 {
   1281     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1282                  "Channel::StopPlayout()");
   1283     if (!channel_state_.Get().playing)
   1284     {
   1285         return 0;
   1286     }
   1287 
   1288     if (!_externalMixing) {
   1289         // Remove this participant as a candidate for mixing.
   1290         if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
   1291         {
   1292             _engineStatisticsPtr->SetLastError(
   1293                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
   1294                 "StopPlayout() failed to remove participant from mixer");
   1295             return -1;
   1296         }
   1297     }
   1298 
   1299     channel_state_.SetPlaying(false);
   1300     _outputAudioLevel.Clear();
   1301 
   1302     return 0;
   1303 }
   1304 
   1305 int32_t
   1306 Channel::StartSend()
   1307 {
   1308     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1309                  "Channel::StartSend()");
   1310     // Resume the previous sequence number which was reset by StopSend().
   1311     // This needs to be done before |sending| is set to true.
   1312     if (send_sequence_number_)
   1313       SetInitSequenceNumber(send_sequence_number_);
   1314 
   1315     if (channel_state_.Get().sending)
   1316     {
   1317       return 0;
   1318     }
   1319     channel_state_.SetSending(true);
   1320 
   1321     if (_rtpRtcpModule->SetSendingStatus(true) != 0)
   1322     {
   1323         _engineStatisticsPtr->SetLastError(
   1324             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   1325             "StartSend() RTP/RTCP failed to start sending");
   1326         CriticalSectionScoped cs(&_callbackCritSect);
   1327         channel_state_.SetSending(false);
   1328         return -1;
   1329     }
   1330 
   1331     return 0;
   1332 }
   1333 
   1334 int32_t
   1335 Channel::StopSend()
   1336 {
   1337     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1338                  "Channel::StopSend()");
   1339     if (!channel_state_.Get().sending)
   1340     {
   1341       return 0;
   1342     }
   1343     channel_state_.SetSending(false);
   1344 
   1345     // Store the sequence number to be able to pick up the same sequence for
   1346     // the next StartSend(). This is needed for restarting device, otherwise
   1347     // it might cause libSRTP to complain about packets being replayed.
   1348     // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
   1349     // CL is landed. See issue
   1350     // https://code.google.com/p/webrtc/issues/detail?id=2111 .
   1351     send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
   1352 
    1353     // Reset the sending SSRC and sequence number and trigger direct
    1354     // transmission of an RTCP BYE.
   1355     if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
   1356         _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
   1357     {
   1358         _engineStatisticsPtr->SetLastError(
   1359             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
    1360             "StopSend() RTP/RTCP failed to stop sending");
   1361     }
   1362 
   1363     return 0;
   1364 }
   1365 
   1366 int32_t
   1367 Channel::StartReceiving()
   1368 {
   1369     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1370                  "Channel::StartReceiving()");
   1371     if (channel_state_.Get().receiving)
   1372     {
   1373         return 0;
   1374     }
   1375     channel_state_.SetReceiving(true);
   1376     _numberOfDiscardedPackets = 0;
   1377     return 0;
   1378 }
   1379 
   1380 int32_t
   1381 Channel::StopReceiving()
   1382 {
   1383     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1384                  "Channel::StopReceiving()");
   1385     if (!channel_state_.Get().receiving)
   1386     {
   1387         return 0;
   1388     }
   1389 
   1390     channel_state_.SetReceiving(false);
   1391     return 0;
   1392 }
   1393 
   1394 int32_t
   1395 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
   1396 {
   1397     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1398                  "Channel::RegisterVoiceEngineObserver()");
   1399     CriticalSectionScoped cs(&_callbackCritSect);
   1400 
   1401     if (_voiceEngineObserverPtr)
   1402     {
   1403         _engineStatisticsPtr->SetLastError(
   1404             VE_INVALID_OPERATION, kTraceError,
   1405             "RegisterVoiceEngineObserver() observer already enabled");
   1406         return -1;
   1407     }
   1408     _voiceEngineObserverPtr = &observer;
   1409     return 0;
   1410 }
   1411 
   1412 int32_t
   1413 Channel::DeRegisterVoiceEngineObserver()
   1414 {
   1415     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1416                  "Channel::DeRegisterVoiceEngineObserver()");
   1417     CriticalSectionScoped cs(&_callbackCritSect);
   1418 
   1419     if (!_voiceEngineObserverPtr)
   1420     {
   1421         _engineStatisticsPtr->SetLastError(
   1422             VE_INVALID_OPERATION, kTraceWarning,
   1423             "DeRegisterVoiceEngineObserver() observer already disabled");
   1424         return 0;
   1425     }
   1426     _voiceEngineObserverPtr = NULL;
   1427     return 0;
   1428 }
   1429 
   1430 int32_t
   1431 Channel::GetSendCodec(CodecInst& codec)
   1432 {
   1433     return (audio_coding_->SendCodec(&codec));
   1434 }
   1435 
   1436 int32_t
   1437 Channel::GetRecCodec(CodecInst& codec)
   1438 {
   1439     return (audio_coding_->ReceiveCodec(&codec));
   1440 }
   1441 
   1442 int32_t
   1443 Channel::SetSendCodec(const CodecInst& codec)
   1444 {
   1445     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1446                  "Channel::SetSendCodec()");
   1447 
   1448     if (audio_coding_->RegisterSendCodec(codec) != 0)
   1449     {
   1450         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
   1451                      "SetSendCodec() failed to register codec to ACM");
   1452         return -1;
   1453     }
   1454 
   1455     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
   1456     {
   1457         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
   1458         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
   1459         {
   1460             WEBRTC_TRACE(
   1461                     kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
   1462                     "SetSendCodec() failed to register codec to"
   1463                     " RTP/RTCP module");
   1464             return -1;
   1465         }
   1466     }
   1467 
   1468     if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
   1469     {
   1470         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
   1471                      "SetSendCodec() failed to set audio packet size");
   1472         return -1;
   1473     }
   1474 
   1475     bitrate_controller_->SetBitrateObserver(send_bitrate_observer_.get(),
   1476                                             codec.rate, 0, 0);
   1477 
   1478     return 0;
   1479 }
   1480 
   1481 void
   1482 Channel::OnNetworkChanged(const uint32_t bitrate_bps,
   1483                           const uint8_t fraction_lost,  // 0 - 255.
   1484                           const uint32_t rtt) {
   1485   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    1486       "Channel::OnNetworkChanged(bitrate_bps=%d, fraction_lost=%d, rtt=%d)",
   1487       bitrate_bps, fraction_lost, rtt);
   1488   // |fraction_lost| from BitrateObserver is short time observation of packet
   1489   // loss rate from past. We use network predictor to make a more reasonable
   1490   // loss rate estimation.
   1491   network_predictor_->UpdatePacketLossRate(fraction_lost);
   1492   uint8_t loss_rate = network_predictor_->GetLossRate();
   1493   // Normalizes rate to 0 - 100.
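           // As an illustration: a loss_rate of 51 on the 0 - 255 scale maps to
           // 100 * 51 / 255 = 20, i.e. a 20% packet loss rate is handed to ACM.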
   1494   if (audio_coding_->SetPacketLossRate(100 * loss_rate / 255) != 0) {
   1495     _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
   1496         kTraceError, "OnNetworkChanged() failed to set packet loss rate");
   1497     assert(false);  // This should not happen.
   1498   }
   1499 }
   1500 
   1501 int32_t
   1502 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
   1503 {
   1504     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1505                  "Channel::SetVADStatus(mode=%d)", mode);
   1506     // To disable VAD, DTX must be disabled too
   1507     disableDTX = ((enableVAD == false) ? true : disableDTX);
   1508     if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0)
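             // Note the argument order below: the ACM is handed the
             // "DTX enabled" flag (!disableDTX) together with the VAD flag
             // and the VAD mode.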
   1509     {
   1510         _engineStatisticsPtr->SetLastError(
   1511             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1512             "SetVADStatus() failed to set VAD");
   1513         return -1;
   1514     }
   1515     return 0;
   1516 }
   1517 
   1518 int32_t
   1519 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
   1520 {
   1521     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1522                  "Channel::GetVADStatus");
   1523     if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0)
   1524     {
   1525         _engineStatisticsPtr->SetLastError(
   1526             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1527             "GetVADStatus() failed to get VAD status");
   1528         return -1;
   1529     }
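             // The ACM reports whether DTX is enabled; invert it so the caller
             // receives the "disabled" flag that this API exposes.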
   1530     disabledDTX = !disabledDTX;
   1531     return 0;
   1532 }
   1533 
   1534 int32_t
   1535 Channel::SetRecPayloadType(const CodecInst& codec)
   1536 {
   1537     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1538                  "Channel::SetRecPayloadType()");
   1539 
   1540     if (channel_state_.Get().playing)
   1541     {
   1542         _engineStatisticsPtr->SetLastError(
   1543             VE_ALREADY_PLAYING, kTraceError,
   1544             "SetRecPayloadType() unable to set PT while playing");
   1545         return -1;
   1546     }
   1547     if (channel_state_.Get().receiving)
   1548     {
   1549         _engineStatisticsPtr->SetLastError(
   1550             VE_ALREADY_LISTENING, kTraceError,
   1551             "SetRecPayloadType() unable to set PT while listening");
   1552         return -1;
   1553     }
   1554 
   1555     if (codec.pltype == -1)
   1556     {
   1557         // De-register the selected codec (RTP/RTCP module and ACM)
   1558 
   1559         int8_t pltype(-1);
   1560         CodecInst rxCodec = codec;
   1561 
   1562         // Get payload type for the given codec
   1563         rtp_payload_registry_->ReceivePayloadType(
   1564             rxCodec.plname,
   1565             rxCodec.plfreq,
   1566             rxCodec.channels,
   1567             (rxCodec.rate < 0) ? 0 : rxCodec.rate,
   1568             &pltype);
   1569         rxCodec.pltype = pltype;
   1570 
   1571         if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
   1572         {
   1573             _engineStatisticsPtr->SetLastError(
   1574                     VE_RTP_RTCP_MODULE_ERROR,
   1575                     kTraceError,
   1576                     "SetRecPayloadType() RTP/RTCP-module deregistration "
   1577                     "failed");
   1578             return -1;
   1579         }
   1580         if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
   1581         {
   1582             _engineStatisticsPtr->SetLastError(
   1583                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1584                 "SetRecPayloadType() ACM deregistration failed - 1");
   1585             return -1;
   1586         }
   1587         return 0;
   1588     }
   1589 
   1590     if (rtp_receiver_->RegisterReceivePayload(
   1591         codec.plname,
   1592         codec.pltype,
   1593         codec.plfreq,
   1594         codec.channels,
   1595         (codec.rate < 0) ? 0 : codec.rate) != 0)
   1596     {
   1597         // First attempt to register failed => de-register and try again
   1598         rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
   1599         if (rtp_receiver_->RegisterReceivePayload(
   1600             codec.plname,
   1601             codec.pltype,
   1602             codec.plfreq,
   1603             codec.channels,
   1604             (codec.rate < 0) ? 0 : codec.rate) != 0)
   1605         {
   1606             _engineStatisticsPtr->SetLastError(
   1607                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   1608                 "SetRecPayloadType() RTP/RTCP-module registration failed");
   1609             return -1;
   1610         }
   1611     }
   1612     if (audio_coding_->RegisterReceiveCodec(codec) != 0)
   1613     {
   1614         audio_coding_->UnregisterReceiveCodec(codec.pltype);
   1615         if (audio_coding_->RegisterReceiveCodec(codec) != 0)
   1616         {
   1617             _engineStatisticsPtr->SetLastError(
   1618                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1619                 "SetRecPayloadType() ACM registration failed - 1");
   1620             return -1;
   1621         }
   1622     }
   1623     return 0;
   1624 }
   1625 
   1626 int32_t
   1627 Channel::GetRecPayloadType(CodecInst& codec)
   1628 {
   1629     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1630                  "Channel::GetRecPayloadType()");
   1631     int8_t payloadType(-1);
   1632     if (rtp_payload_registry_->ReceivePayloadType(
   1633         codec.plname,
   1634         codec.plfreq,
   1635         codec.channels,
   1636         (codec.rate < 0) ? 0 : codec.rate,
   1637         &payloadType) != 0)
   1638     {
   1639         _engineStatisticsPtr->SetLastError(
   1640             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
   1641             "GetRecPayloadType() failed to retrieve RX payload type");
   1642         return -1;
   1643     }
   1644     codec.pltype = payloadType;
   1645     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1646                  "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
   1647     return 0;
   1648 }
   1649 
   1650 int32_t
   1651 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
   1652 {
   1653     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1654                  "Channel::SetSendCNPayloadType()");
   1655 
   1656     CodecInst codec;
   1657     int32_t samplingFreqHz(-1);
   1658     const int kMono = 1;
   1659     if (frequency == kFreq32000Hz)
   1660         samplingFreqHz = 32000;
   1661     else if (frequency == kFreq16000Hz)
   1662         samplingFreqHz = 16000;
   1663 
   1664     if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
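             // Only the 16 kHz and 32 kHz CN variants are mapped above; any
             // other PayloadFrequencies value leaves samplingFreqHz at -1, in
             // which case the lookup below is expected to fail.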
   1665     {
   1666         _engineStatisticsPtr->SetLastError(
   1667             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1668             "SetSendCNPayloadType() failed to retrieve default CN codec "
   1669             "settings");
   1670         return -1;
   1671     }
   1672 
   1673     // Modify the payload type (must be set to dynamic range)
   1674     codec.pltype = type;
   1675 
   1676     if (audio_coding_->RegisterSendCodec(codec) != 0)
   1677     {
   1678         _engineStatisticsPtr->SetLastError(
   1679             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1680             "SetSendCNPayloadType() failed to register CN to ACM");
   1681         return -1;
   1682     }
   1683 
   1684     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
   1685     {
   1686         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
   1687         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
   1688         {
   1689             _engineStatisticsPtr->SetLastError(
   1690                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   1691                 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
   1692                 "module");
   1693             return -1;
   1694         }
   1695     }
   1696     return 0;
   1697 }
   1698 
   1699 int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
   1700   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   1701                "Channel::SetOpusMaxPlaybackRate()");
   1702 
   1703   if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
   1704     _engineStatisticsPtr->SetLastError(
   1705         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   1706         "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
   1707     return -1;
   1708   }
   1709   return 0;
   1710 }
   1711 
   1712 int32_t Channel::RegisterExternalTransport(Transport& transport)
   1713 {
   1714     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   1715                "Channel::RegisterExternalTransport()");
   1716 
   1717     CriticalSectionScoped cs(&_callbackCritSect);
   1718 
   1719     if (_externalTransport)
   1720     {
   1721         _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
   1722                                            kTraceError,
   1723               "RegisterExternalTransport() external transport already enabled");
   1724        return -1;
   1725     }
   1726     _externalTransport = true;
   1727     _transportPtr = &transport;
   1728     return 0;
   1729 }
   1730 
   1731 int32_t
   1732 Channel::DeRegisterExternalTransport()
   1733 {
   1734     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1735                  "Channel::DeRegisterExternalTransport()");
   1736 
   1737     CriticalSectionScoped cs(&_callbackCritSect);
   1738 
   1739     if (!_transportPtr)
   1740     {
   1741         _engineStatisticsPtr->SetLastError(
   1742             VE_INVALID_OPERATION, kTraceWarning,
   1743             "DeRegisterExternalTransport() external transport already "
   1744             "disabled");
   1745         return 0;
   1746     }
   1747     _externalTransport = false;
   1748     _transportPtr = NULL;
   1749     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    1750                  "DeRegisterExternalTransport() external transport is now disabled");
   1751     return 0;
   1752 }
   1753 
   1754 int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
   1755                                    const PacketTime& packet_time) {
   1756   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   1757                "Channel::ReceivedRTPPacket()");
   1758 
   1759   // Store playout timestamp for the received RTP packet
   1760   UpdatePlayoutTimestamp(false);
   1761 
   1762   // Dump the RTP packet to a file (if RTP dump is enabled).
   1763   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
   1764                             (uint16_t)length) == -1) {
   1765     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1766                  VoEId(_instanceId,_channelId),
    1767                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
   1768   }
   1769   const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
   1770   RTPHeader header;
   1771   if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
   1772     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
   1773                  "Incoming packet: invalid RTP header");
   1774     return -1;
   1775   }
   1776   header.payload_type_frequency =
   1777       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
   1778   if (header.payload_type_frequency < 0)
   1779     return -1;
   1780   bool in_order = IsPacketInOrder(header);
   1781   rtp_receive_statistics_->IncomingPacket(header, length,
   1782       IsPacketRetransmitted(header, in_order));
   1783   rtp_payload_registry_->SetIncomingPayloadType(header);
   1784 
   1785   // Forward any packets to ViE bandwidth estimator, if enabled.
   1786   {
   1787     CriticalSectionScoped cs(&_callbackCritSect);
   1788     if (vie_network_) {
   1789       int64_t arrival_time_ms;
   1790       if (packet_time.timestamp != -1) {
   1791         arrival_time_ms = (packet_time.timestamp + 500) / 1000;
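                 // packet_time.timestamp is assumed to be in microseconds;
                 // adding 500 before dividing rounds to the nearest millisecond
                 // (e.g. 12499 us -> 12 ms, 12500 us -> 13 ms).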
   1792       } else {
   1793         arrival_time_ms = TickTime::MillisecondTimestamp();
   1794       }
   1795       int payload_length = length - header.headerLength;
   1796       vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
   1797                                       payload_length, header);
   1798     }
   1799   }
   1800 
   1801   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
   1802 }
   1803 
   1804 bool Channel::ReceivePacket(const uint8_t* packet,
   1805                             int packet_length,
   1806                             const RTPHeader& header,
   1807                             bool in_order) {
   1808   if (rtp_payload_registry_->IsEncapsulated(header)) {
   1809     return HandleEncapsulation(packet, packet_length, header);
   1810   }
   1811   const uint8_t* payload = packet + header.headerLength;
   1812   int payload_length = packet_length - header.headerLength;
   1813   assert(payload_length >= 0);
   1814   PayloadUnion payload_specific;
   1815   if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
   1816                                                   &payload_specific)) {
   1817     return false;
   1818   }
   1819   return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
   1820                                           payload_specific, in_order);
   1821 }
   1822 
   1823 bool Channel::HandleEncapsulation(const uint8_t* packet,
   1824                                   int packet_length,
   1825                                   const RTPHeader& header) {
   1826   if (!rtp_payload_registry_->IsRtx(header))
   1827     return false;
   1828 
   1829   // Remove the RTX header and parse the original RTP header.
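           // In RTX (RFC 4588) the retransmission carries its own RTP header
           // and stores the original sequence number in the first two payload
           // bytes; RestoreOriginalPacket() below undoes that wrapping.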
   1830   if (packet_length < header.headerLength)
   1831     return false;
   1832   if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
   1833     return false;
   1834   if (restored_packet_in_use_) {
   1835     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
   1836                  "Multiple RTX headers detected, dropping packet");
   1837     return false;
   1838   }
   1839   uint8_t* restored_packet_ptr = restored_packet_;
   1840   if (!rtp_payload_registry_->RestoreOriginalPacket(
   1841       &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
   1842       header)) {
   1843     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
   1844                  "Incoming RTX packet: invalid RTP header");
   1845     return false;
   1846   }
   1847   restored_packet_in_use_ = true;
   1848   bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
   1849   restored_packet_in_use_ = false;
   1850   return ret;
   1851 }
   1852 
   1853 bool Channel::IsPacketInOrder(const RTPHeader& header) const {
   1854   StreamStatistician* statistician =
   1855       rtp_receive_statistics_->GetStatistician(header.ssrc);
   1856   if (!statistician)
   1857     return false;
   1858   return statistician->IsPacketInOrder(header.sequenceNumber);
   1859 }
   1860 
   1861 bool Channel::IsPacketRetransmitted(const RTPHeader& header,
   1862                                     bool in_order) const {
   1863   // Retransmissions are handled separately if RTX is enabled.
   1864   if (rtp_payload_registry_->RtxEnabled())
   1865     return false;
   1866   StreamStatistician* statistician =
   1867       rtp_receive_statistics_->GetStatistician(header.ssrc);
   1868   if (!statistician)
   1869     return false;
   1870   // Check if this is a retransmission.
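           // RTT() may leave min_rtt at 0 when no round-trip estimate exists
           // yet; in that case IsRetransmitOfOldPacket() is assumed to fall
           // back on its own heuristic.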
   1871   uint16_t min_rtt = 0;
   1872   _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
   1873   return !in_order &&
   1874       statistician->IsRetransmitOfOldPacket(header, min_rtt);
   1875 }
   1876 
   1877 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
   1878   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   1879                "Channel::ReceivedRTCPPacket()");
   1880   // Store playout timestamp for the received RTCP packet
   1881   UpdatePlayoutTimestamp(true);
   1882 
   1883   // Dump the RTCP packet to a file (if RTP dump is enabled).
   1884   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
   1885                             (uint16_t)length) == -1) {
   1886     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   1887                  VoEId(_instanceId,_channelId),
    1888                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
   1889   }
   1890 
   1891   // Deliver RTCP packet to RTP/RTCP module for parsing
   1892   if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
   1893                                          (uint16_t)length) == -1) {
   1894     _engineStatisticsPtr->SetLastError(
   1895         VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
    1896         "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
   1897   }
   1898 
   1899   {
   1900     CriticalSectionScoped lock(ts_stats_lock_.get());
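             // RTCP sender reports carry an NTP/RTP timestamp pair; feeding it
             // to the estimator allows remote NTP capture times to be derived
             // for incoming RTP timestamps later on.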
   1901     ntp_estimator_.UpdateRtcpTimestamp(rtp_receiver_->SSRC(),
   1902                                        _rtpRtcpModule.get());
   1903   }
   1904   return 0;
   1905 }
   1906 
   1907 int Channel::StartPlayingFileLocally(const char* fileName,
   1908                                      bool loop,
   1909                                      FileFormats format,
   1910                                      int startPosition,
   1911                                      float volumeScaling,
   1912                                      int stopPosition,
   1913                                      const CodecInst* codecInst)
   1914 {
   1915     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1916                  "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
   1917                  " format=%d, volumeScaling=%5.3f, startPosition=%d, "
   1918                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
   1919                  startPosition, stopPosition);
   1920 
   1921     if (channel_state_.Get().output_file_playing)
   1922     {
   1923         _engineStatisticsPtr->SetLastError(
   1924             VE_ALREADY_PLAYING, kTraceError,
   1925             "StartPlayingFileLocally() is already playing");
   1926         return -1;
   1927     }
   1928 
   1929     {
   1930         CriticalSectionScoped cs(&_fileCritSect);
   1931 
   1932         if (_outputFilePlayerPtr)
   1933         {
   1934             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   1935             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   1936             _outputFilePlayerPtr = NULL;
   1937         }
   1938 
   1939         _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
   1940             _outputFilePlayerId, (const FileFormats)format);
   1941 
   1942         if (_outputFilePlayerPtr == NULL)
   1943         {
   1944             _engineStatisticsPtr->SetLastError(
   1945                 VE_INVALID_ARGUMENT, kTraceError,
   1946                 "StartPlayingFileLocally() filePlayer format is not correct");
   1947             return -1;
   1948         }
   1949 
   1950         const uint32_t notificationTime(0);
   1951 
   1952         if (_outputFilePlayerPtr->StartPlayingFile(
   1953                 fileName,
   1954                 loop,
   1955                 startPosition,
   1956                 volumeScaling,
   1957                 notificationTime,
   1958                 stopPosition,
   1959                 (const CodecInst*)codecInst) != 0)
   1960         {
   1961             _engineStatisticsPtr->SetLastError(
   1962                 VE_BAD_FILE, kTraceError,
   1963                 "StartPlayingFile() failed to start file playout");
   1964             _outputFilePlayerPtr->StopPlayingFile();
   1965             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   1966             _outputFilePlayerPtr = NULL;
   1967             return -1;
   1968         }
   1969         _outputFilePlayerPtr->RegisterModuleFileCallback(this);
   1970         channel_state_.SetOutputFilePlaying(true);
   1971     }
   1972 
   1973     if (RegisterFilePlayingToMixer() != 0)
   1974         return -1;
   1975 
   1976     return 0;
   1977 }
   1978 
   1979 int Channel::StartPlayingFileLocally(InStream* stream,
   1980                                      FileFormats format,
   1981                                      int startPosition,
   1982                                      float volumeScaling,
   1983                                      int stopPosition,
   1984                                      const CodecInst* codecInst)
   1985 {
   1986     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   1987                  "Channel::StartPlayingFileLocally(format=%d,"
   1988                  " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
   1989                  format, volumeScaling, startPosition, stopPosition);
   1990 
    1991     if (stream == NULL)
   1992     {
   1993         _engineStatisticsPtr->SetLastError(
   1994             VE_BAD_FILE, kTraceError,
   1995             "StartPlayingFileLocally() NULL as input stream");
   1996         return -1;
   1997     }
   1998 
   1999 
   2000     if (channel_state_.Get().output_file_playing)
   2001     {
   2002         _engineStatisticsPtr->SetLastError(
   2003             VE_ALREADY_PLAYING, kTraceError,
   2004             "StartPlayingFileLocally() is already playing");
   2005         return -1;
   2006     }
   2007 
   2008     {
   2009       CriticalSectionScoped cs(&_fileCritSect);
   2010 
   2011       // Destroy the old instance
   2012       if (_outputFilePlayerPtr)
   2013       {
   2014           _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   2015           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   2016           _outputFilePlayerPtr = NULL;
   2017       }
   2018 
   2019       // Create the instance
   2020       _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
   2021           _outputFilePlayerId,
   2022           (const FileFormats)format);
   2023 
   2024       if (_outputFilePlayerPtr == NULL)
   2025       {
   2026           _engineStatisticsPtr->SetLastError(
   2027               VE_INVALID_ARGUMENT, kTraceError,
    2028               "StartPlayingFileLocally() filePlayer format is not correct");
   2029           return -1;
   2030       }
   2031 
   2032       const uint32_t notificationTime(0);
   2033 
   2034       if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
   2035                                                  volumeScaling,
   2036                                                  notificationTime,
   2037                                                  stopPosition, codecInst) != 0)
   2038       {
   2039           _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
   2040                                              "StartPlayingFile() failed to "
   2041                                              "start file playout");
   2042           _outputFilePlayerPtr->StopPlayingFile();
   2043           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   2044           _outputFilePlayerPtr = NULL;
   2045           return -1;
   2046       }
   2047       _outputFilePlayerPtr->RegisterModuleFileCallback(this);
   2048       channel_state_.SetOutputFilePlaying(true);
   2049     }
   2050 
   2051     if (RegisterFilePlayingToMixer() != 0)
   2052         return -1;
   2053 
   2054     return 0;
   2055 }
   2056 
   2057 int Channel::StopPlayingFileLocally()
   2058 {
   2059     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2060                  "Channel::StopPlayingFileLocally()");
   2061 
   2062     if (!channel_state_.Get().output_file_playing)
   2063     {
   2064         _engineStatisticsPtr->SetLastError(
   2065             VE_INVALID_OPERATION, kTraceWarning,
    2066             "StopPlayingFileLocally() is not playing");
   2067         return 0;
   2068     }
   2069 
   2070     {
   2071         CriticalSectionScoped cs(&_fileCritSect);
   2072 
   2073         if (_outputFilePlayerPtr->StopPlayingFile() != 0)
   2074         {
   2075             _engineStatisticsPtr->SetLastError(
   2076                 VE_STOP_RECORDING_FAILED, kTraceError,
   2077                 "StopPlayingFile() could not stop playing");
   2078             return -1;
   2079         }
   2080         _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   2081         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   2082         _outputFilePlayerPtr = NULL;
   2083         channel_state_.SetOutputFilePlaying(false);
   2084     }
   2085     // _fileCritSect cannot be taken while calling
   2086     // SetAnonymousMixibilityStatus. Refer to comments in
   2087     // StartPlayingFileLocally(const char* ...) for more details.
   2088     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
   2089     {
   2090         _engineStatisticsPtr->SetLastError(
   2091             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
   2092             "StopPlayingFile() failed to stop participant from playing as"
    2093             " file in the mixer");
   2094         return -1;
   2095     }
   2096 
   2097     return 0;
   2098 }
   2099 
   2100 int Channel::IsPlayingFileLocally() const
   2101 {
   2102     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2103                  "Channel::IsPlayingFileLocally()");
   2104 
   2105     return channel_state_.Get().output_file_playing;
   2106 }
   2107 
   2108 int Channel::RegisterFilePlayingToMixer()
   2109 {
   2110     // Return success for not registering for file playing to mixer if:
   2111     // 1. playing file before playout is started on that channel.
   2112     // 2. starting playout without file playing on that channel.
   2113     if (!channel_state_.Get().playing ||
   2114         !channel_state_.Get().output_file_playing)
   2115     {
   2116         return 0;
   2117     }
   2118 
   2119     // |_fileCritSect| cannot be taken while calling
   2120     // SetAnonymousMixabilityStatus() since as soon as the participant is added
   2121     // frames can be pulled by the mixer. Since the frames are generated from
   2122     // the file, _fileCritSect will be taken. This would result in a deadlock.
   2123     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
   2124     {
   2125         channel_state_.SetOutputFilePlaying(false);
   2126         CriticalSectionScoped cs(&_fileCritSect);
   2127         _engineStatisticsPtr->SetLastError(
   2128             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
   2129             "StartPlayingFile() failed to add participant as file to mixer");
   2130         _outputFilePlayerPtr->StopPlayingFile();
   2131         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
   2132         _outputFilePlayerPtr = NULL;
   2133         return -1;
   2134     }
   2135 
   2136     return 0;
   2137 }
   2138 
   2139 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
   2140                                           bool loop,
   2141                                           FileFormats format,
   2142                                           int startPosition,
   2143                                           float volumeScaling,
   2144                                           int stopPosition,
   2145                                           const CodecInst* codecInst)
   2146 {
   2147     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2148                  "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
   2149                  "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
   2150                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
   2151                  startPosition, stopPosition);
   2152 
   2153     CriticalSectionScoped cs(&_fileCritSect);
   2154 
   2155     if (channel_state_.Get().input_file_playing)
   2156     {
   2157         _engineStatisticsPtr->SetLastError(
   2158             VE_ALREADY_PLAYING, kTraceWarning,
   2159             "StartPlayingFileAsMicrophone() filePlayer is playing");
   2160         return 0;
   2161     }
   2162 
   2163     // Destroy the old instance
   2164     if (_inputFilePlayerPtr)
   2165     {
   2166         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   2167         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
   2168         _inputFilePlayerPtr = NULL;
   2169     }
   2170 
   2171     // Create the instance
   2172     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
   2173         _inputFilePlayerId, (const FileFormats)format);
   2174 
   2175     if (_inputFilePlayerPtr == NULL)
   2176     {
   2177         _engineStatisticsPtr->SetLastError(
   2178             VE_INVALID_ARGUMENT, kTraceError,
    2179             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
   2180         return -1;
   2181     }
   2182 
   2183     const uint32_t notificationTime(0);
   2184 
   2185     if (_inputFilePlayerPtr->StartPlayingFile(
   2186         fileName,
   2187         loop,
   2188         startPosition,
   2189         volumeScaling,
   2190         notificationTime,
   2191         stopPosition,
   2192         (const CodecInst*)codecInst) != 0)
   2193     {
   2194         _engineStatisticsPtr->SetLastError(
   2195             VE_BAD_FILE, kTraceError,
   2196             "StartPlayingFile() failed to start file playout");
   2197         _inputFilePlayerPtr->StopPlayingFile();
   2198         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
   2199         _inputFilePlayerPtr = NULL;
   2200         return -1;
   2201     }
   2202     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
   2203     channel_state_.SetInputFilePlaying(true);
   2204 
   2205     return 0;
   2206 }
   2207 
   2208 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
   2209                                           FileFormats format,
   2210                                           int startPosition,
   2211                                           float volumeScaling,
   2212                                           int stopPosition,
   2213                                           const CodecInst* codecInst)
   2214 {
   2215     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2216                  "Channel::StartPlayingFileAsMicrophone(format=%d, "
   2217                  "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
   2218                  format, volumeScaling, startPosition, stopPosition);
   2219 
    2220     if (stream == NULL)
   2221     {
   2222         _engineStatisticsPtr->SetLastError(
   2223             VE_BAD_FILE, kTraceError,
   2224             "StartPlayingFileAsMicrophone NULL as input stream");
   2225         return -1;
   2226     }
   2227 
   2228     CriticalSectionScoped cs(&_fileCritSect);
   2229 
   2230     if (channel_state_.Get().input_file_playing)
   2231     {
   2232         _engineStatisticsPtr->SetLastError(
   2233             VE_ALREADY_PLAYING, kTraceWarning,
   2234             "StartPlayingFileAsMicrophone() is playing");
   2235         return 0;
   2236     }
   2237 
   2238     // Destroy the old instance
   2239     if (_inputFilePlayerPtr)
   2240     {
   2241         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   2242         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
   2243         _inputFilePlayerPtr = NULL;
   2244     }
   2245 
   2246     // Create the instance
   2247     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
   2248         _inputFilePlayerId, (const FileFormats)format);
   2249 
   2250     if (_inputFilePlayerPtr == NULL)
   2251     {
   2252         _engineStatisticsPtr->SetLastError(
   2253             VE_INVALID_ARGUMENT, kTraceError,
    2254             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
   2255         return -1;
   2256     }
   2257 
   2258     const uint32_t notificationTime(0);
   2259 
   2260     if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
   2261                                               volumeScaling, notificationTime,
   2262                                               stopPosition, codecInst) != 0)
   2263     {
   2264         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
   2265                                            "StartPlayingFile() failed to start "
   2266                                            "file playout");
   2267         _inputFilePlayerPtr->StopPlayingFile();
   2268         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
   2269         _inputFilePlayerPtr = NULL;
   2270         return -1;
   2271     }
   2272 
   2273     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
   2274     channel_state_.SetInputFilePlaying(true);
   2275 
   2276     return 0;
   2277 }
   2278 
   2279 int Channel::StopPlayingFileAsMicrophone()
   2280 {
   2281     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2282                  "Channel::StopPlayingFileAsMicrophone()");
   2283 
   2284     CriticalSectionScoped cs(&_fileCritSect);
   2285 
   2286     if (!channel_state_.Get().input_file_playing)
   2287     {
   2288         _engineStatisticsPtr->SetLastError(
   2289             VE_INVALID_OPERATION, kTraceWarning,
    2290             "StopPlayingFileAsMicrophone() is not playing");
   2291         return 0;
   2292     }
   2293 
   2294     if (_inputFilePlayerPtr->StopPlayingFile() != 0)
   2295     {
   2296         _engineStatisticsPtr->SetLastError(
   2297             VE_STOP_RECORDING_FAILED, kTraceError,
   2298             "StopPlayingFile() could not stop playing");
   2299         return -1;
   2300     }
   2301     _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
   2302     FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
   2303     _inputFilePlayerPtr = NULL;
   2304     channel_state_.SetInputFilePlaying(false);
   2305 
   2306     return 0;
   2307 }
   2308 
   2309 int Channel::IsPlayingFileAsMicrophone() const
   2310 {
   2311     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2312                  "Channel::IsPlayingFileAsMicrophone()");
   2313     return channel_state_.Get().input_file_playing;
   2314 }
   2315 
   2316 int Channel::StartRecordingPlayout(const char* fileName,
   2317                                    const CodecInst* codecInst)
   2318 {
   2319     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2320                  "Channel::StartRecordingPlayout(fileName=%s)", fileName);
   2321 
   2322     if (_outputFileRecording)
   2323     {
   2324         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
   2325                      "StartRecordingPlayout() is already recording");
   2326         return 0;
   2327     }
   2328 
   2329     FileFormats format;
   2330     const uint32_t notificationTime(0); // Not supported in VoE
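             // Fallback codec description (pltype, plname, plfreq, pacsize,
             // channels, rate): 16 kHz mono L16 in 320-sample (20 ms) frames.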
    2331     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
   2332 
   2333     if ((codecInst != NULL) &&
   2334       ((codecInst->channels < 1) || (codecInst->channels > 2)))
   2335     {
   2336         _engineStatisticsPtr->SetLastError(
   2337             VE_BAD_ARGUMENT, kTraceError,
   2338             "StartRecordingPlayout() invalid compression");
   2339         return(-1);
    2340         return -1;
   2341     if(codecInst == NULL)
    2342     if (codecInst == NULL)
   2343         format = kFileFormatPcm16kHzFile;
   2344         codecInst=&dummyCodec;
    2345         codecInst = &dummyCodec;
   2346     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
    2347     else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
    2348         (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
    2349         (STR_CASE_CMP(codecInst->plname, "PCMA") == 0))
   2350         format = kFileFormatWavFile;
   2351     }
   2352     else
   2353     {
   2354         format = kFileFormatCompressedFile;
   2355     }
   2356 
   2357     CriticalSectionScoped cs(&_fileCritSect);
   2358 
   2359     // Destroy the old instance
   2360     if (_outputFileRecorderPtr)
   2361     {
   2362         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
   2363         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   2364         _outputFileRecorderPtr = NULL;
   2365     }
   2366 
   2367     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
   2368         _outputFileRecorderId, (const FileFormats)format);
   2369     if (_outputFileRecorderPtr == NULL)
   2370     {
   2371         _engineStatisticsPtr->SetLastError(
   2372             VE_INVALID_ARGUMENT, kTraceError,
    2373             "StartRecordingPlayout() fileRecorder format is not correct");
   2374         return -1;
   2375     }
   2376 
   2377     if (_outputFileRecorderPtr->StartRecordingAudioFile(
   2378         fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
   2379     {
   2380         _engineStatisticsPtr->SetLastError(
   2381             VE_BAD_FILE, kTraceError,
   2382             "StartRecordingAudioFile() failed to start file recording");
   2383         _outputFileRecorderPtr->StopRecording();
   2384         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   2385         _outputFileRecorderPtr = NULL;
   2386         return -1;
   2387     }
   2388     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
   2389     _outputFileRecording = true;
   2390 
   2391     return 0;
   2392 }
   2393 
   2394 int Channel::StartRecordingPlayout(OutStream* stream,
   2395                                    const CodecInst* codecInst)
   2396 {
   2397     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2398                  "Channel::StartRecordingPlayout()");
   2399 
   2400     if (_outputFileRecording)
   2401     {
   2402         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
   2403                      "StartRecordingPlayout() is already recording");
   2404         return 0;
   2405     }
   2406 
   2407     FileFormats format;
   2408     const uint32_t notificationTime(0); // Not supported in VoE
   2409     CodecInst dummyCodec={100,"L16",16000,320,1,320000};
    2410     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
   2411     if (codecInst != NULL && codecInst->channels != 1)
   2412     {
   2413         _engineStatisticsPtr->SetLastError(
   2414             VE_BAD_ARGUMENT, kTraceError,
   2415             "StartRecordingPlayout() invalid compression");
   2416         return(-1);
    2417         return -1;
   2418     if(codecInst == NULL)
    2419     if (codecInst == NULL)
   2420         format = kFileFormatPcm16kHzFile;
   2421         codecInst=&dummyCodec;
    2422         codecInst = &dummyCodec;
   2423     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
    2424     else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
    2425         (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
    2426         (STR_CASE_CMP(codecInst->plname, "PCMA") == 0))
   2427         format = kFileFormatWavFile;
   2428     }
   2429     else
   2430     {
   2431         format = kFileFormatCompressedFile;
   2432     }
   2433 
   2434     CriticalSectionScoped cs(&_fileCritSect);
   2435 
   2436     // Destroy the old instance
   2437     if (_outputFileRecorderPtr)
   2438     {
   2439         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
   2440         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   2441         _outputFileRecorderPtr = NULL;
   2442     }
   2443 
   2444     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
   2445         _outputFileRecorderId, (const FileFormats)format);
   2446     if (_outputFileRecorderPtr == NULL)
   2447     {
   2448         _engineStatisticsPtr->SetLastError(
   2449             VE_INVALID_ARGUMENT, kTraceError,
    2450             "StartRecordingPlayout() fileRecorder format is not correct");
   2451         return -1;
   2452     }
   2453 
   2454     if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
   2455                                                         notificationTime) != 0)
   2456     {
   2457         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
   2458                                            "StartRecordingPlayout() failed to "
   2459                                            "start file recording");
   2460         _outputFileRecorderPtr->StopRecording();
   2461         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   2462         _outputFileRecorderPtr = NULL;
   2463         return -1;
   2464     }
   2465 
   2466     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
   2467     _outputFileRecording = true;
   2468 
   2469     return 0;
   2470 }
   2471 
   2472 int Channel::StopRecordingPlayout()
   2473 {
   2474     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
   2475                  "Channel::StopRecordingPlayout()");
   2476 
   2477     if (!_outputFileRecording)
   2478     {
   2479         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
    2480                      "StopRecordingPlayout() is not recording");
   2481         return -1;
   2482     }
   2483 
   2484 
   2485     CriticalSectionScoped cs(&_fileCritSect);
   2486 
   2487     if (_outputFileRecorderPtr->StopRecording() != 0)
   2488     {
   2489         _engineStatisticsPtr->SetLastError(
   2490             VE_STOP_RECORDING_FAILED, kTraceError,
   2491             "StopRecording() could not stop recording");
   2492         return(-1);
    2493         return -1;
   2494     _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
   2495     FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
   2496     _outputFileRecorderPtr = NULL;
   2497     _outputFileRecording = false;
   2498 
   2499     return 0;
   2500 }
   2501 
   2502 void
   2503 Channel::SetMixWithMicStatus(bool mix)
   2504 {
   2505     CriticalSectionScoped cs(&_fileCritSect);
    2506     _mixFileWithMicrophone = mix;
   2507 }
   2508 
   2509 int
   2510 Channel::GetSpeechOutputLevel(uint32_t& level) const
   2511 {
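             // Level() reports the output level on a coarse scale, while
             // LevelFullRange() in the method below covers the full linear
             // 16-bit range.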
   2512     int8_t currentLevel = _outputAudioLevel.Level();
    2513     level = static_cast<uint32_t> (currentLevel);
   2514     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2515                VoEId(_instanceId,_channelId),
   2516                "GetSpeechOutputLevel() => level=%u", level);
   2517     return 0;
   2518 }
   2519 
   2520 int
   2521 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
   2522 {
   2523     int16_t currentLevel = _outputAudioLevel.LevelFullRange();
    2524     level = static_cast<uint32_t> (currentLevel);
   2525     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2526                VoEId(_instanceId,_channelId),
   2527                "GetSpeechOutputLevelFullRange() => level=%u", level);
   2528     return 0;
   2529 }
   2530 
   2531 int
   2532 Channel::SetMute(bool enable)
   2533 {
   2534     CriticalSectionScoped cs(&volume_settings_critsect_);
   2535     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2536                "Channel::SetMute(enable=%d)", enable);
   2537     _mute = enable;
   2538     return 0;
   2539 }
   2540 
   2541 bool
   2542 Channel::Mute() const
   2543 {
   2544     CriticalSectionScoped cs(&volume_settings_critsect_);
   2545     return _mute;
   2546 }
   2547 
   2548 int
   2549 Channel::SetOutputVolumePan(float left, float right)
   2550 {
   2551     CriticalSectionScoped cs(&volume_settings_critsect_);
   2552     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2553                "Channel::SetOutputVolumePan()");
   2554     _panLeft = left;
   2555     _panRight = right;
   2556     return 0;
   2557 }
   2558 
   2559 int
   2560 Channel::GetOutputVolumePan(float& left, float& right) const
   2561 {
   2562     CriticalSectionScoped cs(&volume_settings_critsect_);
   2563     left = _panLeft;
   2564     right = _panRight;
   2565     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2566                VoEId(_instanceId,_channelId),
   2567                "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
   2568     return 0;
   2569 }
   2570 
   2571 int
   2572 Channel::SetChannelOutputVolumeScaling(float scaling)
   2573 {
   2574     CriticalSectionScoped cs(&volume_settings_critsect_);
   2575     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2576                "Channel::SetChannelOutputVolumeScaling()");
   2577     _outputGain = scaling;
   2578     return 0;
   2579 }
   2580 
   2581 int
   2582 Channel::GetChannelOutputVolumeScaling(float& scaling) const
   2583 {
   2584     CriticalSectionScoped cs(&volume_settings_critsect_);
   2585     scaling = _outputGain;
   2586     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2587                VoEId(_instanceId,_channelId),
   2588                "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
   2589     return 0;
   2590 }
   2591 
   2592 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
   2593                                        int lengthMs, int attenuationDb,
   2594                                        bool playDtmfEvent)
   2595 {
   2596     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   2597                "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
   2598                playDtmfEvent);
   2599 
   2600     _playOutbandDtmfEvent = playDtmfEvent;
   2601 
   2602     if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
   2603                                                  attenuationDb) != 0)
   2604     {
   2605         _engineStatisticsPtr->SetLastError(
   2606             VE_SEND_DTMF_FAILED,
   2607             kTraceWarning,
   2608             "SendTelephoneEventOutband() failed to send event");
   2609         return -1;
   2610     }
   2611     return 0;
   2612 }
   2613 
   2614 int Channel::SendTelephoneEventInband(unsigned char eventCode,
   2615                                          int lengthMs,
   2616                                          int attenuationDb,
   2617                                          bool playDtmfEvent)
   2618 {
   2619     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   2620                "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
   2621                playDtmfEvent);
   2622 
   2623     _playInbandDtmfEvent = playDtmfEvent;
   2624     _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
   2625 
   2626     return 0;
   2627 }
   2628 
   2629 int
   2630 Channel::SetDtmfPlayoutStatus(bool enable)
   2631 {
   2632     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2633                "Channel::SetDtmfPlayoutStatus()");
   2634     if (audio_coding_->SetDtmfPlayoutStatus(enable) != 0)
   2635     {
   2636         _engineStatisticsPtr->SetLastError(
   2637             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
   2638             "SetDtmfPlayoutStatus() failed to set Dtmf playout");
   2639         return -1;
   2640     }
   2641     return 0;
   2642 }
   2643 
   2644 bool
   2645 Channel::DtmfPlayoutStatus() const
   2646 {
   2647     return audio_coding_->DtmfPlayoutStatus();
   2648 }
   2649 
   2650 int
   2651 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
   2652 {
   2653     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2654                "Channel::SetSendTelephoneEventPayloadType()");
   2655     if (type > 127)
   2656     {
   2657         _engineStatisticsPtr->SetLastError(
   2658             VE_INVALID_ARGUMENT, kTraceError,
   2659             "SetSendTelephoneEventPayloadType() invalid type");
   2660         return -1;
   2661     }
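             // "telephone-event" (RFC 4733 DTMF) is conventionally registered
             // with an 8000 Hz clock rate; the 16-byte copy includes the
             // terminating NUL of the 15-character name.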
   2662     CodecInst codec = {};
   2663     codec.plfreq = 8000;
   2664     codec.pltype = type;
   2665     memcpy(codec.plname, "telephone-event", 16);
   2666     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
   2667     {
   2668         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
   2669         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
   2670             _engineStatisticsPtr->SetLastError(
   2671                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   2672                 "SetSendTelephoneEventPayloadType() failed to register send"
    2673                 " payload type");
   2674             return -1;
   2675         }
   2676     }
   2677     _sendTelephoneEventPayloadType = type;
   2678     return 0;
   2679 }
   2680 
   2681 int
   2682 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
   2683 {
   2684     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2685                  "Channel::GetSendTelephoneEventPayloadType()");
   2686     type = _sendTelephoneEventPayloadType;
   2687     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2688                VoEId(_instanceId,_channelId),
   2689                "GetSendTelephoneEventPayloadType() => type=%u", type);
   2690     return 0;
   2691 }
   2692 
   2693 int
   2694 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
   2695 {
   2696     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   2697                  "Channel::UpdateRxVadDetection()");
   2698 
   2699     int vadDecision = 1;
   2700 
    2701     vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
   2702 
   2703     if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
   2704     {
   2705         OnRxVadDetected(vadDecision);
   2706         _oldVadDecision = vadDecision;
   2707     }
   2708 
   2709     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   2710                  "Channel::UpdateRxVadDetection() => vadDecision=%d",
   2711                  vadDecision);
   2712     return 0;
   2713 }
   2714 
   2715 int
   2716 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
   2717 {
   2718     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2719                  "Channel::RegisterRxVadObserver()");
   2720     CriticalSectionScoped cs(&_callbackCritSect);
   2721 
   2722     if (_rxVadObserverPtr)
   2723     {
   2724         _engineStatisticsPtr->SetLastError(
   2725             VE_INVALID_OPERATION, kTraceError,
   2726             "RegisterRxVadObserver() observer already enabled");
   2727         return -1;
   2728     }
   2729     _rxVadObserverPtr = &observer;
   2730     _RxVadDetection = true;
   2731     return 0;
   2732 }
   2733 
   2734 int
   2735 Channel::DeRegisterRxVadObserver()
   2736 {
   2737     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2738                  "Channel::DeRegisterRxVadObserver()");
   2739     CriticalSectionScoped cs(&_callbackCritSect);
   2740 
   2741     if (!_rxVadObserverPtr)
   2742     {
   2743         _engineStatisticsPtr->SetLastError(
   2744             VE_INVALID_OPERATION, kTraceWarning,
   2745             "DeRegisterRxVadObserver() observer already disabled");
   2746         return 0;
   2747     }
   2748     _rxVadObserverPtr = NULL;
   2749     _RxVadDetection = false;
   2750     return 0;
   2751 }
   2752 
   2753 int
   2754 Channel::VoiceActivityIndicator(int &activity)
   2755 {
   2756     activity = _sendFrameType;
   2757 
   2758     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2759                  "Channel::VoiceActivityIndicator(indicator=%d)", activity);
   2760     return 0;
   2761 }
   2762 
   2763 #ifdef WEBRTC_VOICE_ENGINE_AGC
   2764 
   2765 int
   2766 Channel::SetRxAgcStatus(bool enable, AgcModes mode)
   2767 {
   2768     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2769                  "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
   2770                  (int)enable, (int)mode);
   2771 
   2772     GainControl::Mode agcMode = kDefaultRxAgcMode;
   2773     switch (mode)
   2774     {
   2775         case kAgcDefault:
   2776             break;
   2777         case kAgcUnchanged:
   2778             agcMode = rx_audioproc_->gain_control()->mode();
   2779             break;
   2780         case kAgcFixedDigital:
   2781             agcMode = GainControl::kFixedDigital;
   2782             break;
   2783         case kAgcAdaptiveDigital:
    2784             agcMode = GainControl::kAdaptiveDigital;
   2785             break;
   2786         default:
   2787             _engineStatisticsPtr->SetLastError(
   2788                 VE_INVALID_ARGUMENT, kTraceError,
   2789                 "SetRxAgcStatus() invalid Agc mode");
   2790             return -1;
   2791     }
   2792 
   2793     if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0)
   2794     {
   2795         _engineStatisticsPtr->SetLastError(
   2796             VE_APM_ERROR, kTraceError,
   2797             "SetRxAgcStatus() failed to set Agc mode");
   2798         return -1;
   2799     }
   2800     if (rx_audioproc_->gain_control()->Enable(enable) != 0)
   2801     {
   2802         _engineStatisticsPtr->SetLastError(
   2803             VE_APM_ERROR, kTraceError,
   2804             "SetRxAgcStatus() failed to set Agc state");
   2805         return -1;
   2806     }
   2807 
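             // The receive-side APM must run if either RX AGC or RX NS is enabled.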
   2808     _rxAgcIsEnabled = enable;
   2809     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
   2810 
   2811     return 0;
   2812 }
   2813 
   2814 int
   2815 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
   2816 {
   2817     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2818                      "Channel::GetRxAgcStatus(enable=?, mode=?)");
   2819 
   2820     bool enable = rx_audioproc_->gain_control()->is_enabled();
   2821     GainControl::Mode agcMode =
   2822         rx_audioproc_->gain_control()->mode();
   2823 
   2824     enabled = enable;
   2825 
   2826     switch (agcMode)
   2827     {
   2828         case GainControl::kFixedDigital:
   2829             mode = kAgcFixedDigital;
   2830             break;
   2831         case GainControl::kAdaptiveDigital:
   2832             mode = kAgcAdaptiveDigital;
   2833             break;
   2834         default:
   2835             _engineStatisticsPtr->SetLastError(
   2836                 VE_APM_ERROR, kTraceError,
   2837                 "GetRxAgcStatus() invalid Agc mode");
   2838             return -1;
   2839     }
   2840 
   2841     return 0;
   2842 }
   2843 
   2844 int
   2845 Channel::SetRxAgcConfig(AgcConfig config)
   2846 {
   2847     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2848                  "Channel::SetRxAgcConfig()");
   2849 
   2850     if (rx_audioproc_->gain_control()->set_target_level_dbfs(
   2851         config.targetLeveldBOv) != 0)
   2852     {
   2853         _engineStatisticsPtr->SetLastError(
   2854             VE_APM_ERROR, kTraceError,
    2855             "SetRxAgcConfig() failed to set target peak |level|"
    2856             " (or envelope) of the Agc");
   2857         return -1;
   2858     }
   2859     if (rx_audioproc_->gain_control()->set_compression_gain_db(
   2860         config.digitalCompressionGaindB) != 0)
   2861     {
   2862         _engineStatisticsPtr->SetLastError(
   2863             VE_APM_ERROR, kTraceError,
   2864             "SetRxAgcConfig() failed to set the range in |gain| the"
   2865             " digital compression stage may apply");
   2866         return -1;
   2867     }
   2868     if (rx_audioproc_->gain_control()->enable_limiter(
   2869         config.limiterEnable) != 0)
   2870     {
   2871         _engineStatisticsPtr->SetLastError(
   2872             VE_APM_ERROR, kTraceError,
   2873             "SetRxAgcConfig() failed to set hard limiter to the signal");
   2874         return -1;
   2875     }
   2876 
   2877     return 0;
   2878 }
   2879 
   2880 int
   2881 Channel::GetRxAgcConfig(AgcConfig& config)
   2882 {
   2883     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
    2884                  "Channel::GetRxAgcConfig(config=?)");
   2885 
   2886     config.targetLeveldBOv =
   2887         rx_audioproc_->gain_control()->target_level_dbfs();
   2888     config.digitalCompressionGaindB =
   2889         rx_audioproc_->gain_control()->compression_gain_db();
   2890     config.limiterEnable =
   2891         rx_audioproc_->gain_control()->is_limiter_enabled();
   2892 
   2893     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2894                VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
   2895                    "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
   2896                    " limiterEnable=%d",
   2897                    config.targetLeveldBOv,
   2898                    config.digitalCompressionGaindB,
   2899                    config.limiterEnable);
   2900 
   2901     return 0;
   2902 }
   2903 
   2904 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
   2905 
   2906 #ifdef WEBRTC_VOICE_ENGINE_NR
   2907 
   2908 int
   2909 Channel::SetRxNsStatus(bool enable, NsModes mode)
   2910 {
   2911     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2912                  "Channel::SetRxNsStatus(enable=%d, mode=%d)",
   2913                  (int)enable, (int)mode);
   2914 
   2915     NoiseSuppression::Level nsLevel = kDefaultNsMode;
   2916     switch (mode)
   2917     {
   2919         case kNsDefault:
   2920             break;
   2921         case kNsUnchanged:
   2922             nsLevel = rx_audioproc_->noise_suppression()->level();
   2923             break;
   2924         case kNsConference:
   2925             nsLevel = NoiseSuppression::kHigh;
   2926             break;
   2927         case kNsLowSuppression:
   2928             nsLevel = NoiseSuppression::kLow;
   2929             break;
   2930         case kNsModerateSuppression:
   2931             nsLevel = NoiseSuppression::kModerate;
   2932             break;
   2933         case kNsHighSuppression:
   2934             nsLevel = NoiseSuppression::kHigh;
   2935             break;
   2936         case kNsVeryHighSuppression:
   2937             nsLevel = NoiseSuppression::kVeryHigh;
   2938             break;
   2939     }
   2940 
   2941     if (rx_audioproc_->noise_suppression()->set_level(nsLevel)
   2942         != 0)
   2943     {
   2944         _engineStatisticsPtr->SetLastError(
   2945             VE_APM_ERROR, kTraceError,
   2946             "SetRxNsStatus() failed to set NS level");
   2947         return -1;
   2948     }
   2949     if (rx_audioproc_->noise_suppression()->Enable(enable) != 0)
   2950     {
   2951         _engineStatisticsPtr->SetLastError(
   2952             VE_APM_ERROR, kTraceError,
   2953             "SetRxNsStatus() failed to set NS state");
   2954         return -1;
   2955     }
   2956 
   2957     _rxNsIsEnabled = enable;
   2958     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
   2959 
   2960     return 0;
   2961 }
   2962 
   2963 int
   2964 Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
   2965 {
   2966     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   2967                  "Channel::GetRxNsStatus(enable=?, mode=?)");
   2968 
   2969     bool enable =
   2970         rx_audioproc_->noise_suppression()->is_enabled();
   2971     NoiseSuppression::Level ncLevel =
   2972         rx_audioproc_->noise_suppression()->level();
   2973 
   2974     enabled = enable;
   2975 
   2976     switch (ncLevel)
   2977     {
   2978         case NoiseSuppression::kLow:
   2979             mode = kNsLowSuppression;
   2980             break;
   2981         case NoiseSuppression::kModerate:
   2982             mode = kNsModerateSuppression;
   2983             break;
   2984         case NoiseSuppression::kHigh:
   2985             mode = kNsHighSuppression;
   2986             break;
   2987         case NoiseSuppression::kVeryHigh:
   2988             mode = kNsVeryHighSuppression;
   2989             break;
   2990     }
   2991 
   2992     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   2993                VoEId(_instanceId,_channelId),
   2994                "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
   2995     return 0;
   2996 }
   2997 
   2998 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
   2999 
   3000 int
   3001 Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
   3002 {
   3003     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3004                  "Channel::RegisterRTCPObserver()");
   3005     CriticalSectionScoped cs(&_callbackCritSect);
   3006 
   3007     if (_rtcpObserverPtr)
   3008     {
   3009         _engineStatisticsPtr->SetLastError(
   3010             VE_INVALID_OPERATION, kTraceError,
   3011             "RegisterRTCPObserver() observer already enabled");
   3012         return -1;
   3013     }
   3014 
   3015     _rtcpObserverPtr = &observer;
   3016     _rtcpObserver = true;
   3017 
   3018     return 0;
   3019 }
   3020 
   3021 int
   3022 Channel::DeRegisterRTCPObserver()
   3023 {
   3024     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3025                  "Channel::DeRegisterRTCPObserver()");
   3026     CriticalSectionScoped cs(&_callbackCritSect);
   3027 
   3028     if (!_rtcpObserverPtr)
   3029     {
   3030         _engineStatisticsPtr->SetLastError(
   3031             VE_INVALID_OPERATION, kTraceWarning,
   3032             "DeRegisterRTCPObserver() observer already disabled");
   3033         return 0;
   3034     }
   3035 
   3036     _rtcpObserver = false;
   3037     _rtcpObserverPtr = NULL;
   3038 
   3039     return 0;
   3040 }
   3041 
   3042 int
   3043 Channel::SetLocalSSRC(unsigned int ssrc)
   3044 {
   3045     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3046                  "Channel::SetLocalSSRC()");
   3047     if (channel_state_.Get().sending)
   3048     {
   3049         _engineStatisticsPtr->SetLastError(
   3050             VE_ALREADY_SENDING, kTraceError,
   3051             "SetLocalSSRC() already sending");
   3052         return -1;
   3053     }
   3054     _rtpRtcpModule->SetSSRC(ssrc);
   3055     return 0;
   3056 }
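         // Illustrative configuration sketch (error handling and channel setup
         // omitted), using methods defined in this file:
         //   channel->SetLocalSSRC(0x12345678);       // must precede sending
         //   channel->SetRTCPStatus(true);            // enable compound RTCP
         //   channel->SetRTCP_CNAME("example-cname");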
   3057 
   3058 int
   3059 Channel::GetLocalSSRC(unsigned int& ssrc)
   3060 {
   3061     ssrc = _rtpRtcpModule->SSRC();
   3062     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3063                  VoEId(_instanceId,_channelId),
   3064                  "GetLocalSSRC() => ssrc=%lu", ssrc);
   3065     return 0;
   3066 }
   3067 
   3068 int
   3069 Channel::GetRemoteSSRC(unsigned int& ssrc)
   3070 {
   3071     ssrc = rtp_receiver_->SSRC();
   3072     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3073                  VoEId(_instanceId,_channelId),
   3074                  "GetRemoteSSRC() => ssrc=%lu", ssrc);
   3075     return 0;
   3076 }
   3077 
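         // RTP header extensions: the audio level extension (RFC 6464) carries the
         // per-packet audio level measured by |rms_level_|, and the absolute send
         // time extension is used by the receiving side for bandwidth estimation.
         // |id| is the extension identifier negotiated for the session.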
   3078 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
   3079   _includeAudioLevelIndication = enable;
   3080   return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
   3081 }
   3082 
   3083 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
   3084                                                   unsigned char id) {
   3085   rtp_header_parser_->DeregisterRtpHeaderExtension(
   3086       kRtpExtensionAudioLevel);
   3087   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
   3088           kRtpExtensionAudioLevel, id)) {
   3089     return -1;
   3090   }
   3091   return 0;
   3092 }
   3093 
   3094 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
   3095   return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
   3096 }
   3097 
   3098 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
   3099   rtp_header_parser_->DeregisterRtpHeaderExtension(
   3100       kRtpExtensionAbsoluteSendTime);
   3101   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
   3102       kRtpExtensionAbsoluteSendTime, id)) {
   3103     return -1;
   3104   }
   3105   return 0;
   3106 }
   3107 
   3108 int
   3109 Channel::SetRTCPStatus(bool enable)
   3110 {
   3111     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3112                  "Channel::SetRTCPStatus()");
   3113     if (_rtpRtcpModule->SetRTCPStatus(enable ?
   3114         kRtcpCompound : kRtcpOff) != 0)
   3115     {
   3116         _engineStatisticsPtr->SetLastError(
   3117             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   3118             "SetRTCPStatus() failed to set RTCP status");
   3119         return -1;
   3120     }
   3121     return 0;
   3122 }
   3123 
   3124 int
   3125 Channel::GetRTCPStatus(bool& enabled)
   3126 {
   3127     RTCPMethod method = _rtpRtcpModule->RTCP();
   3128     enabled = (method != kRtcpOff);
   3129     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3130                  VoEId(_instanceId,_channelId),
   3131                  "GetRTCPStatus() => enabled=%d", enabled);
   3132     return 0;
   3133 }
   3134 
   3135 int
   3136 Channel::SetRTCP_CNAME(const char cName[256])
   3137 {
   3138     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3139                  "Channel::SetRTCP_CNAME()");
   3140     if (_rtpRtcpModule->SetCNAME(cName) != 0)
   3141     {
   3142         _engineStatisticsPtr->SetLastError(
   3143             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   3144             "SetRTCP_CNAME() failed to set RTCP CNAME");
   3145         return -1;
   3146     }
   3147     return 0;
   3148 }
   3149 
   3150 int
   3151 Channel::GetRemoteRTCP_CNAME(char cName[256])
   3152 {
   3153     if (cName == NULL)
   3154     {
   3155         _engineStatisticsPtr->SetLastError(
   3156             VE_INVALID_ARGUMENT, kTraceError,
   3157             "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
   3158         return -1;
   3159     }
   3160     char cname[RTCP_CNAME_SIZE];
   3161     const uint32_t remoteSSRC = rtp_receiver_->SSRC();
   3162     if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
   3163     {
   3164         _engineStatisticsPtr->SetLastError(
   3165             VE_CANNOT_RETRIEVE_CNAME, kTraceError,
   3166             "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
   3167         return -1;
   3168     }
   3169     strcpy(cName, cname);
   3170     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3171                  VoEId(_instanceId, _channelId),
   3172                  "GetRemoteRTCP_CNAME() => cName=%s", cName);
   3173     return 0;
   3174 }
   3175 
   3176 int
   3177 Channel::GetRemoteRTCPData(
   3178     unsigned int& NTPHigh,
   3179     unsigned int& NTPLow,
   3180     unsigned int& timestamp,
   3181     unsigned int& playoutTimestamp,
   3182     unsigned int* jitter,
   3183     unsigned short* fractionLost)
   3184 {
   3185     // --- Information from sender info in received Sender Reports
   3186 
   3187     RTCPSenderInfo senderInfo;
   3188     if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
   3189     {
   3190         _engineStatisticsPtr->SetLastError(
   3191             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   3192             "GetRemoteRTCPData() failed to retrieve sender info for remote "
   3193             "side");
   3194         return -1;
   3195     }
   3196 
    3197     // We only use 12 of the 20 bytes in the sender info (the packet and
    3198     // octet counts are ignored).
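             // NTPseconds/NTPfraction hold the 64-bit NTP timestamp from the sender
             // report, and RTPtimeStamp is the RTP timestamp that corresponds to it.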
   3199     NTPHigh = senderInfo.NTPseconds;
   3200     NTPLow = senderInfo.NTPfraction;
   3201     timestamp = senderInfo.RTPtimeStamp;
   3202 
   3203     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3204                  VoEId(_instanceId, _channelId),
   3205                  "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
   3206                  "timestamp=%lu",
   3207                  NTPHigh, NTPLow, timestamp);
   3208 
   3209     // --- Locally derived information
   3210 
   3211     // This value is updated on each incoming RTCP packet (0 when no packet
   3212     // has been received)
   3213     playoutTimestamp = playout_timestamp_rtcp_;
   3214 
   3215     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3216                  VoEId(_instanceId, _channelId),
   3217                  "GetRemoteRTCPData() => playoutTimestamp=%lu",
   3218                  playout_timestamp_rtcp_);
   3219 
   3220     if (NULL != jitter || NULL != fractionLost)
   3221     {
    3222         // Get all RTCP receiver report blocks that have been received on this
    3223         // channel. If we have received RTP packets from the remote source, we
    3224         // know its SSRC and use the matching report block.
    3225         // Otherwise, use the first report block.
   3226         std::vector<RTCPReportBlock> remote_stats;
   3227         if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
   3228             remote_stats.empty()) {
   3229           WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   3230                        VoEId(_instanceId, _channelId),
   3231                        "GetRemoteRTCPData() failed to measure statistics due"
   3232                        " to lack of received RTP and/or RTCP packets");
   3233           return -1;
   3234         }
   3235 
   3236         uint32_t remoteSSRC = rtp_receiver_->SSRC();
   3237         std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
   3238         for (; it != remote_stats.end(); ++it) {
   3239           if (it->remoteSSRC == remoteSSRC)
   3240             break;
   3241         }
   3242 
   3243         if (it == remote_stats.end()) {
   3244           // If we have not received any RTCP packets from this SSRC it probably
   3245           // means that we have not received any RTP packets.
   3246           // Use the first received report block instead.
   3247           it = remote_stats.begin();
   3248           remoteSSRC = it->remoteSSRC;
   3249         }
   3250 
   3251         if (jitter) {
   3252           *jitter = it->jitter;
   3253           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3254                        VoEId(_instanceId, _channelId),
   3255                        "GetRemoteRTCPData() => jitter = %lu", *jitter);
   3256         }
   3257 
   3258         if (fractionLost) {
   3259           *fractionLost = it->fractionLost;
   3260           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3261                        VoEId(_instanceId, _channelId),
   3262                        "GetRemoteRTCPData() => fractionLost = %lu",
   3263                        *fractionLost);
   3264         }
   3265     }
   3266     return 0;
   3267 }
   3268 
   3269 int
   3270 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
   3271                                              unsigned int name,
   3272                                              const char* data,
   3273                                              unsigned short dataLengthInBytes)
   3274 {
   3275     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3276                  "Channel::SendApplicationDefinedRTCPPacket()");
   3277     if (!channel_state_.Get().sending)
   3278     {
   3279         _engineStatisticsPtr->SetLastError(
   3280             VE_NOT_SENDING, kTraceError,
   3281             "SendApplicationDefinedRTCPPacket() not sending");
   3282         return -1;
   3283     }
   3284     if (NULL == data)
   3285     {
   3286         _engineStatisticsPtr->SetLastError(
   3287             VE_INVALID_ARGUMENT, kTraceError,
   3288             "SendApplicationDefinedRTCPPacket() invalid data value");
   3289         return -1;
   3290     }
   3291     if (dataLengthInBytes % 4 != 0)
   3292     {
   3293         _engineStatisticsPtr->SetLastError(
   3294             VE_INVALID_ARGUMENT, kTraceError,
   3295             "SendApplicationDefinedRTCPPacket() invalid length value");
   3296         return -1;
   3297     }
   3298     RTCPMethod status = _rtpRtcpModule->RTCP();
   3299     if (status == kRtcpOff)
   3300     {
   3301         _engineStatisticsPtr->SetLastError(
   3302             VE_RTCP_ERROR, kTraceError,
   3303             "SendApplicationDefinedRTCPPacket() RTCP is disabled");
   3304         return -1;
   3305     }
   3306 
   3307     // Create and schedule the RTCP APP packet for transmission
   3308     if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
   3309         subType,
   3310         name,
   3311         (const unsigned char*) data,
   3312         dataLengthInBytes) != 0)
   3313     {
   3314         _engineStatisticsPtr->SetLastError(
   3315             VE_SEND_ERROR, kTraceError,
   3316             "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
   3317         return -1;
   3318     }
   3319     return 0;
   3320 }
   3321 
   3322 int
   3323 Channel::GetRTPStatistics(
   3324         unsigned int& averageJitterMs,
   3325         unsigned int& maxJitterMs,
   3326         unsigned int& discardedPackets)
   3327 {
    3328     // The jitter statistics are updated for each received RTP packet, i.e.,
    3329     // they are based only on packets received on this channel.
   3330     if (_rtpRtcpModule->RTCP() == kRtcpOff) {
    3331       // If RTCP is off, there is no timed thread in the RTCP module regularly
    3332       // generating new stats, so trigger the update manually here instead.
   3333       StreamStatistician* statistician =
   3334           rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
   3335       if (statistician) {
    3336         // Don't use the returned statistics; use the data from the proxy
    3337         // instead so that max jitter can be fetched atomically.
   3338         RtcpStatistics s;
   3339         statistician->GetStatistics(&s, true);
   3340       }
   3341     }
   3342 
   3343     ChannelStatistics stats = statistics_proxy_->GetStats();
   3344     const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
   3345     if (playoutFrequency > 0) {
   3346       // Scale RTP statistics given the current playout frequency
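               // RTCP jitter is expressed in RTP timestamp units (RFC 3550), so
               // dividing by samples-per-millisecond converts it to milliseconds;
               // e.g., a jitter of 960 units at 48 kHz playout is 20 ms.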
   3347       maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
   3348       averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
   3349     }
   3350 
   3351     discardedPackets = _numberOfDiscardedPackets;
   3352 
   3353     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3354                VoEId(_instanceId, _channelId),
   3355                "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
   3356                " discardedPackets = %lu)",
   3357                averageJitterMs, maxJitterMs, discardedPackets);
   3358     return 0;
   3359 }
   3360 
   3361 int Channel::GetRemoteRTCPReportBlocks(
   3362     std::vector<ReportBlock>* report_blocks) {
   3363   if (report_blocks == NULL) {
   3364     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
    3365       "GetRemoteRTCPReportBlocks() invalid report_blocks.");
   3366     return -1;
   3367   }
   3368 
   3369   // Get the report blocks from the latest received RTCP Sender or Receiver
   3370   // Report. Each element in the vector contains the sender's SSRC and a
   3371   // report block according to RFC 3550.
   3372   std::vector<RTCPReportBlock> rtcp_report_blocks;
   3373   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
   3374     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   3375         "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
   3376     return -1;
   3377   }
   3378 
   3379   if (rtcp_report_blocks.empty())
   3380     return 0;
   3381 
   3382   std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
   3383   for (; it != rtcp_report_blocks.end(); ++it) {
   3384     ReportBlock report_block;
   3385     report_block.sender_SSRC = it->remoteSSRC;
   3386     report_block.source_SSRC = it->sourceSSRC;
   3387     report_block.fraction_lost = it->fractionLost;
   3388     report_block.cumulative_num_packets_lost = it->cumulativeLost;
   3389     report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
   3390     report_block.interarrival_jitter = it->jitter;
   3391     report_block.last_SR_timestamp = it->lastSR;
   3392     report_block.delay_since_last_SR = it->delaySinceLastSR;
   3393     report_blocks->push_back(report_block);
   3394   }
   3395   return 0;
   3396 }
   3397 
   3398 int
   3399 Channel::GetRTPStatistics(CallStatistics& stats)
   3400 {
   3401     // --- RtcpStatistics
   3402 
    3403     // The jitter statistics are updated for each received RTP packet, i.e.,
    3404     // they are based only on packets received on this channel.
   3405     RtcpStatistics statistics;
   3406     StreamStatistician* statistician =
   3407         rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
   3408     if (!statistician || !statistician->GetStatistics(
   3409         &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
   3410       _engineStatisticsPtr->SetLastError(
   3411           VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
   3412           "GetRTPStatistics() failed to read RTP statistics from the "
   3413           "RTP/RTCP module");
   3414     }
   3415 
   3416     stats.fractionLost = statistics.fraction_lost;
   3417     stats.cumulativeLost = statistics.cumulative_lost;
   3418     stats.extendedMax = statistics.extended_max_sequence_number;
   3419     stats.jitterSamples = statistics.jitter;
   3420 
   3421     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3422                  VoEId(_instanceId, _channelId),
   3423                  "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
   3424                  " extendedMax=%lu, jitterSamples=%li)",
   3425                  stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
   3426                  stats.jitterSamples);
   3427 
   3428     // --- RTT
   3429     stats.rttMs = GetRTT();
   3430 
   3431     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3432                  VoEId(_instanceId, _channelId),
   3433                  "GetRTPStatistics() => rttMs=%d", stats.rttMs);
   3434 
   3435     // --- Data counters
   3436 
   3437     uint32_t bytesSent(0);
   3438     uint32_t packetsSent(0);
   3439     uint32_t bytesReceived(0);
   3440     uint32_t packetsReceived(0);
   3441 
   3442     if (statistician) {
   3443       statistician->GetDataCounters(&bytesReceived, &packetsReceived);
   3444     }
   3445 
   3446     if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
   3447                                         &packetsSent) != 0)
   3448     {
   3449         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   3450                      VoEId(_instanceId, _channelId),
   3451                      "GetRTPStatistics() failed to retrieve RTP datacounters =>"
   3452                      " output will not be complete");
   3453     }
   3454 
   3455     stats.bytesSent = bytesSent;
   3456     stats.packetsSent = packetsSent;
   3457     stats.bytesReceived = bytesReceived;
   3458     stats.packetsReceived = packetsReceived;
   3459 
   3460     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3461                  VoEId(_instanceId, _channelId),
   3462                  "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
   3463                  " bytesReceived=%d, packetsReceived=%d)",
   3464                  stats.bytesSent, stats.packetsSent, stats.bytesReceived,
   3465                  stats.packetsReceived);
   3466 
   3467     // --- Timestamps
   3468     {
   3469       CriticalSectionScoped lock(ts_stats_lock_.get());
   3470       stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
   3471     }
   3472     return 0;
   3473 }
   3474 
   3475 int Channel::SetREDStatus(bool enable, int redPayloadtype) {
   3476   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3477                "Channel::SetREDStatus()");
   3478 
   3479   if (enable) {
   3480     if (redPayloadtype < 0 || redPayloadtype > 127) {
   3481       _engineStatisticsPtr->SetLastError(
   3482           VE_PLTYPE_ERROR, kTraceError,
   3483           "SetREDStatus() invalid RED payload type");
   3484       return -1;
   3485     }
   3486 
   3487     if (SetRedPayloadType(redPayloadtype) < 0) {
   3488       _engineStatisticsPtr->SetLastError(
   3489           VE_CODEC_ERROR, kTraceError,
    3490           "SetREDStatus() failed to register the RED payload type in the ACM");
   3491       return -1;
   3492     }
   3493   }
   3494 
   3495   if (audio_coding_->SetREDStatus(enable) != 0) {
   3496     _engineStatisticsPtr->SetLastError(
   3497         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   3498         "SetREDStatus() failed to set RED state in the ACM");
   3499     return -1;
   3500   }
   3501   return 0;
   3502 }
   3503 
   3504 int
   3505 Channel::GetREDStatus(bool& enabled, int& redPayloadtype)
   3506 {
   3507     enabled = audio_coding_->REDStatus();
   3508     if (enabled)
   3509     {
   3510         int8_t payloadType(0);
   3511         if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
   3512         {
   3513             _engineStatisticsPtr->SetLastError(
   3514                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   3515                 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
   3516                 "module");
   3517             return -1;
    3518         }
                 redPayloadtype = payloadType;
   3519         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3520                    VoEId(_instanceId, _channelId),
   3521                    "GetREDStatus() => enabled=%d, redPayloadtype=%d",
   3522                    enabled, redPayloadtype);
   3523         return 0;
   3524     }
   3525     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3526                  VoEId(_instanceId, _channelId),
   3527                  "GetREDStatus() => enabled=%d", enabled);
   3528     return 0;
   3529 }
   3530 
   3531 int Channel::SetCodecFECStatus(bool enable) {
   3532   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3533                "Channel::SetCodecFECStatus()");
   3534 
   3535   if (audio_coding_->SetCodecFEC(enable) != 0) {
   3536     _engineStatisticsPtr->SetLastError(
   3537         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   3538         "SetCodecFECStatus() failed to set FEC state");
   3539     return -1;
   3540   }
   3541   return 0;
   3542 }
   3543 
   3544 bool Channel::GetCodecFECStatus() {
   3545   bool enabled = audio_coding_->CodecFEC();
   3546   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3547                VoEId(_instanceId, _channelId),
   3548                "GetCodecFECStatus() => enabled=%d", enabled);
   3549   return enabled;
   3550 }
   3551 
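         // Enables or disables NACK handling for this channel: the RTP/RTCP module
         // stores up to |maxNumberOfPackets| sent packets for retransmission, the
         // receive statistics allow the same amount of reordering, and the RTP
         // receiver and NetEq are configured to request missing packets.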
   3552 void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
   3553   // None of these functions can fail.
   3554   _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
   3555   rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
   3556   rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
   3557   if (enable)
   3558     audio_coding_->EnableNack(maxNumberOfPackets);
   3559   else
   3560     audio_coding_->DisableNack();
   3561 }
   3562 
   3563 // Called when we are missing one or more packets.
   3564 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
   3565   return _rtpRtcpModule->SendNACK(sequence_numbers, length);
   3566 }
   3567 
   3568 int
   3569 Channel::StartRTPDump(const char fileNameUTF8[1024],
   3570                       RTPDirections direction)
   3571 {
   3572     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3573                  "Channel::StartRTPDump()");
   3574     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
   3575     {
   3576         _engineStatisticsPtr->SetLastError(
   3577             VE_INVALID_ARGUMENT, kTraceError,
   3578             "StartRTPDump() invalid RTP direction");
   3579         return -1;
   3580     }
   3581     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
   3582         &_rtpDumpIn : &_rtpDumpOut;
   3583     if (rtpDumpPtr == NULL)
   3584     {
   3585         assert(false);
   3586         return -1;
   3587     }
   3588     if (rtpDumpPtr->IsActive())
   3589     {
   3590         rtpDumpPtr->Stop();
   3591     }
   3592     if (rtpDumpPtr->Start(fileNameUTF8) != 0)
   3593     {
   3594         _engineStatisticsPtr->SetLastError(
   3595             VE_BAD_FILE, kTraceError,
   3596             "StartRTPDump() failed to create file");
   3597         return -1;
   3598     }
   3599     return 0;
   3600 }
   3601 
   3602 int
   3603 Channel::StopRTPDump(RTPDirections direction)
   3604 {
   3605     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
   3606                  "Channel::StopRTPDump()");
   3607     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
   3608     {
   3609         _engineStatisticsPtr->SetLastError(
   3610             VE_INVALID_ARGUMENT, kTraceError,
   3611             "StopRTPDump() invalid RTP direction");
   3612         return -1;
   3613     }
   3614     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
   3615         &_rtpDumpIn : &_rtpDumpOut;
   3616     if (rtpDumpPtr == NULL)
   3617     {
   3618         assert(false);
   3619         return -1;
   3620     }
   3621     if (!rtpDumpPtr->IsActive())
   3622     {
   3623         return 0;
   3624     }
   3625     return rtpDumpPtr->Stop();
   3626 }
   3627 
   3628 bool
   3629 Channel::RTPDumpIsActive(RTPDirections direction)
   3630 {
   3631     if ((direction != kRtpIncoming) &&
   3632         (direction != kRtpOutgoing))
   3633     {
   3634         _engineStatisticsPtr->SetLastError(
   3635             VE_INVALID_ARGUMENT, kTraceError,
   3636             "RTPDumpIsActive() invalid RTP direction");
   3637         return false;
   3638     }
   3639     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
   3640         &_rtpDumpIn : &_rtpDumpOut;
   3641     return rtpDumpPtr->IsActive();
   3642 }
   3643 
   3644 void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
   3645                                       int video_channel) {
   3646   CriticalSectionScoped cs(&_callbackCritSect);
   3647   if (vie_network_) {
   3648     vie_network_->Release();
   3649     vie_network_ = NULL;
   3650   }
   3651   video_channel_ = -1;
   3652 
   3653   if (vie_network != NULL && video_channel != -1) {
   3654     vie_network_ = vie_network;
   3655     video_channel_ = video_channel;
   3656   }
   3657 }
   3658 
   3659 uint32_t
   3660 Channel::Demultiplex(const AudioFrame& audioFrame)
   3661 {
   3662     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   3663                  "Channel::Demultiplex()");
   3664     _audioFrame.CopyFrom(audioFrame);
   3665     _audioFrame.id_ = _channelId;
   3666     return 0;
   3667 }
   3668 
   3669 void Channel::Demultiplex(const int16_t* audio_data,
   3670                           int sample_rate,
   3671                           int number_of_frames,
   3672                           int number_of_channels) {
   3673   CodecInst codec;
   3674   GetSendCodec(codec);
   3675 
   3676   if (!mono_recording_audio_.get()) {
   3677     // Temporary space for DownConvertToCodecFormat.
   3678     mono_recording_audio_.reset(new int16_t[kMaxMonoDataSizeSamples]);
   3679   }
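           // Remix and resample the captured audio to the send codec's channel count
           // and sample rate; the converted frame is stored in |_audioFrame|.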
   3680   DownConvertToCodecFormat(audio_data,
   3681                            number_of_frames,
   3682                            number_of_channels,
   3683                            sample_rate,
   3684                            codec.channels,
   3685                            codec.plfreq,
   3686                            mono_recording_audio_.get(),
   3687                            &input_resampler_,
   3688                            &_audioFrame);
   3689 }
   3690 
   3691 uint32_t
   3692 Channel::PrepareEncodeAndSend(int mixingFrequency)
   3693 {
   3694     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   3695                  "Channel::PrepareEncodeAndSend()");
   3696 
   3697     if (_audioFrame.samples_per_channel_ == 0)
   3698     {
   3699         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
   3700                      "Channel::PrepareEncodeAndSend() invalid audio frame");
   3701         return 0xFFFFFFFF;
   3702     }
   3703 
   3704     if (channel_state_.Get().input_file_playing)
   3705     {
   3706         MixOrReplaceAudioWithFile(mixingFrequency);
   3707     }
   3708 
   3709     bool is_muted = Mute();  // Cache locally as Mute() takes a lock.
   3710     if (is_muted) {
   3711       AudioFrameOperations::Mute(_audioFrame);
   3712     }
   3713 
   3714     if (channel_state_.Get().input_external_media)
   3715     {
   3716         CriticalSectionScoped cs(&_callbackCritSect);
   3717         const bool isStereo = (_audioFrame.num_channels_ == 2);
   3718         if (_inputExternalMediaCallbackPtr)
   3719         {
   3720             _inputExternalMediaCallbackPtr->Process(
   3721                 _channelId,
   3722                 kRecordingPerChannel,
   3723                (int16_t*)_audioFrame.data_,
   3724                 _audioFrame.samples_per_channel_,
   3725                 _audioFrame.sample_rate_hz_,
   3726                 isStereo);
   3727         }
   3728     }
   3729 
   3730     InsertInbandDtmfTone();
   3731 
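             // Feed the (possibly muted) frame to the RMS level meter so that the
             // audio level header extension reflects what is actually transmitted.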
   3732     if (_includeAudioLevelIndication) {
   3733       int length = _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
   3734       if (is_muted) {
   3735         rms_level_.ProcessMuted(length);
   3736       } else {
   3737         rms_level_.Process(_audioFrame.data_, length);
   3738       }
   3739     }
   3740 
   3741     return 0;
   3742 }
   3743 
   3744 uint32_t
   3745 Channel::EncodeAndSend()
   3746 {
   3747     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   3748                  "Channel::EncodeAndSend()");
   3749 
   3750     assert(_audioFrame.num_channels_ <= 2);
   3751     if (_audioFrame.samples_per_channel_ == 0)
   3752     {
   3753         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
   3754                      "Channel::EncodeAndSend() invalid audio frame");
   3755         return 0xFFFFFFFF;
   3756     }
   3757 
   3758     _audioFrame.id_ = _channelId;
   3759 
    3760     // --- Add 10 ms of raw (PCM) audio data to the encoder.
   3761 
   3762     // The ACM resamples internally.
   3763     _audioFrame.timestamp_ = _timeStamp;
   3764     if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) != 0)
   3765     {
   3766         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
   3767                      "Channel::EncodeAndSend() ACM encoding failed");
   3768         return 0xFFFFFFFF;
   3769     }
   3770 
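             // Advance the local RTP timestamp by one 10 ms frame of samples.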
   3771     _timeStamp += _audioFrame.samples_per_channel_;
   3772 
   3773     // --- Encode if complete frame is ready
   3774 
   3775     // This call will trigger AudioPacketizationCallback::SendData if encoding
   3776     // is done and payload is ready for packetization and transmission.
   3777     return audio_coding_->Process();
   3778 }
   3779 
   3780 int Channel::RegisterExternalMediaProcessing(
   3781     ProcessingTypes type,
   3782     VoEMediaProcess& processObject)
   3783 {
   3784     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3785                  "Channel::RegisterExternalMediaProcessing()");
   3786 
   3787     CriticalSectionScoped cs(&_callbackCritSect);
   3788 
   3789     if (kPlaybackPerChannel == type)
   3790     {
   3791         if (_outputExternalMediaCallbackPtr)
   3792         {
   3793             _engineStatisticsPtr->SetLastError(
   3794                 VE_INVALID_OPERATION, kTraceError,
   3795                 "Channel::RegisterExternalMediaProcessing() "
   3796                 "output external media already enabled");
   3797             return -1;
   3798         }
   3799         _outputExternalMediaCallbackPtr = &processObject;
   3800         _outputExternalMedia = true;
   3801     }
   3802     else if (kRecordingPerChannel == type)
   3803     {
   3804         if (_inputExternalMediaCallbackPtr)
   3805         {
   3806             _engineStatisticsPtr->SetLastError(
   3807                 VE_INVALID_OPERATION, kTraceError,
   3808                 "Channel::RegisterExternalMediaProcessing() "
    3809                 "input external media already enabled");
   3810             return -1;
   3811         }
   3812         _inputExternalMediaCallbackPtr = &processObject;
   3813         channel_state_.SetInputExternalMedia(true);
   3814     }
   3815     return 0;
   3816 }
   3817 
   3818 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
   3819 {
   3820     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3821                  "Channel::DeRegisterExternalMediaProcessing()");
   3822 
   3823     CriticalSectionScoped cs(&_callbackCritSect);
   3824 
   3825     if (kPlaybackPerChannel == type)
   3826     {
   3827         if (!_outputExternalMediaCallbackPtr)
   3828         {
   3829             _engineStatisticsPtr->SetLastError(
   3830                 VE_INVALID_OPERATION, kTraceWarning,
   3831                 "Channel::DeRegisterExternalMediaProcessing() "
   3832                 "output external media already disabled");
   3833             return 0;
   3834         }
   3835         _outputExternalMedia = false;
   3836         _outputExternalMediaCallbackPtr = NULL;
   3837     }
   3838     else if (kRecordingPerChannel == type)
   3839     {
   3840         if (!_inputExternalMediaCallbackPtr)
   3841         {
   3842             _engineStatisticsPtr->SetLastError(
   3843                 VE_INVALID_OPERATION, kTraceWarning,
   3844                 "Channel::DeRegisterExternalMediaProcessing() "
   3845                 "input external media already disabled");
   3846             return 0;
   3847         }
   3848         channel_state_.SetInputExternalMedia(false);
   3849         _inputExternalMediaCallbackPtr = NULL;
   3850     }
   3851 
   3852     return 0;
   3853 }
   3854 
   3855 int Channel::SetExternalMixing(bool enabled) {
   3856     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3857                  "Channel::SetExternalMixing(enabled=%d)", enabled);
   3858 
   3859     if (channel_state_.Get().playing)
   3860     {
   3861         _engineStatisticsPtr->SetLastError(
   3862             VE_INVALID_OPERATION, kTraceError,
   3863             "Channel::SetExternalMixing() "
   3864             "external mixing cannot be changed while playing.");
   3865         return -1;
   3866     }
   3867 
   3868     _externalMixing = enabled;
   3869 
   3870     return 0;
   3871 }
   3872 
   3873 int
   3874 Channel::GetNetworkStatistics(NetworkStatistics& stats)
   3875 {
   3876     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3877                  "Channel::GetNetworkStatistics()");
   3878     ACMNetworkStatistics acm_stats;
   3879     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
   3880     if (return_value >= 0) {
   3881       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
   3882     }
   3883     return return_value;
   3884 }
   3885 
   3886 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
   3887   audio_coding_->GetDecodingCallStatistics(stats);
   3888 }
   3889 
   3890 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
   3891                                int* playout_buffer_delay_ms) const {
   3892   if (_average_jitter_buffer_delay_us == 0) {
   3893     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3894                  "Channel::GetDelayEstimate() no valid estimate.");
   3895     return false;
   3896   }
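           // Convert the averaged jitter buffer delay from microseconds to
           // milliseconds (rounding to nearest) and add the most recent receive-side
           // packet delay.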
   3897   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
   3898       _recPacketDelayMs;
   3899   *playout_buffer_delay_ms = playout_delay_ms_;
   3900   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3901                "Channel::GetDelayEstimate()");
   3902   return true;
   3903 }
   3904 
   3905 int Channel::SetInitialPlayoutDelay(int delay_ms)
   3906 {
   3907   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3908                "Channel::SetInitialPlayoutDelay()");
   3909   if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
   3910       (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
   3911   {
   3912     _engineStatisticsPtr->SetLastError(
   3913         VE_INVALID_ARGUMENT, kTraceError,
   3914         "SetInitialPlayoutDelay() invalid min delay");
   3915     return -1;
   3916   }
   3917   if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
   3918   {
   3919     _engineStatisticsPtr->SetLastError(
   3920         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   3921         "SetInitialPlayoutDelay() failed to set min playout delay");
   3922     return -1;
   3923   }
   3924   return 0;
   3925 }
   3926 
   3927 
   3928 int
   3929 Channel::SetMinimumPlayoutDelay(int delayMs)
   3930 {
   3931     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3932                  "Channel::SetMinimumPlayoutDelay()");
   3933     if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
   3934         (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
   3935     {
   3936         _engineStatisticsPtr->SetLastError(
   3937             VE_INVALID_ARGUMENT, kTraceError,
   3938             "SetMinimumPlayoutDelay() invalid min delay");
   3939         return -1;
   3940     }
   3941     if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0)
   3942     {
   3943         _engineStatisticsPtr->SetLastError(
   3944             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   3945             "SetMinimumPlayoutDelay() failed to set min playout delay");
   3946         return -1;
   3947     }
   3948     return 0;
   3949 }
   3950 
   3951 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
   3952   uint32_t playout_timestamp = 0;
   3953 
   3954   if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1)  {
    3955     // This can happen if this channel has not received any RTP packets. In
    3956     // that case, NetEq cannot compute a playout timestamp.
   3957     return;
   3958   }
   3959 
   3960   uint16_t delay_ms = 0;
   3961   if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
   3962     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
   3963                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
   3964                  " delay from the ADM");
   3965     _engineStatisticsPtr->SetLastError(
   3966         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
   3967         "UpdatePlayoutTimestamp() failed to retrieve playout delay");
   3968     return;
   3969   }
   3970 
   3971   jitter_buffer_playout_timestamp_ = playout_timestamp;
   3972 
   3973   // Remove the playout delay.
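           // |delay_ms| is converted to RTP timestamp units using the current playout
           // frequency (samples per millisecond) before it is subtracted.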
   3974   playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
   3975 
   3976   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
   3977                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
   3978                playout_timestamp);
   3979 
   3980   if (rtcp) {
   3981     playout_timestamp_rtcp_ = playout_timestamp;
   3982   } else {
   3983     playout_timestamp_rtp_ = playout_timestamp;
   3984   }
   3985   playout_delay_ms_ = delay_ms;
   3986 }
   3987 
   3988 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
   3989   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   3990                "Channel::GetPlayoutTimestamp()");
   3991   if (playout_timestamp_rtp_ == 0)  {
   3992     _engineStatisticsPtr->SetLastError(
   3993         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
   3994         "GetPlayoutTimestamp() failed to retrieve timestamp");
   3995     return -1;
   3996   }
   3997   timestamp = playout_timestamp_rtp_;
   3998   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
   3999                VoEId(_instanceId,_channelId),
   4000                "GetPlayoutTimestamp() => timestamp=%u", timestamp);
   4001   return 0;
   4002 }
   4003 
   4004 int
   4005 Channel::SetInitTimestamp(unsigned int timestamp)
   4006 {
   4007     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   4008                "Channel::SetInitTimestamp()");
   4009     if (channel_state_.Get().sending)
   4010     {
   4011         _engineStatisticsPtr->SetLastError(
   4012             VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
   4013         return -1;
   4014     }
   4015     if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
   4016     {
   4017         _engineStatisticsPtr->SetLastError(
   4018             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   4019             "SetInitTimestamp() failed to set timestamp");
   4020         return -1;
   4021     }
   4022     return 0;
   4023 }
   4024 
   4025 int
   4026 Channel::SetInitSequenceNumber(short sequenceNumber)
   4027 {
   4028     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   4029                  "Channel::SetInitSequenceNumber()");
   4030     if (channel_state_.Get().sending)
   4031     {
   4032         _engineStatisticsPtr->SetLastError(
   4033             VE_SENDING, kTraceError,
   4034             "SetInitSequenceNumber() already sending");
   4035         return -1;
   4036     }
   4037     if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
   4038     {
   4039         _engineStatisticsPtr->SetLastError(
   4040             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   4041             "SetInitSequenceNumber() failed to set sequence number");
   4042         return -1;
   4043     }
   4044     return 0;
   4045 }
   4046 
   4047 int
   4048 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
   4049 {
   4050     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   4051                  "Channel::GetRtpRtcp()");
   4052     *rtpRtcpModule = _rtpRtcpModule.get();
   4053     *rtp_receiver = rtp_receiver_.get();
   4054     return 0;
   4055 }
   4056 
   4057 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
   4058 // a shared helper.
   4059 int32_t
   4060 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
   4061 {
   4062     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
   4063     int fileSamples(0);
   4064 
   4065     {
   4066         CriticalSectionScoped cs(&_fileCritSect);
   4067 
   4068         if (_inputFilePlayerPtr == NULL)
   4069         {
   4070             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4071                          VoEId(_instanceId, _channelId),
    4072                          "Channel::MixOrReplaceAudioWithFile() file player"
    4073                          " doesn't exist");
   4074             return -1;
   4075         }
   4076 
   4077         if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
   4078                                                       fileSamples,
   4079                                                       mixingFrequency) == -1)
   4080         {
   4081             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4082                          VoEId(_instanceId, _channelId),
   4083                          "Channel::MixOrReplaceAudioWithFile() file mixing "
   4084                          "failed");
   4085             return -1;
   4086         }
   4087         if (fileSamples == 0)
   4088         {
   4089             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4090                          VoEId(_instanceId, _channelId),
    4091                          "Channel::MixOrReplaceAudioWithFile() file has ended");
   4092             return 0;
   4093         }
   4094     }
   4095 
   4096     assert(_audioFrame.samples_per_channel_ == fileSamples);
   4097 
   4098     if (_mixFileWithMicrophone)
   4099     {
   4100         // Currently file stream is always mono.
   4101         // TODO(xians): Change the code when FilePlayer supports real stereo.
   4102         MixWithSat(_audioFrame.data_,
   4103                    _audioFrame.num_channels_,
   4104                    fileBuffer.get(),
   4105                    1,
   4106                    fileSamples);
   4107     }
   4108     else
   4109     {
   4110         // Replace ACM audio with file.
   4111         // Currently file stream is always mono.
   4112         // TODO(xians): Change the code when FilePlayer supports real stereo.
   4113         _audioFrame.UpdateFrame(_channelId,
   4114                                 0xFFFFFFFF,
   4115                                 fileBuffer.get(),
   4116                                 fileSamples,
   4117                                 mixingFrequency,
   4118                                 AudioFrame::kNormalSpeech,
   4119                                 AudioFrame::kVadUnknown,
   4120                                 1);
   4121 
   4122     }
   4123     return 0;
   4124 }
   4125 
   4126 int32_t
   4127 Channel::MixAudioWithFile(AudioFrame& audioFrame,
   4128                           int mixingFrequency)
   4129 {
   4130     assert(mixingFrequency <= 48000);
   4131 
   4132     scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]);
   4133     int fileSamples(0);
   4134 
   4135     {
   4136         CriticalSectionScoped cs(&_fileCritSect);
   4137 
   4138         if (_outputFilePlayerPtr == NULL)
   4139         {
   4140             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4141                          VoEId(_instanceId, _channelId),
   4142                          "Channel::MixAudioWithFile() file mixing failed");
   4143             return -1;
   4144         }
   4145 
   4146         // We should get the frequency we ask for.
   4147         if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
   4148                                                        fileSamples,
   4149                                                        mixingFrequency) == -1)
   4150         {
   4151             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4152                          VoEId(_instanceId, _channelId),
   4153                          "Channel::MixAudioWithFile() file mixing failed");
   4154             return -1;
   4155         }
   4156     }
   4157 
   4158     if (audioFrame.samples_per_channel_ == fileSamples)
   4159     {
   4160         // Currently file stream is always mono.
   4161         // TODO(xians): Change the code when FilePlayer supports real stereo.
   4162         MixWithSat(audioFrame.data_,
   4163                    audioFrame.num_channels_,
   4164                    fileBuffer.get(),
   4165                    1,
   4166                    fileSamples);
   4167     }
   4168     else
   4169     {
   4170         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
   4171             "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
   4172             "fileSamples(%d)",
   4173             audioFrame.samples_per_channel_, fileSamples);
   4174         return -1;
   4175     }
   4176 
   4177     return 0;
   4178 }
   4179 
   4180 int
   4181 Channel::InsertInbandDtmfTone()
   4182 {
   4183     // Check if we should start a new tone.
   4184     if (_inbandDtmfQueue.PendingDtmf() &&
   4185         !_inbandDtmfGenerator.IsAddingTone() &&
   4186         _inbandDtmfGenerator.DelaySinceLastTone() >
   4187         kMinTelephoneEventSeparationMs)
   4188     {
   4189         int8_t eventCode(0);
   4190         uint16_t lengthMs(0);
   4191         uint8_t attenuationDb(0);
   4192 
   4193         eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
   4194         _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
   4195         if (_playInbandDtmfEvent)
   4196         {
   4197             // Add tone to output mixer using a reduced length to minimize
   4198             // risk of echo.
   4199             _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
   4200                                           attenuationDb);
   4201         }
   4202     }
   4203 
   4204     if (_inbandDtmfGenerator.IsAddingTone())
   4205     {
   4206         uint16_t frequency(0);
   4207         _inbandDtmfGenerator.GetSampleRate(frequency);
   4208 
   4209         if (frequency != _audioFrame.sample_rate_hz_)
   4210         {
   4211             // Update sample rate of Dtmf tone since the mixing frequency
   4212             // has changed.
   4213             _inbandDtmfGenerator.SetSampleRate(
   4214                 (uint16_t) (_audioFrame.sample_rate_hz_));
   4215             // Reset the tone to be added taking the new sample rate into
   4216             // account.
   4217             _inbandDtmfGenerator.ResetTone();
   4218         }
   4219 
   4220         int16_t toneBuffer[320];
   4221         uint16_t toneSamples(0);
    4222         // Get 10 ms tone segment and set time since last tone to zero.
   4223         if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
   4224         {
   4225             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4226                        VoEId(_instanceId, _channelId),
    4227                        "Channel::InsertInbandDtmfTone() inserting Dtmf failed");
   4228             return -1;
   4229         }
   4230 
   4231         // Replace mixed audio with DTMF tone.
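                 // The generated tone is mono; the same tone sample is written
                 // to every channel of the (possibly multi-channel) frame.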
   4232         for (int sample = 0;
   4233             sample < _audioFrame.samples_per_channel_;
   4234             sample++)
   4235         {
   4236             for (int channel = 0;
   4237                 channel < _audioFrame.num_channels_;
   4238                 channel++)
   4239             {
   4240                 const int index = sample * _audioFrame.num_channels_ + channel;
   4241                 _audioFrame.data_[index] = toneBuffer[sample];
   4242             }
   4243         }
   4244 
   4245         assert(_audioFrame.samples_per_channel_ == toneSamples);
   4246     } else
   4247     {
   4248         // Add 10ms to "delay-since-last-tone" counter
   4249         _inbandDtmfGenerator.UpdateDelaySinceLastTone();
   4250     }
   4251     return 0;
   4252 }
   4253 
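         // Hands the packet to the external transport registered on this
         // channel and returns the transport's own return value, or -1 when
         // no transport is set.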
   4254 int32_t
   4255 Channel::SendPacketRaw(const void *data, int len, bool RTCP)
   4256 {
   4257     CriticalSectionScoped cs(&_callbackCritSect);
   4258     if (_transportPtr == NULL)
   4259     {
   4260         return -1;
   4261     }
   4262     if (!RTCP)
   4263     {
   4264         return _transportPtr->SendPacket(_channelId, data, len);
   4265     }
   4266     else
   4267     {
   4268         return _transportPtr->SendRTCPPacket(_channelId, data, len);
   4269     }
   4270 }
   4271 
   4272 // Called for incoming RTP packets after successful RTP header parsing.
   4273 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
   4274                                 uint16_t sequence_number) {
   4275   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
    4276                "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
   4277                rtp_timestamp, sequence_number);
   4278 
   4279   // Get frequency of last received payload
   4280   int rtp_receive_frequency = GetPlayoutFrequency();
   4281 
   4282   // Update the least required delay.
   4283   least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
   4284 
   4285   // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
   4286   // every incoming packet.
   4287   uint32_t timestamp_diff_ms = (rtp_timestamp -
   4288       jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
   4289   if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
   4290       timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
   4291     // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
   4292     // timestamp, the resulting difference is negative, but is set to zero.
   4293     // This can happen when a network glitch causes a packet to arrive late,
   4294     // and during long comfort noise periods with clock drift.
   4295     timestamp_diff_ms = 0;
   4296   }
   4297 
   4298   uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
   4299       (rtp_receive_frequency / 1000);
   4300 
   4301   _previousTimestamp = rtp_timestamp;
   4302 
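           // A zero diff (late packet or comfort-noise clock drift, see above)
           // is not fed into the averaged jitter-buffer delay below.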
   4303   if (timestamp_diff_ms == 0) return;
   4304 
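           // Only inter-arrival times in the typical 10-60 ms packetization
           // range are stored as the received-packet delay.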
   4305   if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
   4306     _recPacketDelayMs = packet_delay_ms;
   4307   }
   4308 
   4309   if (_average_jitter_buffer_delay_us == 0) {
   4310     _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
   4311     return;
   4312   }
   4313 
   4314   // Filter average delay value using exponential filter (alpha is
    4315   // 7/8). We derive 1000 * _average_jitter_buffer_delay_us here (reduces
   4316   // risk of rounding error) and compensate for it in GetDelayEstimate()
   4317   // later.
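           // As a recurrence: avg_us_new = (7 * avg_us_old +
           // 1000 * timestamp_diff_ms + 500) / 8, where the +500 rounds to the
           // nearest integer instead of truncating.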
   4318   _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
   4319       1000 * timestamp_diff_ms + 500) / 8;
   4320 }
   4321 
   4322 void
   4323 Channel::RegisterReceiveCodecsToRTPModule()
   4324 {
   4325     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
   4326                  "Channel::RegisterReceiveCodecsToRTPModule()");
   4327 
   4328 
   4329     CodecInst codec;
   4330     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
   4331 
   4332     for (int idx = 0; idx < nSupportedCodecs; idx++)
   4333     {
   4334         // Open up the RTP/RTCP receiver for all supported codecs
   4335         if ((audio_coding_->Codec(idx, &codec) == -1) ||
   4336             (rtp_receiver_->RegisterReceivePayload(
   4337                 codec.plname,
   4338                 codec.pltype,
   4339                 codec.plfreq,
   4340                 codec.channels,
   4341                 (codec.rate < 0) ? 0 : codec.rate) == -1))
   4342         {
   4343             WEBRTC_TRACE(
   4344                          kTraceWarning,
   4345                          kTraceVoice,
   4346                          VoEId(_instanceId, _channelId),
   4347                          "Channel::RegisterReceiveCodecsToRTPModule() unable"
   4348                          " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
   4349                          codec.plname, codec.pltype, codec.plfreq,
   4350                          codec.channels, codec.rate);
   4351         }
   4352         else
   4353         {
   4354             WEBRTC_TRACE(
   4355                          kTraceInfo,
   4356                          kTraceVoice,
   4357                          VoEId(_instanceId, _channelId),
   4358                          "Channel::RegisterReceiveCodecsToRTPModule() %s "
   4359                          "(%d/%d/%d/%d) has been added to the RTP/RTCP "
   4360                          "receiver",
   4361                          codec.plname, codec.pltype, codec.plfreq,
   4362                          codec.channels, codec.rate);
   4363         }
   4364     }
   4365 }
   4366 
   4367 int Channel::SetSecondarySendCodec(const CodecInst& codec,
   4368                                    int red_payload_type) {
   4369   // Sanity check for payload type.
   4370   if (red_payload_type < 0 || red_payload_type > 127) {
   4371     _engineStatisticsPtr->SetLastError(
   4372         VE_PLTYPE_ERROR, kTraceError,
    4373         "SetSecondarySendCodec() invalid RED payload type");
   4374     return -1;
   4375   }
   4376 
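           // Register RED (RTP redundant audio data, RFC 2198) in the ACM and
           // the RTP module before adding the secondary codec, which is
           // intended to be carried as the redundant encoding inside RED.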
   4377   if (SetRedPayloadType(red_payload_type) < 0) {
   4378     _engineStatisticsPtr->SetLastError(
   4379         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
    4380         "SetSecondarySendCodec() Failed to register RED in ACM");
   4381     return -1;
   4382   }
   4383   if (audio_coding_->RegisterSecondarySendCodec(codec) < 0) {
   4384     _engineStatisticsPtr->SetLastError(
   4385         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   4386         "SetSecondarySendCodec() Failed to register secondary send codec in "
   4387         "ACM");
   4388     return -1;
   4389   }
   4390 
   4391   return 0;
   4392 }
   4393 
   4394 void Channel::RemoveSecondarySendCodec() {
   4395   audio_coding_->UnregisterSecondarySendCodec();
   4396 }
   4397 
   4398 int Channel::GetSecondarySendCodec(CodecInst* codec) {
   4399   if (audio_coding_->SecondarySendCodec(codec) < 0) {
   4400     _engineStatisticsPtr->SetLastError(
   4401         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
    4402         "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
   4403     return -1;
   4404   }
   4405   return 0;
   4406 }
   4407 
   4408 // Assuming this method is called with valid payload type.
   4409 int Channel::SetRedPayloadType(int red_payload_type) {
   4410   CodecInst codec;
   4411   bool found_red = false;
   4412 
   4413   // Get default RED settings from the ACM database
   4414   const int num_codecs = AudioCodingModule::NumberOfCodecs();
   4415   for (int idx = 0; idx < num_codecs; idx++) {
   4416     audio_coding_->Codec(idx, &codec);
   4417     if (!STR_CASE_CMP(codec.plname, "RED")) {
   4418       found_red = true;
   4419       break;
   4420     }
   4421   }
   4422 
   4423   if (!found_red) {
   4424     _engineStatisticsPtr->SetLastError(
   4425         VE_CODEC_ERROR, kTraceError,
   4426         "SetRedPayloadType() RED is not supported");
   4427     return -1;
   4428   }
   4429 
   4430   codec.pltype = red_payload_type;
   4431   if (audio_coding_->RegisterSendCodec(codec) < 0) {
   4432     _engineStatisticsPtr->SetLastError(
   4433         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
   4434         "SetRedPayloadType() RED registration in ACM module failed");
   4435     return -1;
   4436   }
   4437 
   4438   if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
   4439     _engineStatisticsPtr->SetLastError(
   4440         VE_RTP_RTCP_MODULE_ERROR, kTraceError,
   4441         "SetRedPayloadType() RED registration in RTP/RTCP module failed");
   4442     return -1;
   4443   }
   4444   return 0;
   4445 }
   4446 
   4447 int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
   4448                                        unsigned char id) {
   4449   int error = 0;
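           // Always deregister first so that repeated calls can change the
           // extension id; re-register only when the extension is enabled.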
   4450   _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
   4451   if (enable) {
   4452     error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
   4453   }
   4454   return error;
   4455 }
   4456 
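         // Returns the frequency used to convert RTP timestamps to
         // milliseconds: normally the ACM playout frequency, overridden for
         // G.722 and Opus whose RTP clock rates differ (see below).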
   4457 int32_t Channel::GetPlayoutFrequency() {
   4458   int32_t playout_frequency = audio_coding_->PlayoutFrequency();
    4459   CodecInst current_receive_codec;
    4460   if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
    4461     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
   4462       // Even though the actual sampling rate for G.722 audio is
   4463       // 16,000 Hz, the RTP clock rate for the G722 payload format is
   4464       // 8,000 Hz because that value was erroneously assigned in
   4465       // RFC 1890 and must remain unchanged for backward compatibility.
   4466       playout_frequency = 8000;
    4467     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
   4468       // We are resampling Opus internally to 32,000 Hz until all our
   4469       // DSP routines can operate at 48,000 Hz, but the RTP clock
   4470       // rate for the Opus payload format is standardized to 48,000 Hz,
   4471       // because that is the maximum supported decoding sampling rate.
   4472       playout_frequency = 48000;
   4473     }
   4474   }
   4475   return playout_frequency;
   4476 }
   4477 
   4478 int Channel::GetRTT() const {
   4479   RTCPMethod method = _rtpRtcpModule->RTCP();
   4480   if (method == kRtcpOff) {
   4481     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4482                  VoEId(_instanceId, _channelId),
    4483                  "GetRTT() RTCP is disabled => valid RTT "
   4484                  "measurements cannot be retrieved");
   4485     return 0;
   4486   }
   4487   std::vector<RTCPReportBlock> report_blocks;
   4488   _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
   4489   if (report_blocks.empty()) {
   4490     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4491                  VoEId(_instanceId, _channelId),
    4492                  "GetRTT() failed to measure RTT since no "
   4493                  "RTCP packets have been received yet");
   4494     return 0;
   4495   }
   4496 
   4497   uint32_t remoteSSRC = rtp_receiver_->SSRC();
   4498   std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
   4499   for (; it != report_blocks.end(); ++it) {
   4500     if (it->remoteSSRC == remoteSSRC)
   4501       break;
   4502   }
   4503   if (it == report_blocks.end()) {
   4504     // We have not received packets with SSRC matching the report blocks.
   4505     // To calculate RTT we try with the SSRC of the first report block.
   4506     // This is very important for send-only channels where we don't know
   4507     // the SSRC of the other end.
   4508     remoteSSRC = report_blocks[0].remoteSSRC;
   4509   }
   4510   uint16_t rtt = 0;
   4511   uint16_t avg_rtt = 0;
    4512   uint16_t max_rtt = 0;
   4513   uint16_t min_rtt = 0;
   4514   if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt)
   4515       != 0) {
   4516     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
   4517                  VoEId(_instanceId, _channelId),
    4518                  "GetRTT() failed to retrieve RTT from "
   4519                  "the RTP/RTCP module");
   4520     return 0;
   4521   }
   4522   return static_cast<int>(rtt);
   4523 }
   4524 
   4525 }  // namespace voe
   4526 }  // namespace webrtc
   4527