This source file includes the following definitions:
- CreateAndInitializeVEA
- Enqueue
- UseOutputBitstreamBufferId
- RequestEncodingParametersChange
- Destroy
- RequireBitstreamBuffers
- BitstreamBufferReady
- NotifyError
- EncodeOneFrame
- EncodeFrameFinished
- RegisterAsyncWaiter
- SignalAsyncWaiter
- weak_factory_
- InitEncode
- RegisterEncodeCompleteCallback
- Release
- SetChannelParameters
- SetRates
- ReturnEncodedImage
- NotifyError
- RecordInitEncodeUMA
 
#include "content/renderer/media/rtc_video_encoder.h"
#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/rand_util.h"
#include "base/synchronization/waitable_event.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "third_party/webrtc/system_wrappers/interface/tick_util.h"
#define NOTIFY_ERROR(x)                             \
  do {                                              \
    DLOG(ERROR) << "calling NotifyError(): " << x;  \
    NotifyError(x);                                 \
  } while (0)
namespace content {
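// RTCVideoEncoder::Impl does the actual work of talking to a
// media::VideoEncodeAccelerator.  It is constructed on the thread that owns
// the RTCVideoEncoder, but all of its methods then run on the task runner
// provided by |gpu_factories_| (hence the thread_checker_.DetachFromThread()
// in the constructor).  Results are posted back to the RTCVideoEncoder via
// |encoder_message_loop_proxy_| and |weak_encoder_|, and synchronous callers
// block on a base::WaitableEvent until Impl calls SignalAsyncWaiter().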
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
       const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);
  // Create the VEA and call Initialize() on it.  The caller blocks on
  // |async_waiter| until the result is stored in |*async_retval| and the
  // waiter is signalled, which lets RTCVideoEncoder::InitEncode() behave
  // synchronously.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.  As with
  // CreateAndInitializeVEA(), the caller blocks on |async_waiter| until
  // |*async_retval| has been set, so RTCVideoEncoder::Encode() appears
  // synchronous to WebRTC.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);
  // Return the output buffer identified by |bitstream_buffer_id| to the VEA
  // for reuse once RTCVideoEncoder has delivered the encoded image to WebRTC.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);
  // Request a change to the encoding bitrate (in kbps) and framerate.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
  // Destroy the underlying VideoEncodeAccelerator.  Must be called on the
  // same thread as the other methods.
  void Destroy();
  // media::VideoEncodeAccelerator::Client implementation.
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;
 private:
  friend class base::RefCountedThreadSafe<Impl>;
  enum {
    kInputBufferExtraCount = 1,  // Number of input buffers allocated on top
                                 // of what the VEA asks for in
                                 // RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };
  virtual ~Impl();
  // Perform encoding of the frame currently held in |input_next_frame_|.
  void EncodeOneFrame();
  // Notification that the frame using input buffer |index| is no longer
  // needed, so the buffer can be returned to |input_buffers_free_|.
  void EncodeFrameFinished(int index);
  // Set up / signal the waiter that makes the cross-thread calls above appear
  // synchronous to the caller.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);
  base::ThreadChecker thread_checker_;
  // Weak pointer to the parent RTCVideoEncoder, for delivering results back
  // on the encoder thread.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;
  // Message loop on which callbacks to |weak_encoder_| are posted.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;
  // Factory for creating the VEA and shared memory buffers.
  const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;
  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // The waiting caller parks on |async_waiter_| and receives its return value
  // through |async_retval_|.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;
  // The underlying video encode accelerator.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;
  // Next input frame to encode.  There is at most one pending frame, so a
  // single pointer suffices as the "queue".
  const webrtc::I420VideoFrame* input_next_frame_;
  // Whether to force a keyframe for |input_next_frame_|.
  bool input_next_frame_keyframe_;
  // Frame sizes: the coded size required by the VEA and the visible size
  // actually being encoded.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;
  // Shared memory used to pass input frames to, and receive bitstream output
  // from, the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;
  // Indices of free input buffers, used as a LIFO.
  std::vector<int> input_buffers_free_;
  // Number of output buffers currently owned by the VEA.
  int output_buffers_free_count_;
  // 15-bit picture ID sent with VP8 codec-specific info; wraps at 0x7FFF.
  uint16 picture_id_;
  DISALLOW_COPY_AND_ASSIGN(Impl);
};
RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0) {
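  // Impl is constructed on the RTCVideoEncoder's thread, but all subsequent
  // calls arrive on |gpu_factories_|'s task runner, so detach the checker and
  // let it re-bind on first use.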
  thread_checker_.DetachFromThread();
  // Start the picture ID at a random 15-bit value.
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}
void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());
  RegisterAsyncWaiter(async_waiter, async_retval);
  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  if (!video_encoder_->Initialize(media::VideoFrame::I420,
                                  input_visible_size_,
                                  profile,
                                  bitrate * 1000,
                                  this)) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
}
void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);
  RegisterAsyncWaiter(async_waiter, async_retval);
  // If there are no free input buffers and no free output buffers, nothing
  // can make progress: EncodeOneFrame() cannot run without an input buffer,
  // and input buffers are only recycled once the encoder finishes with them,
  // which in turn requires a free output buffer.  Rather than block the
  // caller indefinitely waiting for either, drop the frame and return an
  // error; a failed Encode() is not fatal to WebRTC.
  if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
    DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}
void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
    output_buffers_free_count_++;
  }
}
void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}
void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}
void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!video_encoder_)
    return;
  input_frame_coded_size_ = input_coded_size;
  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
            media::VideoFrame::I420, input_coded_size));
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }
  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }
  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
    output_buffers_free_count_++;
  }
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());
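  // Validate the values reported by the VEA before touching the shared memory
  // they refer to.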
  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  output_buffers_free_count_--;
  // Derive the capture time (in ms) and the 90 kHz RTP timestamp from the
  // current WebRTC tick count.
  const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();
  
  int64 capture_time_ms = capture_time_us / 1000;
  uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_us * 90 / 1000);
  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  image->_timeStamp = rtp_timestamp;
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;
  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id,
                 picture_id_));
  // Wrap the 15-bit picture ID.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
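  // Map the VEA error onto the closest WebRTC error code.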
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (video_encoder_)
    video_encoder_.release()->Destroy();
  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}
RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());
  // Take the pending frame out of |input_next_frame_| before doing anything
  // that could re-enter this class (e.g. the frame's no-longer-needed
  // callback firing EncodeFrameFinished()), so that re-entrant calls see
  // consistent state.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;
  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalPackedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          reinterpret_cast<uint8*>(input_buffer->memory()),
          input_buffer->mapped_size(),
          input_buffer->handle(),
          base::TimeDelta(),
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
  if (!frame) {
    DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  // Do a strided copy of the input frame into the shared-memory-backed
  // |frame| so its layout matches the coded size the VEA asked for.
  media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
                    next_frame->stride(webrtc::kYPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
                    next_frame->stride(webrtc::kUPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
                    next_frame->stride(webrtc::kVPlane),
                    next_frame->height(),
                    frame.get());
  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}
void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}
void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}
#undef NOTIFY_ERROR
RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_factory_(this) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}
RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}
int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);
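  // Invalidate weak pointers handed out to any previous Impl before creating
  // a new one, so stale callbacks cannot reach this encoder.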
  weak_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));
  // webrtc::VideoEncoder expects InitEncode() to be synchronous: block until
  // the initialization posted above completes on |gpu_factories_|'s task
  // runner.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval);
  return initialization_retval;
}
int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // If the Impl has been torn down (or was never created), report the stored
  // status instead of encoding.
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }
  bool want_key_frame = frame_types && frame_types->size() &&
                        frame_types->front() == webrtc::kKeyFrame;
  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 want_key_frame,
                 &encode_waiter,
                 &encode_retval));
  // Block until the frame has been accepted (or dropped) on |gpu_factories_|'s
  // task runner.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}
int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }
  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (impl_) {
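    // Impl is reference-counted; the posted Destroy() task keeps it alive
    // until the VEA has been torn down on |gpu_factories_|'s task runner.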
    gpu_factories_->GetTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    weak_factory_.InvalidateWeakPtrs();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored: the underlying encoder is not informed of packet loss or RTT.
  return WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}
void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id,
                                         uint16 picture_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
           << "bitstream_buffer_id=" << bitstream_buffer_id
           << ", picture_id=" << picture_id;
  if (!encoded_image_callback_)
    return;
  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
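  // Populate VP8-specific fields.  No temporal scalability is used, so
  // tl0PicIdx and keyIdx are left unset (-1).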
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = picture_id;
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }
  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;
  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }
  // The Encoded() call above is synchronous, so the output buffer can be
  // handed back to the Impl for reuse right away.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}
void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;
  impl_status_ = error;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}
void RTCVideoEncoder::RecordInitEncodeUMA(int32_t init_retval) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              video_codec_profile_,
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
}
}  // namespace content