This source file includes following definitions.
- Start
 
- AddUrlHandler
 
- Factory
 
- NumberOutstandingRequests
 
- FinishPendingRequests
 
- ErrorPendingRequests
 
- weak_factory_
 
- StartAsync
 
- FillBufferHelper
 
- ReadRawData
 
- CheckDoneStatus
 
- GetResponseInfo
 
- GetResponseInfoConst
 
- GetMimeType
 
#include "content/test/net/url_request_slow_download_job.h"

#include <algorithm>

#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "content/public/browser/browser_thread.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_filter.h"
#include "url/gurl.h"
namespace content {
// Test URLs handled by this job.  The "unknown-size"/"known-size" URLs start
// a download that stalls after the first chunk; the "finish"/"error" control
// URLs release or fail every stalled job (see StartAsync()).
const char URLRequestSlowDownloadJob::kUnknownSizeUrl[] =
  "http://url.handled.by.slow.download/download-unknown-size";
const char URLRequestSlowDownloadJob::kKnownSizeUrl[] =
  "http://url.handled.by.slow.download/download-known-size";
const char URLRequestSlowDownloadJob::kFinishDownloadUrl[] =
  "http://url.handled.by.slow.download/download-finish";
const char URLRequestSlowDownloadJob::kErrorDownloadUrl[] =
  "http://url.handled.by.slow.download/download-error";
// Sizes of the two body chunks: the first is sent immediately, the second
// only after the finish trigger.
const int URLRequestSlowDownloadJob::kFirstDownloadSize = 1024 * 35;
const int URLRequestSlowDownloadJob::kSecondDownloadSize = 1024 * 10;
// Set of jobs currently stalled between the two chunks (control-URL jobs are
// never inserted -- see Factory()).
base::LazyInstance<URLRequestSlowDownloadJob::SlowJobsSet>::Leaky
    URLRequestSlowDownloadJob::pending_requests_ = LAZY_INSTANCE_INITIALIZER;
void URLRequestSlowDownloadJob::Start() {
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&URLRequestSlowDownloadJob::StartAsync,
                 weak_factory_.GetWeakPtr()));
}
// Registers Factory() as the handler for all four slow-download test URLs.
void URLRequestSlowDownloadJob::AddUrlHandler() {
  static const char* const kHandledUrls[] = {
      kUnknownSizeUrl,
      kKnownSizeUrl,
      kFinishDownloadUrl,
      kErrorDownloadUrl,
  };
  net::URLRequestFilter* filter = net::URLRequestFilter::GetInstance();
  for (size_t i = 0; i < sizeof(kHandledUrls) / sizeof(kHandledUrls[0]); ++i) {
    filter->AddUrlHandler(GURL(kHandledUrls[i]),
                          &URLRequestSlowDownloadJob::Factory);
  }
}
// URLRequestFilter factory callback.  Creates a job for |request|; jobs for
// the finish/error control URLs are NOT tracked in pending_requests_, since
// they only trigger state changes on the stalled download jobs.
net::URLRequestJob* URLRequestSlowDownloadJob::Factory(
    net::URLRequest* request,
    net::NetworkDelegate* network_delegate,
    const std::string& scheme) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  URLRequestSlowDownloadJob* new_job =
      new URLRequestSlowDownloadJob(request, network_delegate);
  const std::string& spec = request->url().spec();
  const bool is_control_url =
      (spec == kFinishDownloadUrl || spec == kErrorDownloadUrl);
  if (!is_control_url)
    pending_requests_.Get().insert(new_job);
  return new_job;
}
size_t URLRequestSlowDownloadJob::NumberOutstandingRequests() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  return pending_requests_.Get().size();
}
void URLRequestSlowDownloadJob::FinishPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  for (JobList::iterator it = pending_requests_.Get().begin(); it !=
       pending_requests_.Get().end(); ++it) {
    (*it)->set_should_finish_download();
  }
}
void URLRequestSlowDownloadJob::ErrorPendingRequests() {
  typedef std::set<URLRequestSlowDownloadJob*> JobList;
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  for (JobList::iterator it = pending_requests_.Get().begin(); it !=
       pending_requests_.Get().end(); ++it) {
    (*it)->set_should_error_download();
  }
}
// Constructs a job in its initial state: nothing sent yet, neither the
// finish nor the error trigger observed, and no pending read buffer.
URLRequestSlowDownloadJob::URLRequestSlowDownloadJob(
    net::URLRequest* request, net::NetworkDelegate* network_delegate)
    : net::URLRequestJob(request, network_delegate),
      bytes_already_sent_(0),
      should_error_download_(false),
      should_finish_download_(false),
      buffer_size_(0),
      weak_factory_(this) {
}
// Posted from Start().  If this request is one of the control URLs, flips
// the corresponding flag on all pending jobs before reporting headers.
// NOTE(review): the constant is passed as LowerCaseEqualsASCII()'s first
// argument and the request spec as the "expected lowercase" second argument
// -- presumably equivalent here because both strings are already lowercase;
// confirm against base/strings/string_util.h.
void URLRequestSlowDownloadJob::StartAsync() {
  if (LowerCaseEqualsASCII(kFinishDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::FinishPendingRequests();
  if (LowerCaseEqualsASCII(kErrorDownloadUrl, request_->url().spec().c_str()))
    URLRequestSlowDownloadJob::ErrorPendingRequests();
  NotifyHeadersComplete();
}
// Fills |buf| with up to |buf_size| '*' bytes and advances
// bytes_already_sent_.  The body is served as two chunks:
//   [0, kFirstDownloadSize)                  -- always available;
//   [kFirstDownloadSize, kFirst + kSecond)   -- only after
//       should_finish_download_ is set via the finish control URL.
// Returns BUFFER_FILLED when data was produced (count in |*bytes_written|),
// REQUEST_BLOCKED when the caller must wait for the finish trigger, and
// REQUEST_COMPLETE once the full body has been sent.
URLRequestSlowDownloadJob::ReadStatus
URLRequestSlowDownloadJob::FillBufferHelper(
    net::IOBuffer* buf, int buf_size, int* bytes_written) {
  if (bytes_already_sent_ < kFirstDownloadSize) {
    int bytes_to_write = std::min(kFirstDownloadSize - bytes_already_sent_,
                                  buf_size);
    // std::fill replaces the hand-rolled per-byte loop.
    std::fill(buf->data(), buf->data() + bytes_to_write, '*');
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }
  if (!should_finish_download_)
    return REQUEST_BLOCKED;
  if (bytes_already_sent_ < kFirstDownloadSize + kSecondDownloadSize) {
    int bytes_to_write =
        std::min(kFirstDownloadSize + kSecondDownloadSize - bytes_already_sent_,
                 buf_size);
    std::fill(buf->data(), buf->data() + bytes_to_write, '*');
    *bytes_written = bytes_to_write;
    bytes_already_sent_ += bytes_to_write;
    return BUFFER_FILLED;
  }
  return REQUEST_COMPLETE;
}
// Serves up to |buf_size| bytes of the synthetic body.  Returns true with
// |*bytes_read| > 0 when data was produced, true with |*bytes_read| == 0 at
// end-of-stream, and false (after marking the request IO_PENDING) when the
// job stalls between chunks -- CheckDoneStatus() completes the read later.
bool URLRequestSlowDownloadJob::ReadRawData(net::IOBuffer* buf, int buf_size,
                                            int* bytes_read) {
  // Control URLs have no body of their own; report immediate EOF.
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl,
                           request_->url().spec().c_str())) {
    VLOG(10) << __FUNCTION__ << " called w/ kFinish/ErrorDownloadUrl.";
    *bytes_read = 0;
    return true;
  }
  VLOG(10) << __FUNCTION__ << " called at position "
           << bytes_already_sent_ << " in the stream.";
  ReadStatus status = FillBufferHelper(buf, buf_size, bytes_read);
  switch (status) {
    case BUFFER_FILLED:
      return true;
    case REQUEST_BLOCKED:
      // Keep a reference to the caller's buffer; CheckDoneStatus() fills it
      // once should_finish_download_ is set, then notifies completion.
      buffer_ = buf;
      buffer_size_ = buf_size;
      SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
      base::MessageLoop::current()->PostDelayedTask(
          FROM_HERE,
          base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                     weak_factory_.GetWeakPtr()),
          base::TimeDelta::FromMilliseconds(100));
      return false;
    case REQUEST_COMPLETE:
      *bytes_read = 0;
      return true;
  }
  NOTREACHED();
  return true;
}
// 100ms poll armed by ReadRawData() while a read is pending: completes the
// stalled read once the finish trigger arrives, fails the request on the
// error trigger, or re-arms itself.
void URLRequestSlowDownloadJob::CheckDoneStatus() {
  if (should_finish_download_) {
    VLOG(10) << __FUNCTION__ << " called w/ should_finish_download_ set.";
    DCHECK(NULL != buffer_.get());
    int bytes_written = 0;
    ReadStatus status =
        FillBufferHelper(buffer_.get(), buffer_size_, &bytes_written);
    DCHECK_EQ(BUFFER_FILLED, status);
    buffer_ = NULL;  // Drop the reference held across the pending read.
    SetStatus(net::URLRequestStatus());
    NotifyReadComplete(bytes_written);
  } else if (should_error_download_) {
    // Fixed log message: this branch fires on should_error_download_, but
    // the old message said "should_finish_ownload_" (wrong flag + typo).
    VLOG(10) << __FUNCTION__ << " called w/ should_error_download_ set.";
    NotifyDone(net::URLRequestStatus(
        net::URLRequestStatus::FAILED, net::ERR_CONNECTION_RESET));
  } else {
    // No trigger yet; poll again shortly.
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&URLRequestSlowDownloadJob::CheckDoneStatus,
                   weak_factory_.GetWeakPtr()),
        base::TimeDelta::FromMilliseconds(100));
  }
}
// net::URLRequestJob override; delegates to the const helper so the
// header-building logic can be shared with GetMimeType().
void URLRequestSlowDownloadJob::GetResponseInfo(net::HttpResponseInfo* info) {
  GetResponseInfoConst(info);
}
// Deregisters this job from the pending set.  Control-URL jobs were never
// inserted (see Factory()); erase() of an absent element is a no-op.
URLRequestSlowDownloadJob::~URLRequestSlowDownloadJob() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  pending_requests_.Get().erase(this);
}
// Synthesizes mock HTTP response headers for the request's URL: control URLs
// get a plain-text 200; download URLs get an uncacheable octet-stream, with
// a Content-Length only for the known-size URL.
void URLRequestSlowDownloadJob::GetResponseInfoConst(
    net::HttpResponseInfo* info) const {
  // Build the raw header text with '\n' separators first.
  std::string raw_headers;
  if (LowerCaseEqualsASCII(kFinishDownloadUrl,
                           request_->url().spec().c_str()) ||
      LowerCaseEqualsASCII(kErrorDownloadUrl,
                           request_->url().spec().c_str())) {
    raw_headers.append(
      "HTTP/1.1 200 OK\n"
      "Content-type: text/plain\n");
  } else {
    raw_headers.append(
      "HTTP/1.1 200 OK\n"
      "Content-type: application/octet-stream\n"
      "Cache-Control: max-age=0\n");
    if (LowerCaseEqualsASCII(kKnownSizeUrl, request_->url().spec().c_str())) {
      raw_headers.append(base::StringPrintf(
          "Content-Length: %d\n",
          kFirstDownloadSize + kSecondDownloadSize));
    }
  }
  // HttpResponseHeaders expects '\0' (not '\n') as the line terminator in
  // its raw input, so rewrite the separators before constructing it.
  ReplaceSubstringsAfterOffset(&raw_headers, 0, "\n", std::string("\0", 1));
  info->headers = new net::HttpResponseHeaders(raw_headers);
}
bool URLRequestSlowDownloadJob::GetMimeType(std::string* mime_type) const {
  net::HttpResponseInfo info;
  GetResponseInfoConst(&info);
  return info.headers.get() && info.headers->GetMimeType(mime_type);
}
}