cur_               21 chrome/browser/ui/tab_contents/tab_contents_iterator.cc   DCHECK(cur_ || web_view_index_ == -1) << "Trying to advance past the end";
cur_               35 chrome/browser/ui/tab_contents/tab_contents_iterator.cc       cur_ = next_tab;
cur_               40 chrome/browser/ui/tab_contents/tab_contents_iterator.cc   cur_ = NULL;
cur_               34 chrome/browser/ui/tab_contents/tab_contents_iterator.h   bool done() const { return cur_ == NULL; }
cur_               46 chrome/browser/ui/tab_contents/tab_contents_iterator.h     return cur_;
cur_               49 chrome/browser/ui/tab_contents/tab_contents_iterator.h     return cur_;
cur_               64 chrome/browser/ui/tab_contents/tab_contents_iterator.h   content::WebContents* cur_;
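The tab_contents_iterator hits above follow a pointer-sentinel iterator pattern: cur_ holds the current WebContents, advancing replaces it with the next tab, and a null cur_ marks the end (done()). Below is a minimal sketch of that idiom over a generic flat collection; Item, ItemIterator, and the vector backing store are illustrative stand-ins, not the real Chromium browser/tab traversal.

#include <cassert>
#include <cstddef>
#include <vector>

struct Item { int id; };

class ItemIterator {
 public:
  explicit ItemIterator(const std::vector<Item*>& items)
      : items_(items), index_(0), cur_(nullptr) {
    Next();  // Position on the first element; stays null if the list is empty.
  }

  // Mirrors done(): iteration is finished once cur_ is null.
  bool done() const { return cur_ == nullptr; }

  Item* operator->() const { return cur_; }
  Item* operator*() const { return cur_; }

  // Mirrors Next(): advance cur_, leaving it null past the end.
  void Next() {
    assert(cur_ || index_ == 0);  // Analogous to the DCHECK on over-advancing.
    if (index_ < items_.size())
      cur_ = items_[index_++];
    else
      cur_ = nullptr;
  }

 private:
  const std::vector<Item*>& items_;
  size_t index_;  // Next element to hand out.
  Item* cur_;     // Current element; null once iteration is finished.
};

Typical use mirrors the real iterator: for (ItemIterator it(items); !it.done(); it.Next()) { /* use *it */ }.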
cur_              113 net/dns/dns_response.cc   size_t consumed = ReadName(cur_, &out->name);
cur_              116 net/dns/dns_response.cc   base::BigEndianReader reader(cur_ + consumed,
cur_              117 net/dns/dns_response.cc                                packet_ + length_ - (cur_ + consumed));
cur_              124 net/dns/dns_response.cc     cur_ = reader.ptr();
cur_              131 net/dns/dns_response.cc   size_t consumed = ReadName(cur_, NULL);
cur_              135 net/dns/dns_response.cc   const char* next = cur_ + consumed + 2 * sizeof(uint16);  // QTYPE + QCLASS
cur_              139 net/dns/dns_response.cc   cur_ = next;
cur_               53 net/dns/dns_response.h   bool AtEnd() const { return cur_ == packet_ + length_; }
cur_               56 net/dns/dns_response.h   size_t GetOffset() const { return cur_ - packet_; }
cur_               77 net/dns/dns_response.h   const char* cur_;
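The dns_response hits use a raw pointer cursor instead of an index: cur_ points at the next unread byte of packet_, AtEnd() compares it against packet_ + length_, GetOffset() is the distance from packet_, and each successful read commits a new cur_. A minimal sketch of that bounds-checked pointer cursor follows; the fixed 2-byte type / 2-byte length record layout and the hand-rolled big-endian reads are simplifications standing in for the real DNS resource-record format and base::BigEndianReader.

#include <cstddef>
#include <cstdint>
#include <string>

class RecordParser {
 public:
  RecordParser(const char* packet, size_t length)
      : packet_(packet), length_(length), cur_(packet) {}

  // Mirror AtEnd()/GetOffset() from the listing.
  bool AtEnd() const { return cur_ == packet_ + length_; }
  size_t GetOffset() const { return static_cast<size_t>(cur_ - packet_); }

  // Reads one <type, data-length, data> record, advancing cur_ past it.
  // Returns false (without moving cur_) if the record does not fit.
  bool ReadRecord(uint16_t* type, std::string* data) {
    if (Remaining() < 4)
      return false;
    uint16_t t = ReadU16(cur_);
    uint16_t len = ReadU16(cur_ + 2);
    if (Remaining() < 4u + len)
      return false;
    *type = t;
    data->assign(cur_ + 4, len);
    cur_ += 4 + len;  // Only commit the new position on success.
    return true;
  }

 private:
  size_t Remaining() const { return length_ - GetOffset(); }

  static uint16_t ReadU16(const char* p) {
    return static_cast<uint16_t>((static_cast<uint8_t>(p[0]) << 8) |
                                 static_cast<uint8_t>(p[1]));
  }

  const char* packet_;
  size_t length_;
  const char* cur_;  // Next unread byte within [packet_, packet_ + length_).
};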
cur_              145 tools/gn/parser.cc     cur_++;
cur_              159 tools/gn/parser.cc       return tokens_[cur_++];
cur_              166 tools/gn/parser.cc   return tokens_[cur_++];
cur_               87 tools/gn/parser.h   const Token& cur_token() const { return tokens_[cur_]; }
cur_               90 tools/gn/parser.h   bool at_end() const { return cur_ >= tokens_.size(); }
cur_              100 tools/gn/parser.h   size_t cur_;
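The gn/parser hits show the index flavor of the same idea: cur_ is a size_t position in a token vector, at_end() compares it to tokens_.size(), and consuming a token returns tokens_[cur_++]. A minimal sketch, with Token reduced to a plain struct and no grammar logic:

#include <cstddef>
#include <string>
#include <utility>
#include <vector>

struct Token {
  enum Type { IDENTIFIER, OPERATOR, EOF_TOKEN };
  Type type;
  std::string value;
};

class TokenStream {
 public:
  explicit TokenStream(std::vector<Token> tokens)
      : tokens_(std::move(tokens)), cur_(0) {}

  // Mirror cur_token()/at_end() from the listing.
  const Token& cur_token() const { return tokens_[cur_]; }
  bool at_end() const { return cur_ >= tokens_.size(); }

  // Returns the current token and steps past it, mirroring the
  // "return tokens_[cur_++];" pattern above.
  const Token& Consume() { return tokens_[cur_++]; }

  // Consumes the current token only if it has the expected type.
  bool ConsumeIf(Token::Type type) {
    if (at_end() || tokens_[cur_].type != type)
      return false;
    cur_++;  // Mirrors the bare cur_++ advance in the .cc hits.
    return true;
  }

 private:
  std::vector<Token> tokens_;
  size_t cur_;  // Index of the next unconsumed token.
};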
cur_               78 tools/gn/tokenizer.cc       cur_(0),
cur_              105 tools/gn/tokenizer.cc     size_t token_begin = cur_;
cur_              109 tools/gn/tokenizer.cc     size_t token_end = cur_;
cur_              215 tools/gn/tokenizer.cc     char following_char = input_[cur_ + 1];
cur_              273 tools/gn/tokenizer.cc         if (CanIncrement() && CouldBeTwoCharOperatorEnd(input_[cur_ + 1]))
cur_              312 tools/gn/tokenizer.cc   char c = input_[cur_];
cur_              325 tools/gn/tokenizer.cc   for (int i = static_cast<int>(cur_) - 1; i >= 0 && input_[i] == '\\'; i--)
cur_              334 tools/gn/tokenizer.cc   return IsNewline(input_, cur_);
cur_              338 tools/gn/tokenizer.cc   DCHECK(cur_ < input_.size());
cur_              345 tools/gn/tokenizer.cc   cur_++;
cur_              361 tools/gn/tokenizer.cc   } else if (cur_char() == '/' && cur_ + 1 < input_.size() &&
cur_              362 tools/gn/tokenizer.cc       (input_[cur_ + 1] == '/' || input_[cur_ + 1] == '*')) {
cur_               58 tools/gn/tokenizer.h   bool CanIncrement() const { return cur_ < input_.size(); }
cur_               70 tools/gn/tokenizer.h   bool at_end() const { return cur_ == input_.size(); }
cur_               71 tools/gn/tokenizer.h   char cur_char() const { return input_[cur_]; }
cur_               80 tools/gn/tokenizer.h   size_t cur_;  // Byte offset into input buffer.
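The tokenizer hits track cur_ as a byte offset into the input string, with CanIncrement()/at_end() guarding advancement, bounds-checked input_[cur_ + 1] lookahead for two-character operators, and token_begin/token_end captured from cur_ around each token. The sketch below exercises those pieces on a deliberately tiny token set (identifiers plus '=' and '=='); the real tokenizer's comment, newline, and error handling is omitted.

#include <cctype>
#include <cstddef>
#include <string>
#include <utility>
#include <vector>

class MiniTokenizer {
 public:
  explicit MiniTokenizer(std::string input)
      : input_(std::move(input)), cur_(0) {}

  std::vector<std::string> Run() {
    std::vector<std::string> tokens;
    while (!at_end()) {
      if (std::isspace(static_cast<unsigned char>(cur_char()))) {
        Advance();
        continue;
      }
      size_t token_begin = cur_;  // Record where the token starts.
      if (cur_char() == '=') {
        // Guarded one-byte lookahead, as in the cur_ + 1 < input_.size()
        // checks in the listing.
        if (cur_ + 1 < input_.size() && input_[cur_ + 1] == '=')
          Advance();
        Advance();
      } else if (std::isalnum(static_cast<unsigned char>(cur_char()))) {
        while (!at_end() && std::isalnum(static_cast<unsigned char>(cur_char())))
          Advance();
      } else {
        Advance();  // Any other single character becomes its own token.
      }
      size_t token_end = cur_;  // One past the last byte of the token.
      tokens.push_back(input_.substr(token_begin, token_end - token_begin));
    }
    return tokens;
  }

 private:
  // Mirror the accessors from the listing.
  bool CanIncrement() const { return cur_ < input_.size(); }
  bool at_end() const { return cur_ == input_.size(); }
  char cur_char() const { return input_[cur_]; }
  void Advance() { cur_++; }

  std::string input_;
  size_t cur_;  // Byte offset into input buffer.
};

For example, MiniTokenizer("a == b").Run() yields {"a", "==", "b"}.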