Stay within lex'd token bounds.

If we've walked to the end of the list, make sure we don't advance past
the EOF or Error tokens.

Bug: 1347943
Change-Id: I79c9254c39747cc0fb236ae92f8a0f3aa035a932
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/97460
Auto-Submit: Dan Sinclair <dsinclair@chromium.org>
Reviewed-by: Ben Clayton <bclayton@google.com>
Commit-Queue: Dan Sinclair <dsinclair@chromium.org>
Kokoro: Kokoro <noreply+kokoro@google.com>
This commit is contained in:
dan sinclair 2022-07-27 22:54:50 +00:00 committed by Dawn LUCI CQ
parent 08659d098d
commit 3b2ce130b6
1 changed file with 8 additions and 1 deletion

View File

@@ -248,7 +248,11 @@ const Token& ParserImpl::next() {
        next_token_idx_++;
    }
    last_source_idx_ = next_token_idx_;
return tokens_[next_token_idx_++];
if (!tokens_[next_token_idx_].IsEof() && !tokens_[next_token_idx_].IsError()) {
next_token_idx_++;
}
return tokens_[last_source_idx_];
}

const Token& ParserImpl::peek(size_t idx) {
@@ -263,6 +267,9 @@ const Token& ParserImpl::peek(size_t idx) {
        }
        idx++;
    }
if (next_token_idx_ + idx >= tokens_.size()) {
return tokens_[tokens_.size() - 1];
}
    return tokens_[next_token_idx_ + idx];
}