// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Tests v8::internal::Scanner. Note that presently most unit tests for the
// Scanner are in cctest/test-parsing.cc, rather than here.
#include "src/handles-inl.h"
|
|
|
|
#include "src/parsing/scanner-character-streams.h"
|
|
|
|
#include "src/parsing/scanner.h"
|
|
|
|
#include "src/unicode-cache.h"
|
|
|
|
#include "test/cctest/cctest.h"
|
|
|
|
|
|
|
|
using namespace v8::internal;

namespace {

const char src_simple[] = "function foo() { var x = 2 * a() + b; }";

// Helper: build a Scanner that reads the given source from an in-memory
// test stream.
std::unique_ptr<Scanner> make_scanner(const char* src, UnicodeCache* cache) {
  std::unique_ptr<Scanner> scanner(new Scanner(cache));
  scanner->Initialize(ScannerStream::ForTesting(src).release());
  return scanner;
}

}  // anonymous namespace
// DCHECK_TOK checks token equality, but by checking for equality of the token
// names. That should have the same result, but has much nicer error messages.
#define DCHECK_TOK(a, b) DCHECK_EQ(Token::Name(a), Token::Name(b))

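// Scanner::BookmarkScope (exercised below) lets the caller record the
// scanner's current position with Set() and later rewind scanning to it with
// Apply(); the step-by-step description inside the test spells out how the
// assertions check this.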
TEST(Bookmarks) {
  UnicodeCache unicode_cache;

  // Scan through the given source and record the tokens for use as reference
  // below.
  std::vector<Token::Value> tokens;
  {
    auto scanner = make_scanner(src_simple, &unicode_cache);
    do {
      tokens.push_back(scanner->Next());
    } while (scanner->current_token() != Token::EOS);
  }

  // For each position:
  // - Scan through file,
  // - set a bookmark once the position is reached,
  // - scan a bit more,
  // - reset to the bookmark, and
  // - scan until the end.
  // At each step, compare to the reference token sequence generated above.
  for (size_t bookmark_pos = 0; bookmark_pos < tokens.size(); bookmark_pos++) {
    auto scanner = make_scanner(src_simple, &unicode_cache);
    Scanner::BookmarkScope bookmark(scanner.get());

    for (size_t i = 0; i < std::min(bookmark_pos + 10, tokens.size()); i++) {
      if (i == bookmark_pos) {
        bookmark.Set();
      }
      DCHECK_TOK(tokens[i], scanner->Next());
    }

    bookmark.Apply();
    for (size_t i = bookmark_pos; i < tokens.size(); i++) {
      DCHECK_TOK(tokens[i], scanner->Next());
    }
  }
}

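// Each source below starts like the HTML-comment opener "<!--". Only the last
// case actually completes it; there the whole first line is skipped as a
// comment and only the trailing identifier is scanned. In the other cases the
// scanner has to push the characters it has already consumed back and deliver
// them as individual operator tokens instead.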
TEST(AllThePushbacks) {
  const struct {
    const char* src;
    const Token::Value tokens[5];  // Large enough for any of the test cases.
  } test_cases[] = {
      {"<-x", {Token::LT, Token::SUB, Token::IDENTIFIER, Token::EOS}},
      {"<!x", {Token::LT, Token::NOT, Token::IDENTIFIER, Token::EOS}},
      {"<!-x",
       {Token::LT, Token::NOT, Token::SUB, Token::IDENTIFIER, Token::EOS}},
      {"<!-- xx -->\nx", {Token::IDENTIFIER, Token::EOS}},
  };

  UnicodeCache unicode_cache;
  for (const auto& test_case : test_cases) {
    auto scanner = make_scanner(test_case.src, &unicode_cache);
    for (size_t i = 0; test_case.tokens[i] != Token::EOS; i++) {
      DCHECK_TOK(test_case.tokens[i], scanner->Next());
    }
    DCHECK_TOK(Token::EOS, scanner->Next());
  }
}