Skip to content

Commit b9db5ee

Browse files
committed
converted TestTokenizerCompileLimits into a Python test
1 parent 9ac26af commit b9db5ee

2 files changed

Lines changed: 32 additions & 48 deletions

File tree

test/cli/other_test.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3653,3 +3653,35 @@ def test_debug_syntaxerror_c(tmp_path):
36533653
assert stderr.splitlines() == [
36543654
"{}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]".format(test_file)
36553655
]
3656+
3657+
3658+
def test_ast_max_depth(tmp_path):
3659+
test_file = tmp_path / 'test.cpp'
3660+
with open(test_file, "w") as f:
3661+
f.write(
3662+
"""
3663+
#define PTR1 (* (* (* (*
3664+
#define PTR2 PTR1 PTR1 PTR1 PTR1
3665+
#define PTR3 PTR2 PTR2 PTR2 PTR2
3666+
#define PTR4 PTR3 PTR3 PTR3 PTR3
3667+
3668+
#define RBR1 ) ) ) )
3669+
#define RBR2 RBR1 RBR1 RBR1 RBR1
3670+
#define RBR3 RBR2 RBR2 RBR2 RBR2
3671+
#define RBR4 RBR3 RBR3 RBR3 RBR3
3672+
3673+
int PTR4 q4_var RBR4 = 0;
3674+
""")
3675+
3676+
args = [
3677+
'-q',
3678+
'--template=simple',
3679+
str(test_file)
3680+
]
3681+
3682+
exitcode, stdout, stderr = cppcheck(args)
3683+
assert exitcode == 0, stdout
3684+
assert stdout.splitlines() == []
3685+
assert stderr.splitlines() == [
3686+
'{}:12:5: error: maximum AST depth exceeded [internalAstError]'.format(test_file)
3687+
]

test/testtokenize.cpp

Lines changed: 0 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -8662,51 +8662,3 @@ class TestTokenizer : public TestFixture {
86628662
};
86638663

86648664
REGISTER_TEST(TestTokenizer)
8665-
8666-
class TestTokenizerCompileLimits : public TestFixture
8667-
{
8668-
public:
8669-
TestTokenizerCompileLimits() : TestFixture("TestTokenizerCompileLimits") {}
8670-
8671-
private:
8672-
void run() override
8673-
{
8674-
TEST_CASE(test); // #5592 crash: gcc: testsuit: gcc.c-torture/compile/limits-declparen.c
8675-
}
8676-
8677-
#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
8678-
std::string tokenizeAndStringify_(const char* file, int linenr, const std::string& code) {
8679-
// tokenize..
8680-
SimpleTokenizer tokenizer(settingsDefault, *this);
8681-
ASSERT_LOC(tokenizer.tokenize(code), file, linenr);
8682-
8683-
if (tokenizer.tokens())
8684-
return tokenizer.tokens()->stringifyList(false, true, false, true, false, nullptr, nullptr);
8685-
return "";
8686-
}
8687-
8688-
void test() {
8689-
const char raw_code[] = "#define PTR1 (* (* (* (*\n"
8690-
"#define PTR2 PTR1 PTR1 PTR1 PTR1\n"
8691-
"#define PTR3 PTR2 PTR2 PTR2 PTR2\n"
8692-
"#define PTR4 PTR3 PTR3 PTR3 PTR3\n"
8693-
"\n"
8694-
"#define RBR1 ) ) ) )\n"
8695-
"#define RBR2 RBR1 RBR1 RBR1 RBR1\n"
8696-
"#define RBR3 RBR2 RBR2 RBR2 RBR2\n"
8697-
"#define RBR4 RBR3 RBR3 RBR3 RBR3\n"
8698-
"\n"
8699-
"int PTR4 q4_var RBR4 = 0;\n";
8700-
8701-
// Preprocess file..
8702-
simplecpp::OutputList outputList;
8703-
std::vector<std::string> files;
8704-
const simplecpp::TokenList tokens1(raw_code, sizeof(raw_code), files, "", &outputList);
8705-
const std::string filedata = tokens1.stringify();
8706-
const std::string code = PreprocessorHelper::getcodeforcfg(settingsDefault, *this, filedata, "", "test.c");
8707-
8708-
ASSERT_THROW_INTERNAL_EQUALS(tokenizeAndStringify(code), AST, "maximum AST depth exceeded");
8709-
}
8710-
};
8711-
8712-
REGISTER_TEST(TestTokenizerCompileLimits)

0 commit comments

Comments (0)