diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index e1e91937d44..56b738a4550 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -3605,6 +3605,9 @@ void Tokenizer::concatenateNegativeNumberAndAnyPositive() if (!Token::Match(tok, "?|:|,|(|[|{|return|case|sizeof|%op% +|-") || tok->tokType() == Token::eIncDecOp) continue; + if (tok->findOpeningBracket()) + continue; + while (tok->str() != ">" && tok->next() && tok->strAt(1) == "+" && (!Token::Match(tok->tokAt(2), "%name% (|;") || Token::Match(tok, "%op%"))) tok->deleteNext(); diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index 2fbef515a62..7f943d7db94 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -1010,6 +1010,8 @@ class TestTokenizer : public TestFixture { ASSERT_EQUALS("int x [ 2 ] = { -2 , 1 }", tokenizeAndStringify("int x[2] = {-2,1}")); ASSERT_EQUALS("f ( 123 )", tokenizeAndStringify("f(+123)")); + + ASSERT_EQUALS("std :: extent_v < A > - 1 ;", tokenizeAndStringify("std::extent_v<A> - 1;")); // #11341 }