diff --git a/democlient/democlient.cpp b/democlient/democlient.cpp index c0601d6d434..a8317c2da4b 100644 --- a/democlient/democlient.cpp +++ b/democlient/democlient.cpp @@ -66,8 +66,8 @@ class CppcheckExecutor : public ErrorLogger { , cppcheck(settings, supprs, *this, false, nullptr) {} - void run(const char code[]) { - cppcheck.check(FileWithDetails("test.cpp", Standards::Language::CPP, 0), code); + void run(const char* code) { + cppcheck.checkBuffer(FileWithDetails("test.cpp", Standards::Language::CPP, 0), reinterpret_cast<const uint8_t*>(code), strlen(code)); } void reportOut(const std::string & /*outmsg*/, Color /*c*/) override {} diff --git a/gui/mainwindow.cpp b/gui/mainwindow.cpp index 05212db30f8..e6efd50066d 100644 --- a/gui/mainwindow.cpp +++ b/gui/mainwindow.cpp @@ -729,8 +729,11 @@ void MainWindow::analyzeCode(const QString& code, const QString& filename) checkLockDownUI(); clearResults(); mUI->mResults->checkingStarted(1); - // TODO: apply enforcedLanguage? - cppcheck.check(FileWithDetails(filename.toStdString(), Path::identify(filename.toStdString(), false), 0), code.toStdString()); + { + const std::string code_s = code.toStdString(); + // TODO: apply enforcedLanguage? 
+ cppcheck.checkBuffer(FileWithDetails(filename.toStdString(), Path::identify(filename.toStdString(), false), 0), reinterpret_cast<const uint8_t*>(code_s.data()), code_s.size()); + } analysisDone(); // Expand results diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp index ad46f2651cd..008bbe0eee9 100644 --- a/lib/cppcheck.cpp +++ b/lib/cppcheck.cpp @@ -799,10 +799,9 @@ unsigned int CppCheck::check(const FileWithDetails &file) return returnValue; } -unsigned int CppCheck::check(const FileWithDetails &file, const std::string &content) +unsigned int CppCheck::checkBuffer(const FileWithDetails &file, const uint8_t* data, std::size_t size) { - std::istringstream iss(content); - return checkFile(file, "", 0, &iss); + return checkBuffer(file, "", 0, data, size); } unsigned int CppCheck::check(const FileSettings &fs) @@ -851,14 +850,6 @@ unsigned int CppCheck::check(const FileSettings &fs) return returnValue; } -static simplecpp::TokenList createTokenList(const std::string& filename, std::vector<std::string>& files, simplecpp::OutputList* outputList, std::istream* fileStream) -{ - if (fileStream) - return {*fileStream, files, filename, outputList}; - - return {filename, files, outputList}; -} - std::size_t CppCheck::calculateHash(const Preprocessor& preprocessor, const simplecpp::TokenList& tokens, const std::string& filePath) const { std::ostringstream toolinfo; @@ -880,7 +871,23 @@ std::size_t CppCheck::calculateHash(const Preprocessor& preprocessor, const simp return preprocessor.calculateHash(tokens, toolinfo.str()); } -unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string &cfgname, int fileIndex, std::istream* fileStream) +unsigned int CppCheck::checkBuffer(const FileWithDetails &file, const std::string &cfgname, int fileIndex, const uint8_t* data, std::size_t size) +{ + const auto f = [&file, data, size](std::vector<std::string>& files, simplecpp::OutputList* outputList) { + return simplecpp::TokenList{data, size, files, file.spath(), outputList}; + }; + return checkInternal(file, 
cfgname, fileIndex, f); +} + +unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string &cfgname, int fileIndex) +{ + const auto f = [&file](std::vector& files, simplecpp::OutputList* outputList) { + return simplecpp::TokenList{file.spath(), files, outputList}; + }; + return checkInternal(file, cfgname, fileIndex, f); +} + +unsigned int CppCheck::checkInternal(const FileWithDetails& file, const std::string &cfgname, int fileIndex, const CreateTokenListFn& createTokenList) { // TODO: move to constructor when CppCheck no longer owns the settings if (mSettings.checks.isEnabled(Checks::unusedFunction) && !mUnusedFunctionsCheck) @@ -931,24 +938,13 @@ unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string std::size_t hash = 0; // markup files are special and do not adhere to the enforced language TokenList tokenlist{mSettings, Standards::Language::C}; - if (fileStream) { - std::vector files; - simplecpp::TokenList tokens(*fileStream, files, file.spath()); - if (analyzerInformation) { - const Preprocessor preprocessor(mSettings, mErrorLogger, Standards::Language::C); - hash = calculateHash(preprocessor, tokens); - } - tokenlist.createTokens(std::move(tokens)); - } - else { - std::vector files; - simplecpp::TokenList tokens(file.spath(), files); - if (analyzerInformation) { - const Preprocessor preprocessor(mSettings, mErrorLogger, file.lang()); - hash = calculateHash(preprocessor, tokens); - } - tokenlist.createTokens(std::move(tokens)); + std::vector files; + simplecpp::TokenList tokens = createTokenList(files, nullptr); + if (analyzerInformation) { + const Preprocessor preprocessor(mSettings, mErrorLogger, file.lang()); + hash = calculateHash(preprocessor, tokens); } + tokenlist.createTokens(std::move(tokens)); // this is not a real source file - we just want to tokenize it. treat it as C anyways as the language needs to be determined. 
Tokenizer tokenizer(std::move(tokenlist), mErrorLogger); mUnusedFunctionsCheck->parseTokens(tokenizer, mSettings); @@ -967,7 +963,7 @@ unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string simplecpp::OutputList outputList; std::vector files; - simplecpp::TokenList tokens1 = createTokenList(file.spath(), files, &outputList, fileStream); + simplecpp::TokenList tokens1 = createTokenList(files, &outputList); // If there is a syntax error, report it and stop const auto output_it = std::find_if(outputList.cbegin(), outputList.cend(), [](const simplecpp::Output &output){ @@ -1075,8 +1071,7 @@ unsigned int CppCheck::checkFile(const FileWithDetails& file, const std::string code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n'; } TokenList tokenlist(mSettings, file.lang()); - std::istringstream istr2(code); - tokenlist.createTokens(istr2); // TODO: check result? + tokenlist.createTokensFromBuffer(code.data(), code.size()); // TODO: check result? executeRules("define", tokenlist); } #endif diff --git a/lib/cppcheck.h b/lib/cppcheck.h index 7dd8947631e..18135efbc6c 100644 --- a/lib/cppcheck.h +++ b/lib/cppcheck.h @@ -45,7 +45,10 @@ class Settings; struct Suppressions; class Preprocessor; -namespace simplecpp { class TokenList; } +namespace simplecpp { + class TokenList; + struct Output; +} /// @addtogroup Core /// @{ @@ -100,12 +103,13 @@ class CPPCHECKLIB CppCheck { * the disk but the content is given in @p content. In errors the @p path * is used as a filename. * @param file The file to check. - * @param content File content as a string. + * @param data File content as a buffer. + * @param size Size of buffer. * @return amount of errors found or 0 if none were found. * @note You must set settings before calling this function (by calling * settings()). 
*/ - unsigned int check(const FileWithDetails &file, const std::string &content); + unsigned int checkBuffer(const FileWithDetails &file, const uint8_t* data, std::size_t size); /** * @brief Returns current version number as a string. @@ -174,14 +178,35 @@ class CPPCHECKLIB CppCheck { */ std::size_t calculateHash(const Preprocessor &preprocessor, const simplecpp::TokenList &tokens, const std::string& filePath = {}) const; + /** + * @brief Check a file + * @param file the file + * @param cfgname cfg name + * @return number of errors found + */ + unsigned int checkFile(const FileWithDetails& file, const std::string &cfgname, int fileIndex); + + /** + * @brief Check a file using buffer + * @param file the file + * @param cfgname cfg name + * @param data the data to be read + * @param size the size of the data to be read + * @return number of errors found + */ + unsigned int checkBuffer(const FileWithDetails& file, const std::string &cfgname, int fileIndex, const uint8_t* data, std::size_t size); + + // TODO: should use simplecpp::OutputList + using CreateTokenListFn = std::function<simplecpp::TokenList (std::vector<std::string>&, std::list<simplecpp::Output>*)>; + /** * @brief Check a file using stream * @param file the file * @param cfgname cfg name - * @param fileStream stream the file content can be read from + * @param createTokenList a function to create the simplecpp::TokenList with * @return number of errors found */ - unsigned int checkFile(const FileWithDetails& file, const std::string &cfgname, int fileIndex, std::istream* fileStream = nullptr); + unsigned int checkInternal(const FileWithDetails& file, const std::string &cfgname, int fileIndex, const CreateTokenListFn& createTokenList); /** * @brief Check normal tokens diff --git a/lib/importproject.cpp b/lib/importproject.cpp index d1f8a3fb40b..bb57ed16efc 100644 --- a/lib/importproject.cpp +++ b/lib/importproject.cpp @@ -533,8 +533,7 @@ namespace { // TODO: improve evaluation const Settings s; TokenList tokenlist(s, Standards::Language::C); - std::istringstream istr(c); 
- tokenlist.createTokens(istr); // TODO: check result + tokenlist.createTokensFromBuffer(c.data(), c.size()); // TODO: check result // TODO: put in a helper // generate links { diff --git a/lib/library.cpp b/lib/library.cpp index f786584bdf2..c83aebbcd6d 100644 --- a/lib/library.cpp +++ b/lib/library.cpp @@ -35,7 +35,6 @@ #include #include #include -#include #include #include #include @@ -178,8 +177,8 @@ static std::vector getnames(const char *names) static void gettokenlistfromvalid(const std::string& valid, TokenList& tokenList) { - std::istringstream istr(valid + ','); - tokenList.createTokens(istr); // TODO: check result? + const std::string str(valid + ','); + tokenList.createTokensFromBuffer(str.data(), str.size()); // TODO: check result? for (Token *tok = tokenList.front(); tok; tok = tok->next()) { if (Token::Match(tok,"- %num%")) { tok->str("-" + tok->strAt(1)); diff --git a/lib/programmemory.cpp b/lib/programmemory.cpp index 86d5e35273d..e72815d6c7c 100644 --- a/lib/programmemory.cpp +++ b/lib/programmemory.cpp @@ -1822,8 +1822,7 @@ static std::shared_ptr createTokenFromExpression(const std::string& retur std::shared_ptr tokenList = std::make_shared(settings, cpp ? Standards::Language::CPP : Standards::Language::C); { const std::string code = "return " + returnValue + ";"; - std::istringstream istr(code); - if (!tokenList->createTokens(istr)) + if (!tokenList->createTokensFromBuffer(code.data(), code.size())) return nullptr; } diff --git a/lib/symboldatabase.cpp b/lib/symboldatabase.cpp index e05001b6896..f5e7b2a2f63 100644 --- a/lib/symboldatabase.cpp +++ b/lib/symboldatabase.cpp @@ -7756,8 +7756,8 @@ void SymbolDatabase::setValueTypeInTokenList(bool reportDebugWarnings, Token *to if (!typestr.empty()) { ValueType valuetype; TokenList tokenList(mSettings, tok->isCpp() ? Standards::Language::CPP : Standards::Language::C); - std::istringstream istr(typestr+";"); - tokenList.createTokens(istr); // TODO: check result? 
+ const std::string str(typestr+";"); + tokenList.createTokensFromBuffer(str.data(), str.size()); // TODO: check result? tokenList.simplifyStdType(); if (parsedecl(tokenList.front(), &valuetype, mDefaultSignedness, mSettings)) { valuetype.originalTypeName = typestr; @@ -7846,8 +7846,8 @@ void SymbolDatabase::setValueTypeInTokenList(bool reportDebugWarnings, Token *to continue; } TokenList tokenList(mSettings, tok->isCpp() ? Standards::Language::CPP : Standards::Language::C); - std::istringstream istr(typestr+";"); - if (tokenList.createTokens(istr)) { + const std::string str(typestr+";"); + if (tokenList.createTokensFromBuffer(str.data(), str.size())) { ValueType vt; tokenList.simplifyPlatformTypes(); tokenList.simplifyStdType(); diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp index af49c65391a..3466202414b 100644 --- a/lib/tokenlist.cpp +++ b/lib/tokenlist.cpp @@ -321,17 +321,17 @@ void TokenList::insertTokens(Token *dest, const Token *src, nonneg int n) //--------------------------------------------------------------------------- -bool TokenList::createTokens(std::istream &code) +bool TokenList::createTokensFromBuffer(const uint8_t* data, size_t size) { - return createTokensInternal(code, mFiles.empty() ? "" : *mFiles.cbegin()); + return createTokensFromBufferInternal(data, size, mFiles.empty() ? 
"" : *mFiles.cbegin()); } //--------------------------------------------------------------------------- -bool TokenList::createTokensInternal(std::istream &code, const std::string& file0) +bool TokenList::createTokensFromBufferInternal(const uint8_t* data, size_t size, const std::string& file0) { simplecpp::OutputList outputList; - simplecpp::TokenList tokens(code, mFiles, file0, &outputList); + simplecpp::TokenList tokens(data, size, mFiles, file0, &outputList); createTokens(std::move(tokens)); diff --git a/lib/tokenlist.h b/lib/tokenlist.h index 56385f519e2..76db5bb2a89 100644 --- a/lib/tokenlist.h +++ b/lib/tokenlist.h @@ -98,9 +98,16 @@ class CPPCHECKLIB TokenList { * - multiline strings are not handled. * - UTF in the code are not handled. * - comments are not handled. - * @param code input stream for code */ - bool createTokens(std::istream &code); + bool createTokensFromBuffer(const uint8_t* data, size_t size); + bool createTokensFromBuffer(const char* data, size_t size) { + return createTokensFromBuffer(reinterpret_cast<const uint8_t*>(data), size); + } + template<std::size_t size> + // cppcheck-suppress unusedFunction - used in tests only + bool createTokensFromString(const char (&data)[size]) { + return createTokensFromBuffer(reinterpret_cast<const uint8_t*>(data), size-1); + } void createTokens(simplecpp::TokenList&& tokenList); @@ -208,7 +215,7 @@ class CPPCHECKLIB TokenList { } private: - bool createTokensInternal(std::istream &code, const std::string& file0); + bool createTokensFromBufferInternal(const uint8_t* data, std::size_t size, const std::string& file0); /** Token list */ std::shared_ptr<TokensFrontBack> mTokensFrontBack; diff --git a/lib/valueflow.cpp b/lib/valueflow.cpp index 049e953f59f..304e06c299c 100644 --- a/lib/valueflow.cpp +++ b/lib/valueflow.cpp @@ -124,7 +124,6 @@ #include #include #include -#include #include #include #include @@ -1997,8 +1996,7 @@ static bool isNotEqual(std::pair x, std::pair x, const std::string& y, bool cpp, const Settings& settings) { TokenList tokenList(settings, cpp ? 
Standards::Language::CPP : Standards::Language::C); - std::istringstream istr(y); - tokenList.createTokens(istr); // TODO: check result? + tokenList.createTokensFromBuffer(y.data(), y.size()); // TODO: check result? return isNotEqual(x, std::make_pair(tokenList.front(), tokenList.back())); } static bool isNotEqual(std::pair x, const ValueType* y, bool cpp, const Settings& settings) @@ -7117,8 +7115,8 @@ static bool getMinMaxValues(const std::string& typestr, MathLib::bigint& maxvalue) { TokenList typeTokens(settings, cpp ? Standards::Language::CPP : Standards::Language::C); - std::istringstream istr(typestr + ";"); - if (!typeTokens.createTokens(istr)) + const std::string str(typestr + ";"); + if (!typeTokens.createTokensFromBuffer(str.data(), str.size())) return false; typeTokens.simplifyPlatformTypes(); typeTokens.simplifyStdType(); diff --git a/oss-fuzz/main.cpp b/oss-fuzz/main.cpp index bcd7a6c37c4..a5c717da776 100644 --- a/oss-fuzz/main.cpp +++ b/oss-fuzz/main.cpp @@ -60,11 +60,11 @@ static const Settings s_settings = create_settings(); static DummyErrorLogger s_errorLogger; static const FileWithDetails s_file("test.cpp", Standards::Language::CPP, 0); -static void doCheck(const std::string& code) +static void doCheck(const uint8_t *data, size_t dataSize) { Suppressions supprs; CppCheck cppcheck(s_settings, supprs, s_errorLogger, false, nullptr); - cppcheck.check(s_file, code); + cppcheck.checkBuffer(s_file, data, dataSize); } #ifndef NO_FUZZ @@ -74,7 +74,7 @@ int LLVMFuzzerTestOneInput(const uint8_t *data, size_t dataSize) { if (dataSize < 10000) { const std::string code = generateCode2(data, dataSize); - doCheck(code); + doCheck(reinterpret_cast(code.data()), code.size()); } return 0; } @@ -98,7 +98,7 @@ int main(int argc, char * argv[]) const std::string code = oss.str(); for (int i = 0; i < cnt; ++i) - doCheck(code); + doCheck(reinterpret_cast(code.data()), code.size()); return EXIT_SUCCESS; } diff --git a/test/helpers.cpp b/test/helpers.cpp index 
222fc384784..2d6a60f9f45 100644 --- a/test/helpers.cpp +++ b/test/helpers.cpp @@ -117,25 +117,24 @@ ScopedFile::~ScopedFile() { // TODO: we should be using the actual Preprocessor implementation std::string PreprocessorHelper::getcodeforcfg(const Settings& settings, ErrorLogger& errorlogger, const std::string &filedata, const std::string &cfg, const std::string &filename, SuppressionList *inlineSuppression) { - std::map cfgcode = getcode(settings, errorlogger, filedata.c_str(), std::set{cfg}, filename, inlineSuppression); + std::map cfgcode = getcode(settings, errorlogger, filedata.c_str(), filedata.size(), std::set{cfg}, filename, inlineSuppression); const auto it = cfgcode.find(cfg); if (it == cfgcode.end()) return ""; return it->second; } -std::map PreprocessorHelper::getcode(const Settings& settings, ErrorLogger& errorlogger, const char code[], const std::string &filename) +std::map PreprocessorHelper::getcode(const Settings& settings, ErrorLogger& errorlogger, const char* code, std::size_t size, const std::string &filename) { - return getcode(settings, errorlogger, code, {}, filename, nullptr); + return getcode(settings, errorlogger, code, size, {}, filename, nullptr); } -std::map PreprocessorHelper::getcode(const Settings& settings, ErrorLogger& errorlogger, const char code[], std::set cfgs, const std::string &filename, SuppressionList *inlineSuppression) +std::map PreprocessorHelper::getcode(const Settings& settings, ErrorLogger& errorlogger, const char* code, std::size_t size, std::set cfgs, const std::string &filename, SuppressionList *inlineSuppression) { simplecpp::OutputList outputList; std::vector files; - std::istringstream istr(code); - simplecpp::TokenList tokens(istr, files, Path::simplifyPath(filename), &outputList); + simplecpp::TokenList tokens(code, size, files, Path::simplifyPath(filename), &outputList); Preprocessor preprocessor(settings, errorlogger, Path::identify(tokens.getFiles()[0], false)); if (inlineSuppression) 
preprocessor.inlineSuppressions(tokens, *inlineSuppression); @@ -162,11 +161,9 @@ std::map PreprocessorHelper::getcode(const Settings& s return cfgcode; } -void SimpleTokenizer2::preprocess(const char code[], std::vector &files, const std::string& file0, Tokenizer& tokenizer, ErrorLogger& errorlogger) +void SimpleTokenizer2::preprocess(const char* code, std::size_t size, std::vector &files, const std::string& file0, Tokenizer& tokenizer, ErrorLogger& errorlogger) { - // TODO: get rid of stream - std::istringstream istr(code); - const simplecpp::TokenList tokens1(istr, files, file0); + const simplecpp::TokenList tokens1(code, size, files, file0); Preprocessor preprocessor(tokenizer.getSettings(), errorlogger, Path::identify(tokens1.getFiles()[0], false)); simplecpp::TokenList tokens2 = preprocessor.preprocess(tokens1, "", files, true); diff --git a/test/helpers.h b/test/helpers.h index 1b803dcb679..1cecdc837a5 100644 --- a/test/helpers.h +++ b/test/helpers.h @@ -61,30 +61,41 @@ class SimpleTokenizer : public Tokenizer { template bool tokenize(const char (&code)[size]) { - std::istringstream istr(code); - return tokenize(istr, std::string(list.isCPP() ? "test.cpp" : "test.c")); + return tokenize(code, size-1); } bool tokenize(const std::string& code) { - std::istringstream istr(code); - return tokenize(istr, std::string(list.isCPP() ? "test.cpp" : "test.c")); + return tokenize(code.data(), code.size()); + } + + bool tokenize(const char* code, std::size_t size) + { + return tokenize(code, size, std::string(list.isCPP() ? 
"test.cpp" : "test.c")); } private: /** * Tokenize code - * @param istr The code as stream + * @param code The code * @param filename Indicates if the code is C++ * @return false if source code contains syntax errors */ - bool tokenize(std::istream& istr, + template + bool tokenize(const char (&code)[size], + const std::string& filename) + { + return tokenize(code, size-1, filename); + } + + bool tokenize(const char* code, + std::size_t size, const std::string& filename) { if (list.front()) throw std::runtime_error("token list is not empty"); list.appendFileIfNew(filename); - if (!list.createTokens(istr)) + if (!list.createTokensFromBuffer(code, size)) return false; return simplifyTokens1(""); @@ -101,8 +112,7 @@ class SimpleTokenList explicit SimpleTokenList(const char (&code)[size], Standards::Language lang = Standards::Language::CPP) : list{settings, lang} { - std::istringstream iss(code); - if (!list.createTokens(iss)) + if (!list.createTokensFromString(code)) throw std::runtime_error("creating tokens failed"); } @@ -110,9 +120,8 @@ class SimpleTokenList explicit SimpleTokenList(const char (&code)[size], const std::string& file0, Standards::Language lang = Standards::Language::CPP) : list{settings, lang} { - std::istringstream iss(code); list.appendFileIfNew(file0); - if (!list.createTokens(iss)) + if (!list.createTokensFromString(code)) throw std::runtime_error("creating tokens failed"); } @@ -173,11 +182,15 @@ class PreprocessorHelper * @param inlineSuppression the inline suppressions */ static std::string getcodeforcfg(const Settings& settings, ErrorLogger& errorlogger, const std::string &filedata, const std::string &cfg, const std::string &filename, SuppressionList *inlineSuppression = nullptr); - - static std::map getcode(const Settings& settings, ErrorLogger& errorlogger, const char code[], const std::string &filename = "file.c"); + template + static std::map getcode(const Settings& settings, ErrorLogger& errorlogger, const char (&code)[size], const 
std::string &filename = "file.c") + { + return getcode(settings, errorlogger, code, size-1, filename); + } private: - static std::map getcode(const Settings& settings, ErrorLogger& errorlogger, const char code[], std::set cfgs, const std::string &filename, SuppressionList *inlineSuppression); + static std::map getcode(const Settings& settings, ErrorLogger& errorlogger, const char* code, std::size_t size, const std::string &filename); + static std::map getcode(const Settings& settings, ErrorLogger& errorlogger, const char* code, std::size_t size, std::set cfgs, const std::string &filename, SuppressionList *inlineSuppression); }; namespace cppcheck { @@ -251,30 +264,24 @@ class SimpleTokenizer2 : public Tokenizer { SimpleTokenizer2(const Settings &settings, ErrorLogger &errorlogger, const char (&code)[size], const std::string& file0) : Tokenizer{TokenList{settings, Path::identify(file0, false)}, errorlogger} { - preprocess(code, mFiles, file0, *this, errorlogger); - } - - // TODO: get rid of this - SimpleTokenizer2(const Settings &settings, ErrorLogger &errorlogger, const char code[], const std::string& file0) - : Tokenizer{TokenList{settings, Path::identify(file0, false)}, errorlogger} - { - preprocess(code, mFiles, file0, *this, errorlogger); + preprocess(code, size-1, mFiles, file0, *this, errorlogger); } private: - static void preprocess(const char code[], std::vector &files, const std::string& file0, Tokenizer& tokenizer, ErrorLogger& errorlogger); + static void preprocess(const char* code, std::size_t size, std::vector &files, const std::string& file0, Tokenizer& tokenizer, ErrorLogger& errorlogger); std::vector mFiles; }; struct TokenListHelper { - static bool createTokens(TokenList& tokenlist, std::istream& istr, const std::string& file) + template + static bool createTokensFromString(TokenList& tokenlist, const char (&code)[size], const std::string& file) { if (tokenlist.front()) throw std::runtime_error("token list is not empty"); 
tokenlist.appendFileIfNew(file); - return tokenlist.createTokens(istr); + return tokenlist.createTokensFromString(code); } }; diff --git a/test/testbufferoverrun.cpp b/test/testbufferoverrun.cpp index 16f0f4c3f72..4753de373bf 100644 --- a/test/testbufferoverrun.cpp +++ b/test/testbufferoverrun.cpp @@ -70,7 +70,8 @@ class TestBufferOverrun : public TestFixture { } #define checkP(...) checkP_(__FILE__, __LINE__, __VA_ARGS__) - void checkP_(const char* file, int line, const char code[]) + template + void checkP_(const char* file, int line, const char (&code)[size]) { const Settings settings = settingsBuilder(settings0).severity(Severity::performance).certainty(Certainty::inconclusive).build(); diff --git a/test/testclass.cpp b/test/testclass.cpp index 05f863189c5..f64ae216741 100644 --- a/test/testclass.cpp +++ b/test/testclass.cpp @@ -8778,7 +8778,8 @@ class TestClass : public TestFixture { } #define checkUselessOverride(...) checkUselessOverride_(__FILE__, __LINE__, __VA_ARGS__) - void checkUselessOverride_(const char* file, int line, const char code[]) { + template + void checkUselessOverride_(const char* file, int line, const char (&code)[size]) { const Settings settings = settingsBuilder().severity(Severity::style).build(); SimpleTokenizer2 tokenizer(settings, *this, code, "test.cpp"); diff --git a/test/testcondition.cpp b/test/testcondition.cpp index caafd5a0bc2..219894ea9b1 100644 --- a/test/testcondition.cpp +++ b/test/testcondition.cpp @@ -136,7 +136,8 @@ class TestCondition : public TestFixture { }; #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) - void check_(const char* file, int line, const char code[], const CheckOptions& options = make_default_obj()) { + template + void check_(const char* file, int line, const char (&code)[size], const CheckOptions& options = make_default_obj()) { const Settings settings = settingsBuilder(options.s ? 
*options.s : settings0).certainty(Certainty::inconclusive, options.inconclusive).build(); SimpleTokenizer2 tokenizer(settings, *this, code, options.cpp ? "test.cpp" : "test.c"); @@ -149,7 +150,8 @@ class TestCondition : public TestFixture { } #define checkP(...) checkP_(__FILE__, __LINE__, __VA_ARGS__) - void checkP_(const char* file, int line, const char code[]) + template + void checkP_(const char* file, int line, const char (&code)[size]) { const Settings settings = settingsBuilder(settings0).severity(Severity::performance).certainty(Certainty::inconclusive).build(); diff --git a/test/testincompletestatement.cpp b/test/testincompletestatement.cpp index f1694ea262f..5f864c2d980 100644 --- a/test/testincompletestatement.cpp +++ b/test/testincompletestatement.cpp @@ -39,7 +39,8 @@ class TestIncompleteStatement : public TestFixture { }; #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) - void check_(const char* file, int line, const char code[], const CheckOptions& options = make_default_obj()) { + template + void check_(const char* file, int line, const char (&code)[size], const CheckOptions& options = make_default_obj()) { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, options.inconclusive).build(); SimpleTokenizer2 tokenizer(settings1, *this, code, options.cpp ? "test.cpp" : "test.c"); diff --git a/test/testleakautovar.cpp b/test/testleakautovar.cpp index c50dc44d9d9..097543fbe39 100644 --- a/test/testleakautovar.cpp +++ b/test/testleakautovar.cpp @@ -3229,7 +3229,8 @@ class TestLeakAutoVarRecursiveCountLimit : public TestFixture { const Settings settings = settingsBuilder().library("std.cfg").checkLibrary().build(); #define checkP(...) 
checkP_(__FILE__, __LINE__, __VA_ARGS__) - void checkP_(const char* file, int line, const char code[], bool cpp = false) { + template + void checkP_(const char* file, int line, const char (&code)[size], bool cpp = false) { SimpleTokenizer2 tokenizer(settings, *this, code, cpp?"test.cpp":"test.c"); // Tokenizer.. diff --git a/test/testlibrary.cpp b/test/testlibrary.cpp index e59767a1aa2..44b1b6660a7 100644 --- a/test/testlibrary.cpp +++ b/test/testlibrary.cpp @@ -154,8 +154,8 @@ class TestLibrary : public TestFixture { ""; TokenList tokenList(settingsDefault, Standards::Language::CPP); - std::istringstream istr("foo();"); // <- too few arguments, not library function - ASSERT(tokenList.createTokens(istr)); + const char code[] = "foo();"; // <- too few arguments, not library function + ASSERT(tokenList.createTokensFromString(code)); Token::createMutualLinks(tokenList.front()->next(), tokenList.back()->previous()); tokenList.createAst(); @@ -178,8 +178,8 @@ class TestLibrary : public TestFixture { { TokenList tokenList(settingsDefault, Standards::Language::CPP); - std::istringstream istr("foo();"); // <- too few arguments, not library function - ASSERT(tokenList.createTokens(istr)); + const char code[] = "foo();"; // <- too few arguments, not library function + ASSERT(tokenList.createTokensFromString(code)); Token::createMutualLinks(tokenList.front()->next(), tokenList.back()->previous()); tokenList.createAst(); @@ -187,8 +187,8 @@ class TestLibrary : public TestFixture { } { TokenList tokenList(settingsDefault, Standards::Language::CPP); - std::istringstream istr("foo(a);"); // <- library function - ASSERT(tokenList.createTokens(istr)); + const char code[] = "foo(a);"; // <- library function + ASSERT(tokenList.createTokensFromString(code)); Token::createMutualLinks(tokenList.front()->next(), tokenList.back()->previous()); tokenList.createAst(); @@ -198,8 +198,8 @@ class TestLibrary : public TestFixture { } { TokenList tokenList(settingsDefault, 
Standards::Language::CPP); - std::istringstream istr("foo(a, b);"); // <- library function - ASSERT(tokenList.createTokens(istr)); + const char code[] = "foo(a, b);"; // <- library function + ASSERT(tokenList.createTokensFromString(code)); Token::createMutualLinks(tokenList.front()->next(), tokenList.back()->previous()); tokenList.createAst(); @@ -209,8 +209,8 @@ class TestLibrary : public TestFixture { } { TokenList tokenList(settingsDefault, Standards::Language::CPP); - std::istringstream istr("foo(a, b, c);"); // <- too much arguments, not library function - ASSERT(tokenList.createTokens(istr)); + const char code[] = "foo(a, b, c);"; // <- too much arguments, not library function + ASSERT(tokenList.createTokensFromString(code)); Token::createMutualLinks(tokenList.front()->next(), tokenList.back()->previous()); tokenList.createAst(); diff --git a/test/testother.cpp b/test/testother.cpp index a0583ec13df..64e8ddf5960 100644 --- a/test/testother.cpp +++ b/test/testother.cpp @@ -11711,7 +11711,9 @@ class TestOther : public TestFixture { } #define checkCustomSettings(...) 
checkCustomSettings_(__FILE__, __LINE__, __VA_ARGS__) - void checkCustomSettings_(const char* file, int line, const char code[], bool cpp = true, bool inconclusive = true, bool runSimpleChecks=true, bool verbose=false, Settings* settings = nullptr) { + // TODO: use options + template + void checkCustomSettings_(const char* file, int line, const char (&code)[size], bool cpp = true, bool inconclusive = true, bool runSimpleChecks=true, bool verbose=false, Settings* settings = nullptr) { if (!settings) { settings = &_settings; } @@ -11728,7 +11730,8 @@ class TestOther : public TestFixture { (void)runSimpleChecks; // TODO Remove this } - void checkCustomSettings_(const char* file, int line, const char code[], Settings *s) { + template + void checkCustomSettings_(const char* file, int line, const char (&code)[size], Settings *s) { checkCustomSettings_(file, line, code, true, true, true, false, s); } diff --git a/test/testpreprocessor.cpp b/test/testpreprocessor.cpp index fd290d75c88..171f9aa7105 100644 --- a/test/testpreprocessor.cpp +++ b/test/testpreprocessor.cpp @@ -309,7 +309,7 @@ class TestPreprocessor : public TestFixture { settings.userUndefs.insert(arg+2); std::vector files; // TODO: this adds an empty filename - simplecpp::TokenList tokens(code, size-1,files); + simplecpp::TokenList tokens(code,size-1,files); tokens.removeComments(); Preprocessor preprocessor(settings, *this, Standards::Language::C); // TODO: do we need to consider #file? 
const std::set configs = preprocessor.getConfigs(tokens); @@ -813,25 +813,25 @@ class TestPreprocessor : public TestFixture { } void ticket_3675() { - const char* code = "#ifdef YYSTACKSIZE\n" - "#define YYMAXDEPTH YYSTACKSIZE\n" - "#else\n" - "#define YYSTACKSIZE YYMAXDEPTH\n" - "#endif\n" - "#if YYDEBUG\n" - "#endif\n"; + const char code[] = "#ifdef YYSTACKSIZE\n" + "#define YYMAXDEPTH YYSTACKSIZE\n" + "#else\n" + "#define YYSTACKSIZE YYMAXDEPTH\n" + "#endif\n" + "#if YYDEBUG\n" + "#endif\n"; (void)PreprocessorHelper::getcode(settings0, *this, code); // There's nothing to assert. It just needs to not hang. } void ticket_3699() { - const char* code = "#define INLINE __forceinline\n" - "#define inline __forceinline\n" - "#define __forceinline inline\n" - "#if !defined(_WIN32)\n" - "#endif\n" - "INLINE inline __forceinline\n"; + const char code[] = "#define INLINE __forceinline\n" + "#define inline __forceinline\n" + "#define __forceinline inline\n" + "#if !defined(_WIN32)\n" + "#endif\n" + "INLINE inline __forceinline\n"; const std::map actual = PreprocessorHelper::getcode(settings0, *this, code); // First, it must not hang. Second, inline must becomes inline, and __forceinline must become __forceinline. 
@@ -839,9 +839,9 @@ class TestPreprocessor : public TestFixture { } void ticket_4922() { // #4922 - const char* code = "__asm__ \n" - "{ int extern __value) 0; (double return (\"\" } extern\n" - "__typeof __finite (__finite) __finite __inline \"__GI___finite\");"; + const char code[] = "__asm__ \n" + "{ int extern __value) 0; (double return (\"\" } extern\n" + "__typeof __finite (__finite) __finite __inline \"__GI___finite\");"; (void)PreprocessorHelper::getcode(settings0, *this, code); } @@ -2284,12 +2284,12 @@ class TestPreprocessor : public TestFixture { } void if_sizeof() { // #4071 - static const char* code = "#if sizeof(unsigned short) == 2\n" - "Fred & Wilma\n" - "#elif sizeof(unsigned short) == 4\n" - "Fred & Wilma\n" - "#else\n" - "#endif"; + const char code[] = "#if sizeof(unsigned short) == 2\n" + "Fred & Wilma\n" + "#elif sizeof(unsigned short) == 4\n" + "Fred & Wilma\n" + "#else\n" + "#endif"; const std::map actual = PreprocessorHelper::getcode(settings0, *this, code); ASSERT_EQUALS("\nFred & Wilma", actual.at("")); diff --git a/test/testsimplifytemplate.cpp b/test/testsimplifytemplate.cpp index 7c6736f291a..df7d1f99be5 100644 --- a/test/testsimplifytemplate.cpp +++ b/test/testsimplifytemplate.cpp @@ -5455,11 +5455,11 @@ class TestSimplifyTemplate : public TestFixture { "C> y;")); } - unsigned int templateParameters(const char code[]) { + template + unsigned int templateParameters(const char (&data)[size]) { TokenList tokenlist{settings, Standards::Language::CPP}; - std::istringstream istr(code); tokenlist.appendFileIfNew("test.cpp"); - if (!tokenlist.createTokens(istr)) + if (!tokenlist.createTokensFromString(data)) return false; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -5524,12 +5524,12 @@ class TestSimplifyTemplate : public TestFixture { } // Helper function to unit test TemplateSimplifier::getTemplateNamePosition - int templateNamePositionHelper(const char code[], unsigned offset = 0) { + template + int 
templateNamePositionHelper(const char (&data)[size], unsigned offset = 0) { TokenList tokenlist{settings, Standards::Language::CPP}; - std::istringstream istr(code); tokenlist.appendFileIfNew("test.cpp"); - if (!tokenlist.createTokens(istr)) + if (!tokenlist.createTokensFromString(data)) return false; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -5597,10 +5597,10 @@ class TestSimplifyTemplate : public TestFixture { } // Helper function to unit test TemplateSimplifier::findTemplateDeclarationEnd - bool findTemplateDeclarationEndHelper(const char code[], const char pattern[], unsigned offset = 0) { + template + bool findTemplateDeclarationEndHelper(const char (&data)[size], const char pattern[], unsigned offset = 0) { TokenList tokenlist{settings, Standards::Language::CPP}; - std::istringstream istr(code); - if (!TokenListHelper::createTokens(tokenlist, istr, "test.cpp")) + if (!TokenListHelper::createTokensFromString(tokenlist, data, "test.cpp")) return false; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -5627,11 +5627,11 @@ class TestSimplifyTemplate : public TestFixture { } // Helper function to unit test TemplateSimplifier::getTemplateParametersInDeclaration - bool getTemplateParametersInDeclarationHelper(const char code[], const std::vector & params) { + template + bool getTemplateParametersInDeclarationHelper(const char (&data)[size], const std::vector & params) { TokenList tokenlist{settings, Standards::Language::CPP}; - std::istringstream istr(code); - if (!TokenListHelper::createTokens(tokenlist, istr, "test.cpp")) + if (!TokenListHelper::createTokensFromString(tokenlist, data, "test.cpp")) return false; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); diff --git a/test/testsimplifytypedef.cpp b/test/testsimplifytypedef.cpp index 54286ae3bb2..d72f45521ae 100644 --- a/test/testsimplifytypedef.cpp +++ b/test/testsimplifytypedef.cpp @@ -26,7 +26,6 @@ #include 
"tokenlist.h" #include -#include #include #include @@ -277,10 +276,10 @@ class TestSimplifyTypedef : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, !options.simplify); } - std::string simplifyTypedef(const char code[]) { + template + std::string simplifyTypedef(const char (&data)[size]) { TokenList tokenlist{settings1, Standards::Language::CPP}; - std::istringstream istr(code); - if (!tokenlist.createTokens(istr)) + if (!tokenlist.createTokensFromString(data)) return ""; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -289,8 +288,8 @@ class TestSimplifyTypedef : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, false); } - - std::string simplifyTypedefP(const char code[]) { + template + std::string simplifyTypedefP(const char (&code)[size]) { SimpleTokenizer2 tokenizer(settings0, *this, code, "test.cpp"); // Tokenize.. @@ -311,11 +310,11 @@ class TestSimplifyTypedef : public TestFixture { } - std::string simplifyTypedefC(const char code[]) { + template + std::string simplifyTypedefC(const char (&data)[size]) { TokenList tokenlist{settings1, Standards::Language::C}; - std::istringstream istr(code); - if (!TokenListHelper::createTokens(tokenlist, istr, "file.c")) + if (!TokenListHelper::createTokensFromString(tokenlist, data, "file.c")) return ""; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -328,10 +327,10 @@ class TestSimplifyTypedef : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, false); } - std::string dumpTypedefInfo(const char code[]) { + template + std::string dumpTypedefInfo(const char (&code)[size]) { TokenList tokenlist{settings1, Standards::Language::C}; - std::istringstream istr(code); - if (!TokenListHelper::createTokens(tokenlist, istr, "file.c")) + if (!TokenListHelper::createTokensFromString(tokenlist, code, "file.c")) return {}; Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); @@ -515,17 
+514,16 @@ class TestSimplifyTypedef : public TestFixture { } void carray3() { - const char* code{}; - code = "typedef int a[256];\n" // #11689 - "typedef a b[256];\n" - "b* p;\n"; + const char code[] = "typedef int a[256];\n" // #11689 + "typedef a b[256];\n" + "b* p;\n"; ASSERT_EQUALS("int ( * p ) [ 256 ] [ 256 ] ;", simplifyTypedef(code)); - code = "typedef int a[1];\n" - "typedef a b[2];\n" - "typedef b c[3];\n" - "c* p;\n"; - ASSERT_EQUALS("int ( * p ) [ 3 ] [ 2 ] [ 1 ] ;", simplifyTypedef(code)); + const char code1[] = "typedef int a[1];\n" + "typedef a b[2];\n" + "typedef b c[3];\n" + "c* p;\n"; + ASSERT_EQUALS("int ( * p ) [ 3 ] [ 2 ] [ 1 ] ;", simplifyTypedef(code1)); } void carray4() { @@ -4459,8 +4457,7 @@ class TestSimplifyTypedef : public TestFixture { "void test(rFunctionPointer_fp functionPointer);"; TokenList tokenlist{settings1, Standards::Language::C}; - std::istringstream istr(code); - ASSERT(TokenListHelper::createTokens(tokenlist, istr, "file.c")); + ASSERT(TokenListHelper::createTokensFromString(tokenlist, code, "file.c")); Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); tokenizer.simplifyTypedef(); @@ -4502,8 +4499,7 @@ class TestSimplifyTypedef : public TestFixture { "}"; TokenList tokenlist{settings1, Standards::Language::C}; - std::istringstream istr(code); - ASSERT(TokenListHelper::createTokens(tokenlist, istr, "file.c")); + ASSERT(TokenListHelper::createTokensFromString(tokenlist, code, "file.c")); Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); tokenizer.simplifyTypedef(); @@ -4521,8 +4517,7 @@ class TestSimplifyTypedef : public TestFixture { "}"; TokenList tokenlist{settings1, Standards::Language::C}; - std::istringstream istr(code); - ASSERT(TokenListHelper::createTokens(tokenlist, istr, "file.c")); + ASSERT(TokenListHelper::createTokensFromString(tokenlist, code, "file.c")); Tokenizer tokenizer(std::move(tokenlist), *this); tokenizer.createLinks(); tokenizer.simplifyTypedef(); 
diff --git a/test/teststring.cpp b/test/teststring.cpp index 02bf749bf04..c9a31247f58 100644 --- a/test/teststring.cpp +++ b/test/teststring.cpp @@ -67,7 +67,8 @@ class TestString : public TestFixture { }; #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) - void check_(const char* file, int line, const char code[], const CheckOptions& options = make_default_obj()) { + template + void check_(const char* file, int line, const char (&code)[size], const CheckOptions& options = make_default_obj()) { SimpleTokenizer2 tokenizer(settings, *this, code, options.cpp ? "test.cpp" : "test.c"); // Tokenize.. diff --git a/test/testsuppressions.cpp b/test/testsuppressions.cpp index b9ad2122839..1db32939440 100644 --- a/test/testsuppressions.cpp +++ b/test/testsuppressions.cpp @@ -1252,7 +1252,7 @@ class TestSuppressions : public TestFixture { CppCheck cppCheck(settings, supprs, *this, false, nullptr); // <- do not "use global suppressions". pretend this is a thread that just checks a file. const char code[] = "int f() { int a; return a; }"; - ASSERT_EQUALS(0, cppCheck.check(FileWithDetails("test.c", Standards::Language::C, 0), code)); // <- no unsuppressed error is seen + ASSERT_EQUALS(0, cppCheck.checkBuffer(FileWithDetails("test.c", Standards::Language::C, 0), reinterpret_cast(code), sizeof(code))); // <- no unsuppressed error is seen ASSERT_EQUALS("[test.c:1:25]: (error) Uninitialized variable: a [uninitvar]\n", errout_str()); // <- report error so ThreadExecutor can suppress it and make sure the global suppression is matched. 
} @@ -1296,7 +1296,7 @@ class TestSuppressions : public TestFixture { " int y;\n" "};"; CppCheck cppCheck(settings, supprs, *this, true, nullptr); - ASSERT_EQUALS(0, cppCheck.check(FileWithDetails("/somewhere/test.cpp", Standards::Language::CPP, 0), code)); + ASSERT_EQUALS(0, cppCheck.checkBuffer(FileWithDetails("/somewhere/test.cpp", Standards::Language::CPP, 0), reinterpret_cast(code), sizeof(code))); ASSERT_EQUALS("",errout_str()); } diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index 84d94908454..6e468ad524d 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -497,6 +497,7 @@ class TestTokenizer : public TestFixture { } #define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__) + // TODO: use options template std::string tokenizeAndStringify_(const char* file, int linenr, const char (&code)[size], bool expand = true, Platform::Type platform = Platform::Type::Native, bool cpp = true, Standards::cppstd_t cppstd = Standards::CPP11, Standards::cstd_t cstd = Standards::C11) { @@ -560,15 +561,16 @@ class TestTokenizer : public TestFixture { return tokenizer.tokens()->stringifyList(true,true,true,true,false); } - void directiveDump(const char filedata[], std::ostream& ostr) { - directiveDump(filedata, "test.c", settingsDefault, ostr); + template + void directiveDump(const char (&code)[size], std::ostream& ostr) { + directiveDump(code, "test.c", settingsDefault, ostr); } - void directiveDump(const char filedata[], const char filename[], const Settings& settings, std::ostream& ostr) { - std::istringstream istr(filedata); + template + void directiveDump(const char (&code)[size], const char filename[], const Settings& settings, std::ostream& ostr) { simplecpp::OutputList outputList; std::vector files; - const simplecpp::TokenList tokens1(istr, files, filename, &outputList); + const simplecpp::TokenList tokens1(code, size-1, files, filename, &outputList); Preprocessor preprocessor(settings, *this, 
Path::identify(tokens1.getFiles()[0], false)); std::list directives = preprocessor.createDirectives(tokens1); @@ -910,9 +912,8 @@ class TestTokenizer : public TestFixture { { TokenList tokenlist{settings1, Standards::Language::C}; // headers are treated as C files const char code[] = "void foo(int i) { reinterpret_cast(i) };"; - std::istringstream istr(code); tokenlist.appendFileIfNew("test.h"); - ASSERT(tokenlist.createTokens(istr)); + ASSERT(tokenlist.createTokensFromString(code)); Tokenizer tokenizer(std::move(tokenlist), *this); ASSERT_THROW_INTERNAL(tokenizer.simplifyTokens1(""), SYNTAX); } @@ -6204,12 +6205,12 @@ class TestTokenizer : public TestFixture { Z3 }; - std::string testAst(const char code[], AstStyle style = AstStyle::Simple) { + template + std::string testAst(const char (&data)[size], AstStyle style = AstStyle::Simple) { // tokenize given code.. TokenList tokenlist{settings0, Standards::Language::CPP}; - std::istringstream istr(code); tokenlist.appendFileIfNew("test.cpp"); - if (!tokenlist.createTokens(istr)) + if (!tokenlist.createTokensFromString(data)) return "ERROR"; Tokenizer tokenizer(std::move(tokenlist), *this); @@ -8048,7 +8049,8 @@ class TestTokenizer : public TestFixture { } #define checkHdrs(...) checkHdrs_(__FILE__, __LINE__, __VA_ARGS__) - std::string checkHdrs_(const char* file, int line, const char code[], bool checkHeadersFlag) { + template + std::string checkHdrs_(const char* file, int line, const char (&code)[size], bool checkHeadersFlag) { const Settings settings = settingsBuilder().checkHeaders(checkHeadersFlag).build(); SimpleTokenizer2 tokenizer(settings, *this, code, "test.cpp"); @@ -8201,9 +8203,9 @@ class TestTokenizer : public TestFixture { void cpp11init() { #define testIsCpp11init(...) 
testIsCpp11init_(__FILE__, __LINE__, __VA_ARGS__) - auto testIsCpp11init_ = [this](const char* file, int line, const char* code, const char* find, TokenImpl::Cpp11init expected) { + auto testIsCpp11init_ = [this](const char* file, int line, const std::string& code, const char* find, TokenImpl::Cpp11init expected) { SimpleTokenizer tokenizer(settingsDefault, *this); - ASSERT_LOC(tokenizer.tokenize(code), file, line); + ASSERT_LOC(tokenizer.tokenize(code.data(), code.size()), file, line); const Token* tok = Token::findsimplematch(tokenizer.tokens(), find, strlen(find)); ASSERT_LOC(tok, file, line); @@ -8627,7 +8629,7 @@ class TestTokenizer : public TestFixture { } void dumpFallthrough() { - const char * code = "void f(int n) {\n" + const char code[] = "void f(int n) {\n" " void g(), h(), i();\n" " switch (n) {\n" " case 1:\n" @@ -8651,9 +8653,9 @@ class TestTokenizer : public TestFixture { } void simplifyRedundantParentheses() { - const char *code = "int f(struct S s) {\n" - " return g(1, &(int){ s.i });\n" - "}\n"; + const char code[] = "int f(struct S s) {\n" + " return g(1, &(int){ s.i });\n" + "}\n"; SimpleTokenizer tokenizer(settingsDefault, *this, false); ASSERT_NO_THROW(tokenizer.tokenize(code)); } @@ -8697,10 +8699,9 @@ class TestTokenizerCompileLimits : public TestFixture "int PTR4 q4_var RBR4 = 0;\n"; // Preprocess file.. 
- std::istringstream fin(raw_code); simplecpp::OutputList outputList; std::vector files; - const simplecpp::TokenList tokens1(fin, files, "", &outputList); + const simplecpp::TokenList tokens1(raw_code, sizeof(raw_code), files, "", &outputList); const std::string filedata = tokens1.stringify(); const std::string code = PreprocessorHelper::getcodeforcfg(settingsDefault, *this, filedata, "", "test.c"); diff --git a/test/testtokenlist.cpp b/test/testtokenlist.cpp index d96ebcb300a..627f9533c30 100644 --- a/test/testtokenlist.cpp +++ b/test/testtokenlist.cpp @@ -26,7 +26,6 @@ #include "token.h" #include "tokenlist.h" -#include #include #include #include @@ -125,9 +124,8 @@ class TestTokenList : public TestFixture { const char code2[] = "_Generic"; // C11 keyword const Settings s = settingsBuilder().c(Standards::C89).build(); TokenList tokenlist(s, Standards::Language::C); - std::istringstream istr(code2); tokenlist.appendFileIfNew("a.c"); - ASSERT(tokenlist.createTokens(istr)); + ASSERT(tokenlist.createTokensFromString(code2)); ASSERT_EQUALS(false, tokenlist.front()->isKeyword()); } @@ -147,9 +145,8 @@ class TestTokenList : public TestFixture { const char code2[] = "noexcept"; // C++11 keyword const Settings s = settingsBuilder().cpp(Standards::CPP03).build(); TokenList tokenlist(s, Standards::Language::CPP); - std::istringstream istr(code2); tokenlist.appendFileIfNew("a.cpp"); - ASSERT(tokenlist.createTokens(istr)); + ASSERT(tokenlist.createTokensFromString(code2)); ASSERT_EQUALS(false, tokenlist.front()->isKeyword()); } } @@ -158,9 +155,8 @@ class TestTokenList : public TestFixture { // analyzing /usr/include/poll.h caused Path::identify() to be called with an empty filename from // TokenList::determineCppC() because there are no tokens const char code[] = "#include "; - std::istringstream istr(code); std::vector files; - simplecpp::TokenList tokens1(istr, files, "poll.h", nullptr); + simplecpp::TokenList tokens1(code, sizeof(code), files, "poll.h", nullptr); 
Preprocessor preprocessor(settingsDefault, *this, Path::identify(tokens1.getFiles()[0], false)); simplecpp::TokenList tokensP = preprocessor.preprocess(tokens1, "", files, true); TokenList tokenlist(settingsDefault, Standards::Language::C); // headers are treated as C files @@ -168,11 +164,10 @@ class TestTokenList : public TestFixture { } void ast1() const { - const std::string s = "('Release|x64' == 'Release|x64');"; + const char code[] = "('Release|x64' == 'Release|x64');"; TokenList tokenlist(settingsDefault, Standards::Language::C); - std::istringstream istr(s); - ASSERT(tokenlist.createTokens(istr)); + ASSERT(tokenlist.createTokensFromString(code)); // TODO: put this logic in TokenList // generate links { diff --git a/test/testuninitvar.cpp b/test/testuninitvar.cpp index dfdc048b4f9..6321a159004 100644 --- a/test/testuninitvar.cpp +++ b/test/testuninitvar.cpp @@ -5468,7 +5468,8 @@ class TestUninitVar : public TestFixture { TODO_ASSERT_EQUALS("", "[test.c:4:14]: (error) Uninitialized variable: d [legacyUninitvar]\n", errout_str()); } - void valueFlowUninit_(const char* file, int line, const char code[], bool cpp = true) + template + void valueFlowUninit_(const char* file, int line, const char (&code)[size], bool cpp = true) { // Tokenize.. const Settings s = settingsBuilder(settings).debugwarnings(false).build(); @@ -7913,7 +7914,8 @@ class TestUninitVar : public TestFixture { ASSERT_EQUALS("", errout_str()); } - void ctu_(const char* file, int line, const char code[]) { + template + void ctu_(const char* file, int line, const char (&code)[size]) { // Tokenize.. SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); diff --git a/test/testunusedprivfunc.cpp b/test/testunusedprivfunc.cpp index 32df6a4d494..d3ae98a975f 100644 --- a/test/testunusedprivfunc.cpp +++ b/test/testunusedprivfunc.cpp @@ -93,7 +93,8 @@ class TestUnusedPrivateFunction : public TestFixture { }; #define check(...) 
check_(__FILE__, __LINE__, __VA_ARGS__) - void check_(const char* file, int line, const char code[], const CheckOptions& options = make_default_obj()) { + template + void check_(const char* file, int line, const char (&code)[size], const CheckOptions& options = make_default_obj()) { const Settings settings1 = settingsBuilder(settings).platform(options.platform).build(); SimpleTokenizer2 tokenizer(settings1, *this, code, "test.cpp"); diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp index ae35cdebe99..124a48ab5fe 100644 --- a/test/testunusedvar.cpp +++ b/test/testunusedvar.cpp @@ -289,7 +289,8 @@ class TestUnusedVar : public TestFixture { }; #define checkStructMemberUsage(...) checkStructMemberUsage_(__FILE__, __LINE__, __VA_ARGS__) - void checkStructMemberUsage_(const char* file, int line, const char code[], const CheckStructMemberUsageOptions& options = make_default_obj()) { + template + void checkStructMemberUsage_(const char* file, int line, const char (&code)[size], const CheckStructMemberUsageOptions& options = make_default_obj()) { // Tokenize.. SimpleTokenizer tokenizer(settings, *this, options.cpp); if (options.directives) @@ -302,7 +303,8 @@ class TestUnusedVar : public TestFixture { } #define checkStructMemberUsageP(...) checkStructMemberUsageP_(__FILE__, __LINE__, __VA_ARGS__) - void checkStructMemberUsageP_(const char* file, int line, const char code[]) { + template + void checkStructMemberUsageP_(const char* file, int line, const char (&code)[size]) { SimpleTokenizer2 tokenizer(settings, *this, code, "test.cpp"); // Tokenizer.. @@ -314,7 +316,8 @@ class TestUnusedVar : public TestFixture { } #define checkFunctionVariableUsageP(...) 
checkFunctionVariableUsageP_(__FILE__, __LINE__, __VA_ARGS__) - void checkFunctionVariableUsageP_(const char* file, int line, const char code[]) { + template + void checkFunctionVariableUsageP_(const char* file, int line, const char (&code)[size]) { SimpleTokenizer2 tokenizer(settings, *this, code, "test.cpp"); // Tokenizer..