100 changes: 50 additions & 50 deletions Makefile

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion lib/clangimport.cpp
@@ -19,6 +19,7 @@
 #include "clangimport.h"
 
 #include "errortypes.h"
+#include "filesettings.h"
 #include "mathlib.h"
 #include "settings.h"
 #include "standards.h"
@@ -552,7 +553,7 @@ void clangimport::AstNode::setLocations(TokenList &tokenList, int file, int line
             const bool windowsPath = colon == 2 && ext.size() > 3 && ext[2] == ':';
             const std::string::size_type sep1 = windowsPath ? ext.find(':', 4) : colon;
             const std::string::size_type sep2 = ext.find(':', sep1 + 1);
-            file = tokenList.appendFileIfNew(ext.substr(1, sep1 - 1));
+            file = tokenList.appendFileIfNew(FileWithDetails(ext.substr(1, sep1 - 1)));
             line = strToInt<int>(ext.substr(sep1 + 1, sep2 - sep1 - 1));
         }
         else {
2 changes: 1 addition & 1 deletion lib/cppcheck.cpp
@@ -717,7 +717,7 @@ unsigned int CppCheck::checkClang(const FileWithDetails &file)
 
     try {
         Tokenizer tokenizer(mSettings, mErrorLogger);
-        tokenizer.list.appendFileIfNew(file.spath());
+        tokenizer.list.appendFileIfNew(file);
         std::istringstream ast(output2);
         clangimport::parseClangAstDump(tokenizer, ast);
         ValueFlow::setValues(tokenizer.list,
13 changes: 7 additions & 6 deletions lib/tokenlist.cpp
@@ -23,6 +23,7 @@
 #include "astutils.h"
 #include "errorlogger.h"
 #include "errortypes.h"
+#include "filesettings.h"
 #include "keywords.h"
 #include "library.h"
 #include "path.h"
@@ -112,17 +113,17 @@ void TokenList::determineCppC()
     }
 }
 
-int TokenList::appendFileIfNew(std::string fileName)
+int TokenList::appendFileIfNew(const FileWithDetails& file)
 {
     // Has this file been tokenized already?
     auto it = std::find_if(mFiles.cbegin(), mFiles.cend(), [&](const std::string& f) {
-        return Path::sameFileName(f, fileName);
+        return Path::sameFileName(f, file.spath());
     });
     if (it != mFiles.cend())
         return static_cast<int>(std::distance(mFiles.cbegin(), it));
 
     // The "mFiles" vector remembers what files have been tokenized..
-    mFiles.push_back(std::move(fileName));
+    mFiles.push_back(file.spath());
 
     // Update mIsC and mIsCpp properties
     if (mFiles.size() == 1) { // Update only useful if first file added to _files
@@ -344,13 +345,13 @@ void TokenList::insertTokens(Token *dest, const Token *src, nonneg int n)
 // Tokenize - tokenizes a given file.
 //---------------------------------------------------------------------------
 
-bool TokenList::createTokens(std::istream &code, const std::string& file0)
+bool TokenList::createTokens(std::istream &code, const FileWithDetails& file0)
 {
-    ASSERT_LANG(!file0.empty());
+    ASSERT_LANG(!file0.spath().empty());
 
     appendFileIfNew(file0);
 
-    return createTokensInternal(code, file0);
+    return createTokensInternal(code, file0.spath());
 }
 
 //---------------------------------------------------------------------------
5 changes: 3 additions & 2 deletions lib/tokenlist.h
@@ -32,6 +32,7 @@
 class Token;
 class TokenList;
 class Settings;
+class FileWithDetails;
 
 namespace simplecpp {
     class TokenList;
@@ -105,7 +106,7 @@ class CPPCHECKLIB TokenList {
      * @param code input stream for code
      * @param file0 source file name
      */
-    bool createTokens(std::istream &code, const std::string& file0);
+    bool createTokens(std::istream &code, const FileWithDetails& file0);
     bool createTokens(std::istream &code, Standards::Language lang);
 
     void createTokens(simplecpp::TokenList&& tokenList);
@@ -114,7 +115,7 @@ class CPPCHECKLIB TokenList {
     void deallocateTokens();
 
     /** append file name if seen the first time; return its index in any case */
-    int appendFileIfNew(std::string fileName);
+    int appendFileIfNew(const FileWithDetails& file);
 
     /** get first token of list */
    const Token *front() const {
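
Taken together, the tokenlist.cpp and tokenlist.h changes mean that callers now hand TokenList a FileWithDetails object instead of a raw path string. A minimal sketch of the updated call pattern (assuming FileWithDetails is constructible from a path string, as the constructions elsewhere in this PR show; tokenizeSnippet is a hypothetical helper, not part of the patch):

    #include "filesettings.h"
    #include "tokenlist.h"

    #include <sstream>
    #include <string>

    // Hypothetical caller: tokenize an in-memory snippet under a given file name.
    bool tokenizeSnippet(TokenList &list, const std::string &code, const std::string &path)
    {
        // Wrap the raw path; TokenList::createTokens() now takes the wrapper type
        // and registers it itself via appendFileIfNew().
        const FileWithDetails file(path);

        std::istringstream istr(code);
        return list.createTokens(istr, file);
    }
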
4 changes: 2 additions & 2 deletions oss-fuzz/Makefile
@@ -246,7 +246,7 @@ $(libcppdir)/checkunusedvar.o: ../lib/checkunusedvar.cpp ../lib/addoninfo.h ../l
 $(libcppdir)/checkvaarg.o: ../lib/checkvaarg.cpp ../lib/addoninfo.h ../lib/astutils.h ../lib/check.h ../lib/checkers.h ../lib/checkvaarg.h ../lib/config.h ../lib/errortypes.h ../lib/library.h ../lib/mathlib.h ../lib/platform.h ../lib/settings.h ../lib/smallvector.h ../lib/sourcelocation.h ../lib/standards.h ../lib/symboldatabase.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenize.h ../lib/tokenlist.h ../lib/utils.h ../lib/vfvalue.h
 	$(CXX) ${LIB_FUZZING_ENGINE} $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $(libcppdir)/checkvaarg.cpp
 
-$(libcppdir)/clangimport.o: ../lib/clangimport.cpp ../lib/addoninfo.h ../lib/checkers.h ../lib/clangimport.h ../lib/config.h ../lib/errortypes.h ../lib/library.h ../lib/mathlib.h ../lib/platform.h ../lib/settings.h ../lib/sourcelocation.h ../lib/standards.h ../lib/symboldatabase.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenize.h ../lib/tokenlist.h ../lib/utils.h ../lib/vfvalue.h
+$(libcppdir)/clangimport.o: ../lib/clangimport.cpp ../lib/addoninfo.h ../lib/checkers.h ../lib/clangimport.h ../lib/config.h ../lib/errortypes.h ../lib/filesettings.h ../lib/library.h ../lib/mathlib.h ../lib/path.h ../lib/platform.h ../lib/settings.h ../lib/sourcelocation.h ../lib/standards.h ../lib/symboldatabase.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenize.h ../lib/tokenlist.h ../lib/utils.h ../lib/vfvalue.h
 	$(CXX) ${LIB_FUZZING_ENGINE} $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $(libcppdir)/clangimport.cpp
 
 $(libcppdir)/color.o: ../lib/color.cpp ../lib/color.h ../lib/config.h
@@ -330,7 +330,7 @@ $(libcppdir)/timer.o: ../lib/timer.cpp ../lib/config.h ../lib/timer.h ../lib/uti
 $(libcppdir)/token.o: ../lib/token.cpp ../externals/simplecpp/simplecpp.h ../lib/addoninfo.h ../lib/astutils.h ../lib/checkers.h ../lib/config.h ../lib/errortypes.h ../lib/library.h ../lib/mathlib.h ../lib/platform.h ../lib/settings.h ../lib/smallvector.h ../lib/sourcelocation.h ../lib/standards.h ../lib/symboldatabase.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenlist.h ../lib/tokenrange.h ../lib/utils.h ../lib/valueflow.h ../lib/vfvalue.h
 	$(CXX) ${LIB_FUZZING_ENGINE} $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $(libcppdir)/token.cpp
 
-$(libcppdir)/tokenlist.o: ../lib/tokenlist.cpp ../externals/simplecpp/simplecpp.h ../lib/addoninfo.h ../lib/astutils.h ../lib/checkers.h ../lib/config.h ../lib/errorlogger.h ../lib/errortypes.h ../lib/keywords.h ../lib/library.h ../lib/mathlib.h ../lib/path.h ../lib/platform.h ../lib/settings.h ../lib/smallvector.h ../lib/standards.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenlist.h ../lib/utils.h ../lib/vfvalue.h
+$(libcppdir)/tokenlist.o: ../lib/tokenlist.cpp ../externals/simplecpp/simplecpp.h ../lib/addoninfo.h ../lib/astutils.h ../lib/checkers.h ../lib/config.h ../lib/errorlogger.h ../lib/errortypes.h ../lib/filesettings.h ../lib/keywords.h ../lib/library.h ../lib/mathlib.h ../lib/path.h ../lib/platform.h ../lib/settings.h ../lib/smallvector.h ../lib/standards.h ../lib/templatesimplifier.h ../lib/token.h ../lib/tokenlist.h ../lib/utils.h ../lib/vfvalue.h
 	$(CXX) ${LIB_FUZZING_ENGINE} $(CPPFLAGS) $(CXXFLAGS) -c -o $@ $(libcppdir)/tokenlist.cpp
 
 $(libcppdir)/utils.o: ../lib/utils.cpp ../lib/config.h ../lib/utils.h
5 changes: 5 additions & 0 deletions test/helpers.cpp
@@ -233,3 +233,8 @@ Library::Error LibraryHelper::loadxmldoc(Library &lib, const tinyxml2::XMLDocume
 {
     return lib.load(doc);
 }
+
+FileWithDetails createFileWithDetails(std::string filename)
+{
+    return FileWithDetails(std::move(filename));
+}
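
The new helper is a thin wrapper around the FileWithDetails constructor, so test call sites can keep passing a plain file name. An illustrative fragment from inside a test fixture (not part of the patch; assumes a Tokenizer named tokenizer and a code string, as in the surrounding test diffs) showing the two equivalent spellings:

    std::istringstream istr(code);
    // Direct construction of the wrapper type...
    ASSERT(tokenizer.list.createTokens(istr, FileWithDetails("test.cpp")));
    // ...or via the test helper added above, which does the same thing.
    ASSERT(tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")));
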
7 changes: 5 additions & 2 deletions test/helpers.h
@@ -19,6 +19,7 @@
 #ifndef helpersH
 #define helpersH
 
+#include "filesettings.h"
 #include "library.h"
 #include "preprocessor.h"
 #include "settings.h"
@@ -44,6 +45,8 @@ namespace tinyxml2 {
     class XMLDocument;
 }
 
+FileWithDetails createFileWithDetails(std::string filename);
+
 // TODO: make Tokenizer private
 class SimpleTokenizer : public Tokenizer {
 public:
@@ -82,7 +85,7 @@ class SimpleTokenizer : public Tokenizer {
                   const std::string &configuration = "")
     {
         std::istringstream istr(code);
-        if (!list.createTokens(istr, cpp ? "test.cpp" : "test.c"))
+        if (!list.createTokens(istr, createFileWithDetails(cpp ? "test.cpp" : "test.c")))
             return false;
 
         return simplifyTokens1(configuration);
@@ -94,7 +97,7 @@ class SimpleTokenizer : public Tokenizer {
                   const std::string &configuration = "")
     {
         std::istringstream istr(code);
-        if (!list.createTokens(istr, cpp ? "test.cpp" : "test.c"))
+        if (!list.createTokens(istr, createFileWithDetails(cpp ? "test.cpp" : "test.c")))
            return false;
 
         return simplifyTokens1(configuration);
2 changes: 1 addition & 1 deletion test/testclass.cpp
@@ -9088,7 +9088,7 @@ class TestClass : public TestFixture {
             Tokenizer tokenizer(settingsDefault, *this);
             std::istringstream istr(c);
             const std::string filename = std::to_string(fileInfo.size()) + ".cpp";
-            ASSERT(tokenizer.list.createTokens(istr, filename));
+            ASSERT(tokenizer.list.createTokens(istr, createFileWithDetails(filename)));
             ASSERT(tokenizer.simplifyTokens1(""));
             fileInfo.push_back(check.getFileInfo(tokenizer, settingsDefault));
         }
8 changes: 4 additions & 4 deletions test/testsimplifytemplate.cpp
@@ -5390,7 +5390,7 @@ class TestSimplifyTemplate : public TestFixture {
         Tokenizer tokenizer(settings, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "test.cpp"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")))
             return false;
         tokenizer.createLinks();
         tokenizer.splitTemplateRightAngleBrackets(false);
@@ -5458,7 +5458,7 @@ class TestSimplifyTemplate : public TestFixture {
         Tokenizer tokenizer(settings, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "test.cpp"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")))
             return false;
         tokenizer.createLinks();
         tokenizer.splitTemplateRightAngleBrackets(false);
@@ -5529,7 +5529,7 @@ class TestSimplifyTemplate : public TestFixture {
         Tokenizer tokenizer(settings, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "test.cpp"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")))
             return false;
         tokenizer.createLinks();
         tokenizer.splitTemplateRightAngleBrackets(false);
@@ -5559,7 +5559,7 @@ class TestSimplifyTemplate : public TestFixture {
         Tokenizer tokenizer(settings, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "test.cpp"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")))
             return false;
         tokenizer.createLinks();
         tokenizer.splitTemplateRightAngleBrackets(false);
6 changes: 3 additions & 3 deletions test/testsimplifytypedef.cpp
@@ -312,7 +312,7 @@ class TestSimplifyTypedef : public TestFixture {
         Tokenizer tokenizer(settings1, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "file.c"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("file.c")))
             return "";
         tokenizer.createLinks();
         tokenizer.simplifyTypedef();
@@ -328,7 +328,7 @@ class TestSimplifyTypedef : public TestFixture {
         Tokenizer tokenizer(settings1, *this);
 
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr, "file.c"))
+        if (!tokenizer.list.createTokens(istr, createFileWithDetails("file.c")))
             return {};
         tokenizer.createLinks();
         tokenizer.simplifyTypedef();
@@ -4456,7 +4456,7 @@ class TestSimplifyTypedef : public TestFixture {
 
         Tokenizer tokenizer(settings1, *this);
         std::istringstream istr(code);
-        ASSERT(tokenizer.list.createTokens(istr, "file.c"));
+        ASSERT(tokenizer.list.createTokens(istr, createFileWithDetails("file.c")));
         tokenizer.createLinks();
         tokenizer.simplifyTypedef();
 
2 changes: 1 addition & 1 deletion test/testsimplifyusing.cpp
@@ -118,7 +118,7 @@ class TestSimplifyUsing : public TestFixture {
         std::vector<std::string> files(1, "test.cpp");
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);
         std::istringstream istr(code);
-        ASSERT_LOC(tokenizer.list.createTokens(istr, "test.cpp"), file, line); // TODO: this creates the tokens a second time
+        ASSERT_LOC(tokenizer.list.createTokens(istr, createFileWithDetails("test.cpp")), file, line); // TODO: this creates the tokens a second time
         ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line);
         return tokenizer.tokens()->stringifyList(nullptr);
     }
4 changes: 2 additions & 2 deletions test/testtokenize.cpp
@@ -863,7 +863,7 @@ class TestTokenizer : public TestFixture {
            Tokenizer tokenizer(settings1, *this);
            const char code[] = "void foo(int i) { reinterpret_cast<char>(i) };";
            std::istringstream istr(code);
-           ASSERT(tokenizer.list.createTokens(istr, "test.h"));
+           ASSERT(tokenizer.list.createTokens(istr, createFileWithDetails("test.h")));
            ASSERT_THROW_INTERNAL(tokenizer.simplifyTokens1(""), SYNTAX);
         }
     }
@@ -6122,7 +6122,7 @@ class TestTokenizer : public TestFixture {
         // tokenize given code..
         Tokenizer tokenizer(settings0, *this);
         std::istringstream istr(code);
-        if (!tokenizer.list.createTokens(istr,"test.cpp"))
+        if (!tokenizer.list.createTokens(istr,createFileWithDetails("test.cpp")))
            return "ERROR";
 
         tokenizer.combineStringAndCharLiterals();
4 changes: 2 additions & 2 deletions test/testtokenlist.cpp
@@ -129,7 +129,7 @@ class TestTokenList : public TestFixture {
            const Settings s = settingsBuilder().c(Standards::C89).build();
            TokenList tokenlist(&s);
            std::istringstream istr(code2);
-           ASSERT(tokenlist.createTokens(istr, "a.c"));
+           ASSERT(tokenlist.createTokens(istr, createFileWithDetails("a.c")));
            ASSERT_EQUALS(false, tokenlist.front()->isKeyword());
         }
 
@@ -150,7 +150,7 @@ class TestTokenList : public TestFixture {
            const Settings s = settingsBuilder().cpp(Standards::CPP03).build();
            TokenList tokenlist(&s);
            std::istringstream istr(code2);
-           ASSERT(tokenlist.createTokens(istr, "a.cpp"));
+           ASSERT(tokenlist.createTokens(istr, createFileWithDetails("a.cpp")));
            ASSERT_EQUALS(false, tokenlist.front()->isKeyword());
         }
     }
2 changes: 1 addition & 1 deletion test/testunusedfunctions.cpp
@@ -600,7 +600,7 @@ class TestUnusedFunctions : public TestFixture {
 
         Tokenizer tokenizer(settings, *this);
         std::istringstream istr(code);
-        ASSERT(tokenizer.list.createTokens(istr, fname));
+        ASSERT(tokenizer.list.createTokens(istr, createFileWithDetails(fname)));
         ASSERT(tokenizer.simplifyTokens1(""));
 
         c.parseTokens(tokenizer, settings);
2 changes: 1 addition & 1 deletion test/testvarid.cpp
@@ -286,7 +286,7 @@ class TestVarID : public TestFixture {
     std::string tokenizeHeader_(const char* file, int line, const char (&code)[size], const char filename[]) {
         Tokenizer tokenizer(settings, *this);
         std::istringstream istr(code);
-        ASSERT_LOC(tokenizer.list.createTokens(istr, filename), file, line);
+        ASSERT_LOC(tokenizer.list.createTokens(istr, createFileWithDetails(filename)), file, line);
         ASSERT_EQUALS(true, tokenizer.simplifyTokens1(""));
 
         // result..