diff options
author | Jeff Brown <jeffbrown@google.com> | 2012-04-17 18:19:50 -0700 |
---|---|---|
committer | Jeff Brown <jeffbrown@google.com> | 2012-04-17 18:19:50 -0700 |
commit | a8be8fa0966521afe78324351e805b4a8351dbd9 (patch) | |
tree | b2d93d0e2dc83517c4ff744d54e8651d7e0e0a7c /libs/utils | |
parent | 172a62a224967beee9e35e02a5b2fb2705dd2cc0 (diff) | |
download | frameworks_native-a8be8fa0966521afe78324351e805b4a8351dbd9.zip frameworks_native-a8be8fa0966521afe78324351e805b4a8351dbd9.tar.gz frameworks_native-a8be8fa0966521afe78324351e805b4a8351dbd9.tar.bz2 |
Support tokenizing arbitrary content.
Bug: 6110399
Change-Id: I37be63b68934fd451e6dffbf7d6079553619c0a3
Diffstat (limited to 'libs/utils')
-rw-r--r-- | libs/utils/Tokenizer.cpp | 20 |
1 file changed, 16 insertions, 4 deletions
diff --git a/libs/utils/Tokenizer.cpp b/libs/utils/Tokenizer.cpp index efda2bf..7067533 100644 --- a/libs/utils/Tokenizer.cpp +++ b/libs/utils/Tokenizer.cpp @@ -35,15 +35,18 @@ static inline bool isDelimiter(char ch, const char* delimiters) { return strchr(delimiters, ch) != NULL; } -Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length) : +Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, + bool ownBuffer, size_t length) : mFilename(filename), mFileMap(fileMap), - mBuffer(buffer), mLength(length), mCurrent(buffer), mLineNumber(1) { + mBuffer(buffer), mOwnBuffer(ownBuffer), mLength(length), + mCurrent(buffer), mLineNumber(1) { } Tokenizer::~Tokenizer() { if (mFileMap) { mFileMap->release(); - } else { + } + if (mOwnBuffer) { delete[] mBuffer; } } @@ -65,6 +68,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) { size_t length = size_t(stat.st_size); FileMap* fileMap = new FileMap(); + bool ownBuffer = false; char* buffer; if (fileMap->create(NULL, fd, 0, length, true)) { fileMap->advise(FileMap::SEQUENTIAL); @@ -77,6 +81,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) { // The length we obtained from stat is wrong too (it will always be 4096) // so we must trust that read will read the entire file. 
buffer = new char[length]; + ownBuffer = true; ssize_t nrd = read(fd, buffer, length); if (nrd < 0) { result = -errno; @@ -89,7 +94,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) { } if (!result) { - *outTokenizer = new Tokenizer(filename, fileMap, buffer, length); + *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length); } } close(fd); @@ -97,6 +102,13 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) { return result; } +status_t Tokenizer::fromContents(const String8& filename, + const char* contents, Tokenizer** outTokenizer) { + *outTokenizer = new Tokenizer(filename, NULL, + const_cast<char*>(contents), false, strlen(contents)); + return OK; +} + String8 Tokenizer::getLocation() const { String8 result; result.appendFormat("%s:%d", mFilename.string(), mLineNumber); |