pdfviewer: more plumbing to load references dynamically
Review URL: https://codereview.chromium.org/18536014

git-svn-id: http://skia.googlecode.com/svn/trunk@9988 2bbb7eff-a529-9590-31e7-b0007b416f81

This commit is contained in:
parent 432640ae0b
commit 951d6532de
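This CL threads the owning SkNativeParsedPDF through the parsing path so indirect references can be resolved while objects are loaded: nextObject(), readArray(), readDictionary() and readStream() gain a trailing SkNativeParsedPDF* doc parameter, SkPdfNativeTokenizer remembers it as a new fDoc member, and several SkNativeParsedPDF methods lose their const qualifier so they can resolve references in place. A minimal sketch of the new call shape, assuming the PdfViewer headers (the wrapper function itself is illustrative, not part of the CL):

    // Illustrative helper: parse one object with the document available so
    // nested dictionaries/arrays/streams can resolve indirect references
    // (e.g. a stream's /Length) as they are read.
    static unsigned char* parseWithDoc(unsigned char* start, unsigned char* end,
                                       SkPdfObject* token, SkPdfAllocator* allocator,
                                       SkNativeParsedPDF* doc) {
        // New 5-argument form; the xref/trailer code below passes NULL for doc
        // where resolution has to be skipped.
        return nextObject(start, end, token, allocator, doc);
    }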
@@ -374,8 +374,8 @@ class PdfClassManager:
 if len(prop.fTypes.split()) == 1:
 t = prop.fTypes.strip()

-nativeFileClass.write(' ' + knowTypes[t][0] + ' ' + prop.fCppName + '(const SkNativeParsedPDF* doc);\n')
-nativeFileClassCpp.write('' + knowTypes[t][0] + ' SkPdf' + cls.fName + '::' + prop.fCppName + '(const SkNativeParsedPDF* doc) {\n')
+nativeFileClass.write(' ' + knowTypes[t][0] + ' ' + prop.fCppName + '(SkNativeParsedPDF* doc);\n')
+nativeFileClassCpp.write('' + knowTypes[t][0] + ' SkPdf' + cls.fName + '::' + prop.fCppName + '(SkNativeParsedPDF* doc) {\n')
 nativeFileClassCpp.write(' SkPdfObject* ret = get(\"' + prop.fName + '\", \"' + prop.fAbr + '\");\n')
 nativeFileClassCpp.write(' if (doc) {ret = doc->resolveReference(ret);}\n')
 nativeFileClassCpp.write(' if ((ret != NULL && ' + knowTypes[t][3] + ') || (doc == NULL && ret != NULL && ret->isReference())) return ' + knowTypes[t][1] + ';\n')
@@ -395,17 +395,17 @@ class PdfClassManager:
 for type in prop.fTypes.split():
 t = type.strip()

-nativeFileClass.write(' bool is' + prop.fCppName + 'A' + t.title() + '(const SkNativeParsedPDF* doc);\n')
+nativeFileClass.write(' bool is' + prop.fCppName + 'A' + t.title() + '(SkNativeParsedPDF* doc);\n')

-nativeFileClassCpp.write('bool SkPdf' + cls.fName + '::is' + prop.fCppName + 'A' + t.title() + '(const SkNativeParsedPDF* doc) {\n')
+nativeFileClassCpp.write('bool SkPdf' + cls.fName + '::is' + prop.fCppName + 'A' + t.title() + '(SkNativeParsedPDF* doc) {\n')
 nativeFileClassCpp.write(' SkPdfObject* ret = get(\"' + prop.fName + '\", \"' + prop.fAbr + '\");\n')
 nativeFileClassCpp.write(' if (doc) {ret = doc->resolveReference(ret);}\n')
 nativeFileClassCpp.write(' return ret != NULL && ' + knowTypes[t][3] + ';\n')
 nativeFileClassCpp.write('}\n')
 nativeFileClassCpp.write('\n')

-nativeFileClass.write(' ' + knowTypes[t][0] + ' get' + prop.fCppName + 'As' + t.title() + '(const SkNativeParsedPDF* doc);\n')
-nativeFileClassCpp.write('' + knowTypes[t][0] + ' SkPdf' + cls.fName + '::get' + prop.fCppName + 'As' + t.title() + '(const SkNativeParsedPDF* doc) {\n')
+nativeFileClass.write(' ' + knowTypes[t][0] + ' get' + prop.fCppName + 'As' + t.title() + '(SkNativeParsedPDF* doc);\n')
+nativeFileClassCpp.write('' + knowTypes[t][0] + ' SkPdf' + cls.fName + '::get' + prop.fCppName + 'As' + t.title() + '(SkNativeParsedPDF* doc) {\n')

 nativeFileClassCpp.write(' SkPdfObject* ret = get(\"' + prop.fName + '\", \"' + prop.fAbr + '\");\n')
 nativeFileClassCpp.write(' if (doc) {ret = doc->resolveReference(ret);}\n')
@@ -468,11 +468,11 @@ class PdfClassManager:

 fileMapperNative.write('class SkPdfMapper {\n')

-fileMapperNative.write(' const SkNativeParsedPDF* fParsedDoc;\n')
+fileMapperNative.write(' SkNativeParsedPDF* fParsedDoc;\n')

 fileMapperNative.write('public:\n')

-fileMapperNative.write(' SkPdfMapper(const SkNativeParsedPDF* doc) : fParsedDoc(doc) {}\n')
+fileMapperNative.write(' SkPdfMapper(SkNativeParsedPDF* doc) : fParsedDoc(doc) {}\n')
 fileMapperNative.write('\n')

 for name in self.fClassesNamesInOrder:
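For context, the accessor the generator now emits for a single-type property looks roughly like the sketch below; the class and property names are illustrative, and the real return type, predicate and cast come from the knowTypes table:

    // Sketch of the emitted accessor for an illustrative integer property "Length".
    int64_t SkPdfExampleDictionary::Length(SkNativeParsedPDF* doc) {
      SkPdfObject* ret = get("Length", "");
      if (doc) {ret = doc->resolveReference(ret);}  // new: follow indirect references
      if ((ret != NULL && ret->isInteger()) ||
          (doc == NULL && ret != NULL && ret->isReference())) return ret->intValue();
      // missing/invalid-value handling elided
      return 0;
    }

The is<Prop>A<Type>() / get<Prop>As<Type>() accessors in the second hunk above follow the same shape for properties that admit several types.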
@@ -128,14 +128,14 @@ unsigned char* SkNativeParsedPDF::readCrossReferenceSection(unsigned char* xrefS
 while (current < trailerEnd) {
 token.reset();
 unsigned char* previous = current;
-current = nextObject(current, trailerEnd, &token, NULL);
+current = nextObject(current, trailerEnd, &token, NULL, NULL);
 if (!token.isInteger()) {
 return previous;
 }

 int startId = (int)token.intValue();
 token.reset();
-current = nextObject(current, trailerEnd, &token, NULL);
+current = nextObject(current, trailerEnd, &token, NULL, NULL);

 if (!token.isInteger()) {
 // TODO(edisonn): report/warning
@@ -146,7 +146,7 @@ unsigned char* SkNativeParsedPDF::readCrossReferenceSection(unsigned char* xrefS

 for (int i = 0; i < entries; i++) {
 token.reset();
-current = nextObject(current, trailerEnd, &token, NULL);
+current = nextObject(current, trailerEnd, &token, NULL, NULL);
 if (!token.isInteger()) {
 // TODO(edisonn): report/warning
 return current;
@@ -154,7 +154,7 @@ unsigned char* SkNativeParsedPDF::readCrossReferenceSection(unsigned char* xrefS
 int offset = (int)token.intValue();

 token.reset();
-current = nextObject(current, trailerEnd, &token, NULL);
+current = nextObject(current, trailerEnd, &token, NULL, NULL);
 if (!token.isInteger()) {
 // TODO(edisonn): report/warning
 return current;
@@ -162,7 +162,7 @@ unsigned char* SkNativeParsedPDF::readCrossReferenceSection(unsigned char* xrefS
 int generation = (int)token.intValue();

 token.reset();
-current = nextObject(current, trailerEnd, &token, NULL);
+current = nextObject(current, trailerEnd, &token, NULL, NULL);
 if (!token.isKeyword() || token.len() != 1 || (*token.c_str() != 'f' && *token.c_str() != 'n')) {
 // TODO(edisonn): report/warning
 return current;
@@ -179,7 +179,7 @@ long SkNativeParsedPDF::readTrailer(unsigned char* trailerStart, unsigned char*
 unsigned char* current = ignoreLine(trailerStart, trailerEnd); // TODO(edisonn): verify next keyord is "trailer" use nextObject here

 SkPdfObject token;
-current = nextObject(current, trailerEnd, &token, fAllocator);
+current = nextObject(current, trailerEnd, &token, fAllocator, NULL);
 if (!token.isDictionary()) {
 return -1;
 }
@@ -214,7 +214,7 @@ void SkNativeParsedPDF::addCrossSectionInfo(int id, int generation, int offset,
 fObjects[id].fObj = NULL;
 }

-SkPdfObject* SkNativeParsedPDF::readObject(int id/*, int expectedGeneration*/) const {
+SkPdfObject* SkNativeParsedPDF::readObject(int id/*, int expectedGeneration*/) {
 long startOffset = fObjects[id].fOffset;
 //long endOffset = fObjects[id].fOffsetEnd;
 // TODO(edisonn): use hinted endOffset
@@ -225,26 +225,26 @@ SkPdfObject* SkNativeParsedPDF::readObject(int id/*, int expectedGeneration*/) c
 unsigned char* current = fFileContent + startOffset;
 unsigned char* end = fFileContent + fContentLength;

-SkPdfNativeTokenizer tokenizer(current, end - current, fMapper, fAllocator);
+SkPdfNativeTokenizer tokenizer(current, end - current, fMapper, fAllocator, this);

 SkPdfObject idObj;
 SkPdfObject generationObj;
 SkPdfObject objKeyword;
 SkPdfObject* dict = fAllocator->allocObject();

-current = nextObject(current, end, &idObj, NULL);
+current = nextObject(current, end, &idObj, NULL, NULL);
 if (current >= end) {
 // TODO(edisonn): report warning/error
 return NULL;
 }

-current = nextObject(current, end, &generationObj, NULL);
+current = nextObject(current, end, &generationObj, NULL, NULL);
 if (current >= end) {
 // TODO(edisonn): report warning/error
 return NULL;
 }

-current = nextObject(current, end, &objKeyword, NULL);
+current = nextObject(current, end, &objKeyword, NULL, NULL);
 if (current >= end) {
 // TODO(edisonn): report warning/error
 return NULL;
@@ -258,7 +258,7 @@ SkPdfObject* SkNativeParsedPDF::readObject(int id/*, int expectedGeneration*/) c
 // TODO(edisonn): report warning/error
 }

-current = nextObject(current, end, dict, fAllocator);
+current = nextObject(current, end, dict, fAllocator, this);

 // TODO(edisonn): report warning/error - verify last token is endobj

@@ -293,7 +293,7 @@ SkPdfResourceDictionary* SkNativeParsedPDF::pageResources(int page) {
 }

 // TODO(edisonn): Partial implemented. Move the logics directly in the code generator for inheritable and default value?
-SkRect SkNativeParsedPDF::MediaBox(int page) const {
+SkRect SkNativeParsedPDF::MediaBox(int page) {
 SkPdfPageObjectDictionary* current = fPages[page];
 while (!current->has_MediaBox() && current->has_Parent()) {
 current = (SkPdfPageObjectDictionary*)current->Parent(this);
@@ -305,7 +305,7 @@ SkRect SkNativeParsedPDF::MediaBox(int page) const {
 }

 // TODO(edisonn): stream or array ... ? for now only array
-SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfPage(int page) const {
+SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfPage(int page) {
 if (fPages[page]->isContentsAStream(this)) {
 return tokenizerOfStream(fPages[page]->getContentsAsStream(this));
 } else {
@@ -315,19 +315,19 @@ SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfPage(int page) const {
 }
 }

-SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfStream(SkPdfObject* stream) const {
+SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfStream(SkPdfObject* stream) {
 if (stream == NULL) {
 return NULL;
 }

-return new SkPdfNativeTokenizer(stream, fMapper, fAllocator);
+return new SkPdfNativeTokenizer(stream, fMapper, fAllocator, this);
 }

 // TODO(edisonn): NYI
-SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfBuffer(unsigned char* buffer, size_t len) const {
+SkPdfNativeTokenizer* SkNativeParsedPDF::tokenizerOfBuffer(unsigned char* buffer, size_t len) {
 // warning does not track two calls in the same buffer! the buffer is updated!
 // make a clean copy if needed!
-return new SkPdfNativeTokenizer(buffer, len, fMapper, fAllocator);
+return new SkPdfNativeTokenizer(buffer, len, fMapper, fAllocator, this);
 }

 size_t SkNativeParsedPDF::objects() const {
@@ -376,13 +376,9 @@ SkPdfAllocator* SkNativeParsedPDF::allocator() const {
 return fAllocator;
 }

-SkPdfObject* SkNativeParsedPDF::resolveReference(SkPdfObject* ref) const {
-return (SkPdfObject*)resolveReference((const SkPdfObject*)ref);
-}
-
 // TODO(edisonn): fix infinite loop if ref to itself!
 // TODO(edisonn): perf, fix refs at load, and resolve will simply return fResolvedReference?
-SkPdfObject* SkNativeParsedPDF::resolveReference(const SkPdfObject* ref) const {
+SkPdfObject* SkNativeParsedPDF::resolveReference(const SkPdfObject* ref) {
 if (ref && ref->isReference()) {
 int id = ref->referenceId();
 // TODO(edisonn): generation/updates not supported now
@@ -42,11 +42,11 @@ public:

 int pages() const;
 SkPdfResourceDictionary* pageResources(int page);
-SkRect MediaBox(int page) const;
-SkPdfNativeTokenizer* tokenizerOfPage(int n) const;
+SkRect MediaBox(int page);
+SkPdfNativeTokenizer* tokenizerOfPage(int n);

-SkPdfNativeTokenizer* tokenizerOfStream(SkPdfObject* stream) const;
-SkPdfNativeTokenizer* tokenizerOfBuffer(unsigned char* buffer, size_t len) const;
+SkPdfNativeTokenizer* tokenizerOfStream(SkPdfObject* stream);
+SkPdfNativeTokenizer* tokenizerOfBuffer(unsigned char* buffer, size_t len);

 size_t objects() const;
 SkPdfObject* object(int i);
@@ -61,8 +61,7 @@ public:

 void drawPage(int page, SkCanvas* canvas);

-SkPdfObject* resolveReference(SkPdfObject* ref) const;
-SkPdfObject* resolveReference(const SkPdfObject* ref) const;
+SkPdfObject* resolveReference(const SkPdfObject* ref);

 private:

@@ -77,7 +76,7 @@ private:
 obj->fOffset = -1;
 }

-SkPdfObject* readObject(int id/*, int generation*/) const;
+SkPdfObject* readObject(int id/*, int generation*/);

 void fillPages(SkPdfPageTreeNodeDictionary* tree);

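The net effect on callers of the document API is that these entry points are no longer const; a small usage sketch, assuming an already-loaded document with at least one page (the helper function is illustrative):

    // Illustrative caller: everything below now requires a non-const document.
    void exampleUse(SkNativeParsedPDF* doc, const SkPdfObject* ref) {
        SkRect box = doc->MediaBox(0);                         // was const before this CL
        SkPdfNativeTokenizer* tok = doc->tokenizerOfPage(0);   // was const before this CL
        SkPdfObject* resolved = doc->resolveReference(ref);    // single overload remains
        delete tok;  // tokenizerOfStream/tokenizerOfBuffer return new'd tokenizers
        (void)box; (void)resolved;
    }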
@@ -40,7 +40,7 @@ static unsigned char* endOfPdfToken(unsigned char* start, unsigned char* end) {
 }

 // last elem has to be ]
-static unsigned char* readArray(unsigned char* start, unsigned char* end, SkPdfObject* array, SkPdfAllocator* allocator) {
+static unsigned char* readArray(unsigned char* start, unsigned char* end, SkPdfObject* array, SkPdfAllocator* allocator, SkNativeParsedPDF* doc) {
 while (start < end) {
 // skip white spaces
 start = skipPdfWhiteSpaces(start, end);
@@ -57,7 +57,7 @@ static unsigned char* readArray(unsigned char* start, unsigned char* end, SkPdfO
 }

 SkPdfObject* newObj = allocator->allocObject();
-start = nextObject(start, end, newObj, allocator);
+start = nextObject(start, end, newObj, allocator, doc);
 // TODO(edisonn): perf/memory: put the variables on the stack, and flush them on the array only when
 // we are sure they are not references!
 if (newObj->isKeywordReference() && array->size() >= 2 && array->objAtAIndex(array->size() - 1)->isInteger() && array->objAtAIndex(array->size() - 2)->isInteger()) {
@@ -443,7 +443,7 @@ and it could get worse, with multiple object like this
 // right now implement the silly algorithm that assumes endstream is finishing the stream


-static unsigned char* readStream(unsigned char* start, unsigned char* end, SkPdfObject* dict) {
+static unsigned char* readStream(unsigned char* start, unsigned char* end, SkPdfObject* dict, SkNativeParsedPDF* doc) {
 start = skipPdfWhiteSpaces(start, end);
 if (!(start[0] == 's' && start[1] == 't' && start[2] == 'r' && start[3] == 'e' && start[4] == 'a' && start[5] == 'm')) {
 // no stream. return.
@@ -462,8 +462,8 @@ static unsigned char* readStream(unsigned char* start, unsigned char* end, SkPdf
 int64_t length = -1;

 // TODO(edisonn): very basic implementation
-if (stream->has_Length() && stream->Length(NULL) > 0) {
-length = stream->Length(NULL);
+if (stream->has_Length() && stream->Length(doc) > 0) {
+length = stream->Length(doc);
 }

 // TODO(edisonn): laod external streams
@@ -504,7 +504,7 @@ static unsigned char* readStream(unsigned char* start, unsigned char* end, SkPdf
 return start;
 }

-static unsigned char* readDictionary(unsigned char* start, unsigned char* end, SkPdfObject* dict, SkPdfAllocator* allocator) {
+static unsigned char* readDictionary(unsigned char* start, unsigned char* end, SkPdfObject* dict, SkPdfAllocator* allocator, SkNativeParsedPDF* doc) {
 SkPdfObject::makeEmptyDictionary(dict);

 start = skipPdfWhiteSpaces(start, end);
@@ -518,7 +518,7 @@ static unsigned char* readDictionary(unsigned char* start, unsigned char* end, S

 if (start < end) {
 SkPdfObject* value = allocator->allocObject();
-start = nextObject(start, end, value, allocator);
+start = nextObject(start, end, value, allocator, doc);

 start = skipPdfWhiteSpaces(start, end);

@@ -526,10 +526,10 @@ static unsigned char* readDictionary(unsigned char* start, unsigned char* end, S
 // seems we have an indirect reference
 if (isPdfDigit(*start)) {
 SkPdfObject generation;
-start = nextObject(start, end, &generation, allocator);
+start = nextObject(start, end, &generation, allocator, doc);

 SkPdfObject keywordR;
-start = nextObject(start, end, &keywordR, allocator);
+start = nextObject(start, end, &keywordR, allocator, doc);

 if (value->isInteger() && generation.isInteger() && keywordR.isKeywordReference()) {
 int64_t id = value->intValue();
@@ -566,12 +566,12 @@ static unsigned char* readDictionary(unsigned char* start, unsigned char* end, S
 // TODO(edisonn): read stream ... put dict and stream in a struct, and have a pointer to struct ...
 // or alocate 2 objects, and if there is no stream, free it to be used by someone else? or just leave it ?

-start = readStream(start, end, dict);
+start = readStream(start, end, dict, doc);

 return start;
 }

-unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject* token, SkPdfAllocator* allocator) {
+unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject* token, SkPdfAllocator* allocator, SkNativeParsedPDF* doc) {
 unsigned char* current;

 // skip white spaces
@@ -592,7 +592,7 @@ unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject*
 case kOpenedSquareBracket_PdfDelimiter:
 *start = '\0';
 SkPdfObject::makeEmptyArray(token);
-return readArray(current, end, token, allocator);
+return readArray(current, end, token, allocator, doc);

 case kOpenedRoundBracket_PdfDelimiter:
 *start = '\0';
@@ -602,7 +602,7 @@ unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject*
 *start = '\0';
 if (end > start + 1 && start[1] == kOpenedInequityBracket_PdfDelimiter) {
 // TODO(edisonn): pass here the length somehow?
-return readDictionary(start + 2, end, token, allocator); // skip <<
+return readDictionary(start + 2, end, token, allocator, doc); // skip <<
 } else {
 return readHexString(start + 1, end, token); // skip <
 }
@@ -678,7 +678,7 @@ SkPdfObject* SkPdfAllocator::allocObject() {
 }

 // TODO(edisonn): perf: do no copy the buffers, but use them, and mark cache the result, so there is no need of a second pass
-SkPdfNativeTokenizer::SkPdfNativeTokenizer(SkPdfObject* objWithStream, const SkPdfMapper* mapper, SkPdfAllocator* allocator) : fMapper(mapper), fAllocator(allocator), fUncompressedStream(NULL), fUncompressedStreamEnd(NULL), fEmpty(false), fHasPutBack(false) {
+SkPdfNativeTokenizer::SkPdfNativeTokenizer(SkPdfObject* objWithStream, const SkPdfMapper* mapper, SkPdfAllocator* allocator, SkNativeParsedPDF* doc) : fDoc(doc), fMapper(mapper), fAllocator(allocator), fUncompressedStream(NULL), fUncompressedStreamEnd(NULL), fEmpty(false), fHasPutBack(false) {
 unsigned char* buffer = NULL;
 size_t len = 0;
 objWithStream->GetFilteredStreamRef(&buffer, &len, fAllocator);
@@ -692,7 +692,7 @@ SkPdfNativeTokenizer::SkPdfNativeTokenizer(SkPdfObject* objWithStream, const SkP
 memcpy(fUncompressedStream, buffer, len);
 }

-SkPdfNativeTokenizer::SkPdfNativeTokenizer(unsigned char* buffer, int len, const SkPdfMapper* mapper, SkPdfAllocator* allocator) : fMapper(mapper), fAllocator(allocator), fEmpty(false), fHasPutBack(false) {
+SkPdfNativeTokenizer::SkPdfNativeTokenizer(unsigned char* buffer, int len, const SkPdfMapper* mapper, SkPdfAllocator* allocator, SkNativeParsedPDF* doc) : fDoc(doc), fMapper(mapper), fAllocator(allocator), fEmpty(false), fHasPutBack(false) {
 // TODO(edisonn): hack, find end of object
 char* endobj = strstr((char*)buffer, "endobj");
 if (endobj) {
@@ -716,7 +716,7 @@ bool SkPdfNativeTokenizer::readTokenCore(PdfToken* token) {
 }

 SkPdfObject obj;
-fUncompressedStream = nextObject(fUncompressedStream, fUncompressedStreamEnd, &obj, fAllocator);
+fUncompressedStream = nextObject(fUncompressedStream, fUncompressedStreamEnd, &obj, fAllocator, fDoc);

 // If it is a keyword, we will only get the pointer of the string
 if (obj.type() == SkPdfObject::kKeyword_PdfObjectType) {
@@ -102,7 +102,8 @@ public:
 }
 };

-unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject* token, SkPdfAllocator* allocator);
+class SkNativeParsedPDF;
+unsigned char* nextObject(unsigned char* start, unsigned char* end, SkPdfObject* token, SkPdfAllocator* allocator, SkNativeParsedPDF* doc);

 enum SkPdfTokenType {
 kKeyword_TokenType,
@@ -120,8 +121,8 @@ struct PdfToken {

 class SkPdfNativeTokenizer {
 public:
-SkPdfNativeTokenizer(SkPdfObject* objWithStream, const SkPdfMapper* mapper, SkPdfAllocator* allocator);
-SkPdfNativeTokenizer(unsigned char* buffer, int len, const SkPdfMapper* mapper, SkPdfAllocator* allocator);
+SkPdfNativeTokenizer(SkPdfObject* objWithStream, const SkPdfMapper* mapper, SkPdfAllocator* allocator, SkNativeParsedPDF* doc);
+SkPdfNativeTokenizer(unsigned char* buffer, int len, const SkPdfMapper* mapper, SkPdfAllocator* allocator, SkNativeParsedPDF* doc);

 virtual ~SkPdfNativeTokenizer();

@@ -130,6 +131,7 @@ public:
 void PutBack(PdfToken token);

 private:
+SkNativeParsedPDF* fDoc;
 const SkPdfMapper* fMapper;
 SkPdfAllocator* fAllocator;

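Finally, the tokenizer itself keeps the document it was created for: both constructors take the new trailing SkNativeParsedPDF* and store it in fDoc (declared and initialized ahead of fMapper), and readTokenCore() forwards it into nextObject(). Direct construction would look like this sketch, with the arguments assumed to come from the surrounding code:

    // Illustrative: buffer-based tokenizer with the new document argument.
    static void tokenizeBuffer(unsigned char* buffer, int len, const SkPdfMapper* mapper,
                               SkPdfAllocator* allocator, SkNativeParsedPDF* doc) {
        SkPdfNativeTokenizer tokenizer(buffer, len, mapper, allocator, doc);
        // ... read tokens; references encountered are resolved through fDoc ...
    }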