pdfviewer: more plumbing for soft masks, blend modes and transparency groups
Review URL: https://codereview.chromium.org/21125002

git-svn-id: http://skia.googlecode.com/svn/trunk@10419 2bbb7eff-a529-9590-31e7-b0007b416f81
parent 08df48d241
commit e878e726bd
@@ -189,7 +189,8 @@ blend mode name or array (PDF 1.4) The current blend mode to be used in t
    transparency group XObject (see Section 7.5.5, “Transparency
    Group XObjects”). Initial value: Normal.
    */
-   SkXfermode::Mode fBlendMode;
+   SkXfermode::Mode fBlendModes[256];
+   int fBlendModesLength;

    /*
    soft mask dictionary (PDF 1.4) A soft-mask dictionary (see “Soft-Mask Dictionaries” on
@@ -327,7 +328,8 @@ smoothness number (PDF 1.3) The precision with which col
        fAlphaSource = false;
        fDashArrayLength = 0;
        fDashPhase = 0;
-       fBlendMode = SkXfermode::kSrc_Mode;  // PDF: Normal Blend mode
+       fBlendModesLength = 1;
+       fBlendModes[0] = SkXfermode::kSrc_Mode;  // PDF: Normal Blend mode
    }

    // TODO(edisonn): make two functons instead, stroking and non stoking, avoid branching
@@ -308,7 +308,7 @@ SkPdfMultiMasterFont* SkPdfFont::fontFromMultiMasterFontDictionary(SkNativeParse
static int skstoi(const SkPdfObject* str) {
    // TODO(edisonn): report err of it is not a (hex) string
    int ret = 0;
-   for (unsigned int i = 0 ; i < str->len(); i++) {
+   for (unsigned int i = 0 ; i < str->lenstr(); i++) {
        ret = (ret << 8) + ((unsigned char*)str->c_str())[i];
    }
    // TODO(edisonn): character larger than 0x0000ffff not supported right now.
@@ -47,7 +47,7 @@ struct SkUnencodedText {
public:
    SkUnencodedText(const SkPdfString* obj) {
        text = (void*)obj->c_str();
-       len = obj->len();
+       len = obj->lenstr();
    }
};

@@ -668,6 +668,11 @@ static PdfResult doXObject_Form(PdfContext* pdfContext, SkCanvas* canvas, SkPdfT
    // TODO(edisonn): iterate smart on the stream even if it is compressed, tokenize it as we go.
    // For this PdfContentsTokenizer needs to be extended.

+   // This is a group?
+   if (skobj->has_Group()) {
+       //TransparencyGroupDictionary* ...
+   }
+
    SkPdfStream* stream = (SkPdfStream*)skobj;

    SkPdfNativeTokenizer* tokenizer =
@@ -1582,20 +1587,112 @@ void skpdfGraphicsStateApplyFont(PdfContext* pdfContext, SkPdfArray* fontAndSize
    skpdfGraphicsStateApplyFontCore(pdfContext, fontAndSize->objAtAIndex(0), fontAndSize->objAtAIndex(1)->numberValue());
}

+SkTDict<SkXfermode::Mode> gPdfBlendModes(20);
+
+class InitBlendModes {
+public:
+    InitBlendModes() {
+        // TODO(edisonn): use the python code generator?
+        // TABLE 7.2 Standard separable blend modes
+        gPdfBlendModes.set("Normal", SkXfermode::kSrc_Mode);
+        gPdfBlendModes.set("Multiply", SkXfermode::kMultiply_Mode);
+        gPdfBlendModes.set("Screen", SkXfermode::kScreen_Mode);
+        gPdfBlendModes.set("Overlay", SkXfermode::kOverlay_Mode);
+        gPdfBlendModes.set("Darken", SkXfermode::kDarken_Mode);
+        gPdfBlendModes.set("Lighten", SkXfermode::kLighten_Mode);
+        gPdfBlendModes.set("ColorDodge", SkXfermode::kColorDodge_Mode);
+        gPdfBlendModes.set("ColorBurn", SkXfermode::kColorBurn_Mode);
+        gPdfBlendModes.set("HardLight", SkXfermode::kHardLight_Mode);
+        gPdfBlendModes.set("SoftLight", SkXfermode::kSoftLight_Mode);
+        gPdfBlendModes.set("Difference", SkXfermode::kDifference_Mode);
+        gPdfBlendModes.set("Exclusion", SkXfermode::kExclusion_Mode);
+
+        // TABLE 7.3 Standard nonseparable blend modes
+        gPdfBlendModes.set("Hue", SkXfermode::kHue_Mode);
+        gPdfBlendModes.set("Saturation", SkXfermode::kSaturation_Mode);
+        gPdfBlendModes.set("Color", SkXfermode::kColor_Mode);
+        gPdfBlendModes.set("Luminosity", SkXfermode::kLuminosity_Mode);
+    }
+};
+
+InitBlendModes _gDummyInniter;
+
+SkXfermode::Mode xferModeFromBlendMode(const char* blendMode, size_t len) {
+    SkXfermode::Mode mode = (SkXfermode::Mode)(SkXfermode::kLastMode + 1);
+    if (gPdfBlendModes.find(blendMode, len, &mode)) {
+        return mode;
+    }
+
+    return (SkXfermode::Mode)(SkXfermode::kLastMode + 1);
+}
+
void skpdfGraphicsStateApplyBM_name(PdfContext* pdfContext, const std::string& blendMode) {
    // TODO(edisonn): verify input
+    SkXfermode::Mode mode = xferModeFromBlendMode(blendMode.c_str(), blendMode.length());
+    if (mode <= SkXfermode::kLastMode) {
+        pdfContext->fGraphicsState.fBlendModesLength = 1;
+        pdfContext->fGraphicsState.fBlendModes[0] = mode;
+    } else {
+        // TODO(edisonn): report unknown blend mode
+    }
}

void skpdfGraphicsStateApplyBM_array(PdfContext* pdfContext, SkPdfArray* blendModes) {
    // TODO(edisonn): verify input
-}
+    if (!blendModes || !blendModes->isArray() || blendModes->size() == 0 || blendModes->size() > 256) {
+        // TODO(edisonn): report error/warning
+        return;
+    }
+    SkXfermode::Mode modes[256];
+    int cnt = blendModes->size();
+    for (int i = 0; i < cnt; i++) {
+        SkPdfObject* name = blendModes->objAtAIndex(i);
+        if (!name->isName()) {
+            // TODO(edisonn): report error/warning
+            return;
+        }
+        SkXfermode::Mode mode = xferModeFromBlendMode(name->c_str(), name->lenstr());
+        if (mode > SkXfermode::kLastMode) {
+            // TODO(edisonn): report error/warning
+            return;
+        }
+        modes[i] = mode;
+    }
+
+    pdfContext->fGraphicsState.fBlendModesLength = cnt;
+    for (int i = 0; i < cnt; i++) {
+        pdfContext->fGraphicsState.fBlendModes[i] = modes[i];
+    }
+}
-
-void skpdfGraphicsStateApplySMask_name(PdfContext* pdfContext, const std::string& sMask) {
-    // TODO(edisonn): verify input
-}

void skpdfGraphicsStateApplySMask_dict(PdfContext* pdfContext, SkPdfDictionary* sMask) {
    // TODO(edisonn): verify input
+    if (pdfContext->fPdfDoc->mapper()->mapSoftMaskDictionary(sMask)) {
+        //SkPdfSoftMaskDictionary* smd = (SkPdfSoftMaskDictionary*)sMask;
+        // TODO(edisonn): load soft mask
+    } else if (pdfContext->fPdfDoc->mapper()->mapSoftMaskImageDictionary(sMask)) {
+        SkPdfSoftMaskImageDictionary* smid = (SkPdfSoftMaskImageDictionary*)sMask;
+        pdfContext->fGraphicsState.fSMask = getImageFromObject(pdfContext, smid, true);
+    } else {
+        // TODO (edisonn): report error/warning
+    }
}

+void skpdfGraphicsStateApplySMask_name(PdfContext* pdfContext, const std::string& sMask) {
+    //Next, get the ExtGState Dictionary from the Resource Dictionary:
+    SkPdfDictionary* extGStateDictionary = pdfContext->fGraphicsState.fResources->ExtGState(pdfContext->fPdfDoc);
+
+    if (extGStateDictionary == NULL) {
+#ifdef PDF_TRACE
+        printf("ExtGState is NULL!\n");
+#endif
+        // TODO (edisonn): report error/warning
+        return;
+    }
+
+    SkPdfObject* obj = pdfContext->fPdfDoc->resolveReference(extGStateDictionary->get(sMask.c_str()));
+    if (!obj || !obj->isDictionary()) {
+        // TODO (edisonn): report error/warning
+        return;
+    }
+    skpdfGraphicsStateApplySMask_dict(pdfContext, obj->asDictionary());
+}
+
void skpdfGraphicsStateApplyAIS(PdfContext* pdfContext, bool alphaSource) {
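For orientation, the lookup wired up in the hunk above is just a table from PDF /BM names (Tables 7.2 and 7.3 of the PDF 1.4 spec) to Skia transfer modes, with a "one past the last mode" sentinel for unrecognized names. The following is an illustrative, self-contained sketch of that idea only; it is not part of the CL, and it substitutes std::map and a small stand-in enum for SkTDict and SkXfermode::Mode.

// Illustrative sketch (not CL code): resolve a /BM name to a mode value,
// returning kLastMode + 1 as a sentinel for unknown names, mirroring the
// shape of xferModeFromBlendMode() above.
#include <cstdio>
#include <map>
#include <string>

enum Mode { kSrc = 0, kMultiply, kScreen, kLastMode = kScreen };  // stand-in for SkXfermode::Mode

static const std::map<std::string, Mode>& blendModeTable() {
    static const std::map<std::string, Mode> table = {
        {"Normal", kSrc}, {"Multiply", kMultiply}, {"Screen", kScreen},
    };
    return table;
}

static Mode modeFromName(const std::string& name) {
    std::map<std::string, Mode>::const_iterator it = blendModeTable().find(name);
    // Unknown blend-mode names map to the sentinel so callers can reject them.
    return it != blendModeTable().end() ? it->second : (Mode)(kLastMode + 1);
}

int main() {
    printf("Multiply -> %d\n", modeFromName("Multiply"));  // known name
    printf("Bogus    -> %d\n", modeFromName("Bogus"));     // sentinel: kLastMode + 1
    return 0;
}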
@@ -196,7 +196,7 @@ const unsigned char* SkNativeParsedPDF::readCrossReferenceSection(const unsigned

        token.reset();
        current = nextObject(0, current, trailerEnd, &token, NULL, NULL);
-       if (!token.isKeyword() || token.len() != 1 || (*token.c_str() != 'f' && *token.c_str() != 'n')) {
+       if (!token.isKeyword() || token.lenstr() != 1 || (*token.c_str() != 'f' && *token.c_str() != 'n')) {
            // TODO(edisonn): report/warning
            return current;
        }
@@ -215,7 +215,7 @@ long SkNativeParsedPDF::readTrailer(const unsigned char* trailerStart, const uns
    const unsigned char* current =
            nextObject(0, trailerStart, trailerEnd, &trailerKeyword, NULL, NULL);

-   if (!trailerKeyword.isKeyword() || strlen("trailer") != trailerKeyword.len() ||
+   if (!trailerKeyword.isKeyword() || strlen("trailer") != trailerKeyword.lenstr() ||
        strncmp(trailerKeyword.c_str(), "trailer", strlen("trailer")) != 0) {
        // TODO(edisonn): report warning, rebuild trailer from objects.
        return -1;
@@ -927,7 +927,7 @@ bool SkPdfNativeTokenizer::readTokenCore(PdfToken* token) {
    // If it is a keyword, we will only get the pointer of the string
    if (obj.type() == SkPdfObject::kKeyword_PdfObjectType) {
        token->fKeyword = obj.c_str();
-       token->fKeywordLength = obj.len();
+       token->fKeywordLength = obj.lenstr();
        token->fType = kKeyword_TokenType;
    } else {
        SkPdfObject* pobj = fAllocator->allocObject();
@@ -1062,7 +1062,7 @@ SkPdfImageDictionary* SkPdfNativeTokenizer::readInlineImage() {
        SkPdfObject* key = fAllocator->allocObject();
        fUncompressedStream = nextObject(0, fUncompressedStream, fUncompressedStreamEnd, key, fAllocator, fDoc);

-       if (key->isKeyword() && key->len() == 2 && key->c_str()[0] == 'I' && key->c_str()[1] == 'D') { // ID
+       if (key->isKeyword() && key->lenstr() == 2 && key->c_str()[0] == 'I' && key->c_str()[1] == 'D') { // ID
            fUncompressedStream = readInlineImageStream(0, fUncompressedStream, fUncompressedStreamEnd, inlineImage, fDoc);
            return inlineImage;
        } else {
@@ -131,7 +131,7 @@ public:
        }
    }

-   size_t len() const {
+   size_t lenstr() const {
        switch (fObjectType) {
            case kString_PdfObjectType:
            case kHexString_PdfObjectType:
@@ -116,9 +116,9 @@ tableToClassName = {
    'TABLE 6.5': ['Type10HalftoneDictionary', 'Additional entries specific to a type 10 halftone dictionary'],
    'TABLE 6.6': ['Type16HalftoneDictionary', 'Additional entries specific to a type 16 halftone dictionary'],
    'TABLE 6.7': ['Type5HalftoneDictionary', 'Entries in a type 5 halftone dictionary'],
-   'TABLE 7.10': ['SoftMaskDictionary', 'Entries in a soft-mask dictionary'],
-   'TABLE 7.12': ['SoftMaskImageDictionary', 'Additional entry in a soft-mask image dictionary'],
-   'TABLE 7.13': ['TransparencyGroupDictionary', 'Additional entries specific to a transparency group attributes dictionary'],
+   'TABLE 7.10': ['SoftMaskDictionary', 'Entries in a soft-mask dictionary', '', {'S': '[datatypes.PdfName(\'Alpha\'), datatypes.PdfName(\'Luminosity\')]'}],
+   'TABLE 7.12': ['SoftMaskImageDictionary', 'Additional entry in a soft-mask image dictionary', 'ImageDictionary', {'Subtype': '[datatypes.PdfName(\'Image\')]', 'ColorSpace': '[datatypes.PdfName(\'DeviceGray\'), datatypes.PdfName(\'Gray\')]'}],
+   'TABLE 7.13': ['TransparencyGroupDictionary', 'Additional entries specific to a transparency group attributes dictionary', 'XObjectDictionary', {'S': '[datatypes.PdfName(\'Transparency\')]'}],
    'TABLE 8.1': ['ViewerPreferencesDictionary', 'Entries in a viewer preferences dictionary'],
    'TABLE 8.3': ['OutlineDictionary', 'Entries in the outline dictionary'],
    'TABLE 8.4': ['OutlineItemDictionary', 'Entries in an outline item dictionary'],
@@ -222,6 +222,9 @@ tableToClassName = {
    'TABLE 9.49': ['OpiVersionDictionary', 'Entry in an OPI version dictionary'],
}

+classTree = {
+}
+
def buildKnownDictionaries():
    global tableToClassName
    global knownTypes
@@ -297,6 +300,9 @@ def commitRow(fspecPy):
    global emitedDitionaryName
    global table
    global tableToClassName
+   global classTree
    global tableKey


    if columnValues == None:
        return
@@ -392,10 +398,13 @@ def commitRow(fspecPy):
        emitedDitionaryName = tableToClassName[tableKey][0]
        comment = fix(tableToClassName[tableKey][1])


        if len(tableToClassName[tableKey]) >= 3 and tableToClassName[tableKey][2] != '':
            fspecPy.write(' pdfspec.addClass(\'' + emitedDitionaryName + '\', \'' + tableToClassName[tableKey][2] + '\', \'' + comment + '\')\\\n')
+           classTree[emitedDitionaryName] = [tableToClassName[tableKey][2], {}]
        else:
            fspecPy.write(' pdfspec.addClass(\'' + emitedDitionaryName + '\', \'Dictionary\', \'' + comment + '\')\\\n')
+           classTree[emitedDitionaryName] = ['Dictionary', {}]

        if len(tableToClassName[tableKey]) >= 4 and columnValues[0] in tableToClassName[tableKey][3]:
            required = True
@@ -410,6 +419,12 @@ def commitRow(fspecPy):
        fspecPy.write(' .type(\'' + columnValues[1] + '\')\\\n')
        fspecPy.write(' .comment(\'' + columnValues[2] + '\')\\\n')

+       classTree[emitedDitionaryName][1][columnValues[0]] = ' .field(\'' + columnValues[0] + '\')\\\n' + \
+           ' .name(\'' + columnValues[0] + '\')\\\n' + \
+           ' .type(\'' + columnValues[1] + '\')\\\n' + \
+           ' .comment(\'\')\\\n'


        if len(tableToClassName[tableKey]) >= 4 and columnValues[0] in tableToClassName[tableKey][3]:
            fspecPy.write(' .must(' + tableToClassName[tableKey][3][columnValues[0]] + ')\\\n')

@@ -461,11 +476,39 @@ def rebaseTable(fspecPy, line):
def stopTable(fspecPy):
    global tableHeaderFound
    global emitedDitionaryName
+   global tableKey
+   global classTree

    if not inTable():
        return

    commitRow(fspecPy)

+   #print tableKey
+
+   # TODO(edisonn): iterate on all requited key in the def, and if not on the definition, get definition from parent and export them
+   if len(tableToClassName[tableKey]) >= 4:
+       for field in tableToClassName[tableKey][3]:
+           #print field
+           if not field in classTree[emitedDitionaryName][1]:
+               fieldDef = ''
+               searchKey = classTree[emitedDitionaryName][0]
+               while searchKey != 'Dictionary' and (not field in classTree[searchKey][1]):
+                   searchKey = classTree[searchKey][0]
+
+               if searchKey != 'Dictionary' and (field in classTree[searchKey][1]):
+                   #print tableToClassName[tableKey][3][field]
+                   #print classTree[searchKey][1][field]
+                   # TODO(edisonns): hack - for required fields, they need to be downgraded to only a type
+                   classTree[searchKey][1][field] = classTree[searchKey][1][field].replace(' or array', '')
+                   classTree[searchKey][1][field] = classTree[searchKey][1][field].replace(' or distionary', '')
+                   fspecPy.write(' .required(\'NULL\')\\\n')
+                   fspecPy.write(classTree[searchKey][1][field])
+                   fspecPy.write(' .must(' + tableToClassName[tableKey][3][field] + ')\\\n')
+                   fspecPy.write(' .done().done()\\\n')
+               else:
+                   print 'ERROR' + tableKey + '.' + field;
+
    tableHeaderFound = False
    emitedDitionaryName = ''
    fspecPy.write(' .done()\n')
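The new stopTable() logic above resolves a required field by walking a class's parent chain in classTree until it finds a field definition or reaches the root 'Dictionary'. A minimal self-contained sketch of that parent-chain lookup follows; it is illustrative only, uses hypothetical names, and stands in a plain std::map for the generator's Python dictionaries.

// Illustrative sketch (not CL code): walk a class -> parent chain to find an
// inherited field definition, stopping at the root ("Dictionary").
#include <cstdio>
#include <map>
#include <string>

struct ClassInfo {
    std::string parent;                          // parent class name
    std::map<std::string, std::string> fields;   // field name -> definition
};

static std::string resolveField(const std::map<std::string, ClassInfo>& tree,
                                const std::string& className,
                                const std::string& field) {
    std::string key = className;
    while (key != "Dictionary") {
        std::map<std::string, ClassInfo>::const_iterator cls = tree.find(key);
        if (cls == tree.end()) {
            break;  // unknown class: nothing more to walk
        }
        std::map<std::string, std::string>::const_iterator it = cls->second.fields.find(field);
        if (it != cls->second.fields.end()) {
            return it->second;  // found a definition on this ancestor
        }
        key = cls->second.parent;  // keep walking up the parent chain
    }
    return "";  // not defined anywhere up to the root
}

int main() {
    std::map<std::string, ClassInfo> tree;
    tree["ImageDictionary"] = ClassInfo{"Dictionary", {{"Subtype", ".type('name')"}}};
    tree["SoftMaskImageDictionary"] = ClassInfo{"ImageDictionary", {}};

    // "Subtype" is not defined on SoftMaskImageDictionary itself, so it is
    // picked up from its parent, ImageDictionary.
    printf("%s\n", resolveField(tree, "SoftMaskImageDictionary", "Subtype").c_str());
    return 0;
}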
@@ -569,7 +612,7 @@ def processLineCore(fspecPy, line):
        return False

    if first != '' and second != '' and third[0] != '(':
-       stopTable()
+       stopTable(fspecPy)
        return False

    if first == '' and second != '' and second[0] == ' ':
@@ -55,6 +55,8 @@
      ],
      'outputs': [
        '<(SHARED_INTERMEDIATE_DIR)/native/autogen/SkPdfEnums_autogen.h',
        '<(SHARED_INTERMEDIATE_DIR)/native/autogen/SkPdfMapper_autogen.h',
        '<(SHARED_INTERMEDIATE_DIR)/native/autogen/SkPdfHeaders_autogen.h',
        '<(SHARED_INTERMEDIATE_DIR)/native/autogen/SkPdfMapper_autogen.cpp',
        '<(SHARED_INTERMEDIATE_DIR)/native/autogen/SkPdfHeaders_autogen.cpp',
        # TODO(edisonn): ok, there are many more files here, which we should list but since