ICU-20362 segfault/leftover files with long lines
If a file contains an input line larger than INT32_MAX (i.e. 2 GB) and a UTF-8 character occurs after that limit, escapesrc crashes on 64-bit systems or fails to remove incomplete output files on 32-bit systems.

The issue is an unchecked cast from size_t to int32_t, which can turn negative and lead to negative offsets during array access. This eventually results in an out-of-bounds read, which most likely crashes the tool.

This patch sets a fixed limit of 1 GB to make sure that no side effects occur even if a line is exactly INT32_MAX bytes long or a few bytes shorter. That should still be far more than anyone really needs.

Signed-off-by: Tobias Stoeckmann <tobias@stoeckmann.org>
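A minimal standalone sketch (not part of the patch, and not the escapesrc code itself) of the narrowing problem described above, assuming a 64-bit size_t:

    #include <cstdint>
    #include <iostream>

    int main() {
        // Hypothetical length just past INT32_MAX; only reachable with a 64-bit size_t.
        size_t len = static_cast<size_t>(INT32_MAX) + 16;

        // Unchecked narrowing: the value no longer fits in 32 bits, and on the usual
        // two's-complement platforms the result is negative.
        int32_t offset = static_cast<int32_t>(len);

        std::cout << "len = " << len << " -> offset = " << offset << "\n";

        // A negative offset used later as an index into the line buffer reads out of
        // bounds, which is the crash described above.
        return 0;
    }

Capping lines at INT32_MAX/2 (about 1 GB) leaves headroom so that even a line close to the limit cannot push any int32_t offset derived from its length out of the positive range.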
commit 7369eff4ed (parent c04f9f1c01)
@@ -327,6 +327,9 @@ bool fixLine(int /*no*/, std::string &linestr) {
 
     // start from the end and find all u" cases
     size_t pos = len = linestr.size();
+    if(len>INT32_MAX/2) {
+        return true;
+    }
     while((pos>0) && (pos = linestr.rfind("u\"", pos)) != std::string::npos) {
         //printf("found doublequote at %d\n", pos);
         if(fixAt(linestr, pos)) return true;
@@ -391,15 +394,19 @@ int convert(const std::string &infile, const std::string &outfile) {
     while( getline( inf, linestr)) {
         no++;
         if(fixLine(no, linestr)) {
-            outf.close();
-            fprintf(stderr, "%s:%d: Fixup failed by %s\n", infile.c_str(), no, prog.c_str());
-            cleanup(outfile);
-            return 1;
+            goto fail;
         }
         outf << linestr << '\n';
     }
 
-    return 0;
+    if(inf.eof()) {
+        return 0;
+    }
+fail:
+    outf.close();
+    fprintf(stderr, "%s:%d: Fixup failed by %s\n", infile.c_str(), no, prog.c_str());
+    cleanup(outfile);
+    return 1;
 }
 
 /**