Merge pull request #1189 from tdesveauxPKFX/host/fix_normalize_tokens

Host path.normalize: Fix normalization for paths containing tokens
Samuel Surtees 2018-11-13 17:59:39 +10:00 committed by GitHub
commit 85d0b3e34c
2 changed files with 81 additions and 1 deletion
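The change teaches the host-side normalizer to treat token and variable references as opaque: the input is split into plain path segments, which are normalized as before, and token spans, which are copied through verbatim. Below is a minimal standalone sketch of that splitting strategy, not part of the diff; the helper name token_span and the example path are illustrative, and the Windows %VAR% case is simplified compared to the real skip_tokens added in the commit.

#include <stdio.h>
#include <string.h>

/* Length of the token span starting at s, or 0 if s does not start one.
 * The openers and closers mirror the macros added in the diff; unlike the
 * real skip_tokens, a Windows %VAR% reference is treated as a single span
 * here to keep the sketch short. */
static size_t token_span(const char* s)
{
	char closer;
	size_t skip; /* characters consumed by the opener */
	if (s[0] == '%' && s[1] == '{')       { closer = '}'; skip = 2; } /* Premake %{...} */
	else if (s[0] == '$' && s[1] == '(')  { closer = ')'; skip = 2; } /* VS $(...)      */
	else if (s[0] == '$' && s[1] == '{')  { closer = '}'; skip = 2; } /* Unix ${...}    */
	else if (s[0] == '%' && s[1] != '\0') { closer = '%'; skip = 1; } /* Windows %...%  */
	else return 0;

	const char* end = strchr(s + skip, closer);
	return end != NULL ? (size_t)(end - s) + 1 : strlen(s);
}

int main(void)
{
	const char* path = "%{wks.location}../../${MYVAR}/../test";
	const char* p = path;
	while (*p) {
		size_t n = token_span(p);
		if (n > 0) {
			/* token span: the normalizer must leave this untouched */
			printf("copy verbatim: %.*s\n", (int)n, p);
			p += n;
		}
		else {
			/* plain segment: safe to hand to the normalizer */
			size_t len = 0;
			while (p[len] != '\0' && token_span(p + len) == 0)
				++len;
			printf("normalize    : %.*s\n", (int)len, p);
			p += len;
		}
	}
	return 0;
}

For the example path it reports %{wks.location} and ${MYVAR} as copy-verbatim spans, and ../../ and /../test as the segments that go through normalization.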


@@ -17,6 +17,18 @@
#define IS_SPACE(__c) ((__c >= '\t' && __c <= '\r') || __c == ' ')
#define IS_WIN_ENVVAR_START(__c) (*__c == '%')
#define IS_WIN_ENVVAR_END(__c) (*__c == '%')
#define IS_VS_VAR_START(__c) (*__c == '$' && __c[1] == '(')
#define IS_VS_VAR_END(__c) (*__c == ')')
#define IS_UNIX_ENVVAR_START(__c) (*__c == '$' && __c[1] == '{')
#define IS_UNIX_ENVVAR_END(__c) (*__c == '}')
#define IS_PREMAKE_TOKEN_START(__c) (*__c == '%' && __c[1] == '{')
#define IS_PREMAKE_TOKEN_END(__c) (*__c == '}')
static void* normalize_substring(const char* srcPtr, const char* srcEnd, char* dstPtr)
{
#define IS_END(__p) (__p >= srcEnd || *__p == '\0')
@@ -93,8 +105,38 @@ static void* normalize_substring(const char* srcPtr, const char* srcEnd, char* dstPtr)
		--dstPtr;
	return dstPtr;
#undef IS_END
#undef IS_SEP_OR_END
}
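/* Returns the number of characters at readPtr that the caller should copy
 * through verbatim because they belong to a token or variable reference
 * (Premake %{...} tokens, Visual Studio $(...) macros, ${...} and %...%
 * environment variables); returns 0 when readPtr does not start with one. */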
static int skip_tokens(const char *readPtr)
{
	int skipped = 0;
#define DO_SKIP_FOR(__kind)\
	if (IS_ ## __kind ## _START(readPtr)) { \
		do \
		{ \
			skipped++; \
		} while (!IS_ ## __kind ## _END(readPtr++)); \
	} \
	// DO_SKIP_FOR
	do
	{
		DO_SKIP_FOR(PREMAKE_TOKEN)
		DO_SKIP_FOR(WIN_ENVVAR)
		DO_SKIP_FOR(VS_VAR)
		DO_SKIP_FOR(UNIX_ENVVAR)
	} while (IS_WIN_ENVVAR_START(readPtr) ||
		IS_VS_VAR_START(readPtr) ||
		IS_UNIX_ENVVAR_START(readPtr) ||
		IS_PREMAKE_TOKEN_START(readPtr));
	return skipped;
#undef DO_SKIP_FOR
}
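/* For example, with the macros above: skip_tokens("%{wks.location}/x") returns 15,
 * skip_tokens("${HOME}/x") returns 7, and skip_tokens("x") returns 0. For a
 * Windows-style "%APPDATA%" the opener and closer are the same character, so each
 * call counts only a single '%', and the text in between is handled as an
 * ordinary sub path on later iterations of path_normalize below. */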
int path_normalize(lua_State* L)
{
@@ -111,9 +153,31 @@ int path_normalize(lua_State* L)
	endPtr = readPtr;
	while (*endPtr) {
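		/* If a token or variable reference starts here, copy it through
		 * verbatim instead of normalizing it; re-emit the separator before it
		 * if the previous segment's normalization dropped that separator. */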
		int skipped = skip_tokens(readPtr);
		if (skipped > 0) {
			if (readPtr != path && writePtr != buffer &&
				IS_SEP(readPtr[-1]) && !IS_SEP(writePtr[-1]))
			{
				*(writePtr++) = (readPtr[-1]);
			}
			while (skipped-- > 0)
				*(writePtr++) = *(readPtr++);
			endPtr = readPtr;
		}
		// find the end of sub path
		while (*endPtr && !IS_SPACE(*endPtr))
		while (*endPtr && !IS_SPACE(*endPtr) &&
			!IS_WIN_ENVVAR_START(endPtr) &&
			!IS_VS_VAR_START(endPtr) &&
			!IS_UNIX_ENVVAR_START(endPtr) &&
			!IS_PREMAKE_TOKEN_START(endPtr))
		{
			++endPtr;
		}
		// path is surrounded with quotes
		if (readPtr != endPtr &&


@@ -724,3 +724,19 @@
test.isequal("\"../../test/test/\"", path.normalize("\"../../test/test/\""))
test.isequal("\"../../test/\"", path.normalize("\"../../test/../test/\""))
end

function suite.normalize_withTokens()
	-- Premake tokens
	test.isequal("%{wks.location}../../test", path.normalize("%{wks.location}../../test"))
	-- Visual Studio var
	test.isequal("$(SolutionDir)../../test", path.normalize("$(SolutionDir)../../test"))
	-- Windows env var
	test.isequal("%APPDATA%../../test", path.normalize("%APPDATA%../../test"))
	-- Unix env var
	test.isequal("${HOME}../../test", path.normalize("${HOME}../../test"))
	-- Middle
	test.isequal("../../${MYVAR}/../test", path.normalize("../../${MYVAR}/../test"))
	-- End
	test.isequal("../../test/${MYVAR}", path.normalize("../../test/${MYVAR}"))
end