diff --git a/src/ciphers/aes/aes.c b/src/ciphers/aes/aes.c
index 17122481..8ba1bfcc 100644
--- a/src/ciphers/aes/aes.c
+++ b/src/ciphers/aes/aes.c
@@ -91,20 +91,20 @@ const struct ltc_cipher_descriptor aes_enc_desc =
 
 static ulong32 setup_mix(ulong32 temp)
 {
-   return (Te4_3[byte(temp, 2)]) ^
-          (Te4_2[byte(temp, 1)]) ^
-          (Te4_1[byte(temp, 0)]) ^
-          (Te4_0[byte(temp, 3)]);
+   return (Te4_3[LTC_BYTE(temp, 2)]) ^
+          (Te4_2[LTC_BYTE(temp, 1)]) ^
+          (Te4_1[LTC_BYTE(temp, 0)]) ^
+          (Te4_0[LTC_BYTE(temp, 3)]);
 }
 
 #ifndef ENCRYPT_ONLY
 #ifdef LTC_SMALL_CODE
 static ulong32 setup_mix2(ulong32 temp)
 {
-   return Td0(255 & Te4[byte(temp, 3)]) ^
-          Td1(255 & Te4[byte(temp, 2)]) ^
-          Td2(255 & Te4[byte(temp, 1)]) ^
-          Td3(255 & Te4[byte(temp, 0)]);
+   return Td0(255 & Te4[LTC_BYTE(temp, 3)]) ^
+          Td1(255 & Te4[LTC_BYTE(temp, 2)]) ^
+          Td2(255 & Te4[LTC_BYTE(temp, 1)]) ^
+          Td3(255 & Te4[LTC_BYTE(temp, 0)]);
 }
 #endif
 #endif
@@ -235,28 +235,28 @@ int SETUP(const unsigned char *key, int keylen, int num_rounds, symmetric_key *s
 #else
        temp = rrk[0];
        rk[0] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[1];
        rk[1] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[2];
        rk[2] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
        temp = rrk[3];
        rk[3] =
-           Tks0[byte(temp, 3)] ^
-           Tks1[byte(temp, 2)] ^
-           Tks2[byte(temp, 1)] ^
-           Tks3[byte(temp, 0)];
+           Tks0[LTC_BYTE(temp, 3)] ^
+           Tks1[LTC_BYTE(temp, 2)] ^
+           Tks2[LTC_BYTE(temp, 1)] ^
+           Tks3[LTC_BYTE(temp, 0)];
 #endif
     }
 
@@ -311,28 +311,28 @@ int ECB_ENC(const unsigned char *pt, unsigned char *ct, const symmetric_key *ske
    for (r = 0; ; r++) {
        rk += 4;
        t0 =
-           Te0(byte(s0, 3)) ^
-           Te1(byte(s1, 2)) ^
-           Te2(byte(s2, 1)) ^
-           Te3(byte(s3, 0)) ^
+           Te0(LTC_BYTE(s0, 3)) ^
+           Te1(LTC_BYTE(s1, 2)) ^
+           Te2(LTC_BYTE(s2, 1)) ^
+           Te3(LTC_BYTE(s3, 0)) ^
            rk[0];
        t1 =
-           Te0(byte(s1, 3)) ^
-           Te1(byte(s2, 2)) ^
-           Te2(byte(s3, 1)) ^
-           Te3(byte(s0, 0)) ^
+           Te0(LTC_BYTE(s1, 3)) ^
+           Te1(LTC_BYTE(s2, 2)) ^
+           Te2(LTC_BYTE(s3, 1)) ^
+           Te3(LTC_BYTE(s0, 0)) ^
            rk[1];
        t2 =
-           Te0(byte(s2, 3)) ^
-           Te1(byte(s3, 2)) ^
-           Te2(byte(s0, 1)) ^
-           Te3(byte(s1, 0)) ^
+           Te0(LTC_BYTE(s2, 3)) ^
+           Te1(LTC_BYTE(s3, 2)) ^
+           Te2(LTC_BYTE(s0, 1)) ^
+           Te3(LTC_BYTE(s1, 0)) ^
            rk[2];
        t3 =
-           Te0(byte(s3, 3)) ^
-           Te1(byte(s0, 2)) ^
-           Te2(byte(s1, 1)) ^
-           Te3(byte(s2, 0)) ^
+           Te0(LTC_BYTE(s3, 3)) ^
+           Te1(LTC_BYTE(s0, 2)) ^
+           Te2(LTC_BYTE(s1, 1)) ^
+           Te3(LTC_BYTE(s2, 0)) ^
            rk[3];
        if (r == Nr-2) {
           break;
@@ -349,28 +349,28 @@ int ECB_ENC(const unsigned char *pt, unsigned char *ct, const symmetric_key *ske
    r = Nr >> 1;
    for (;;) {
        t0 =
-           Te0(byte(s0, 3)) ^
-           Te1(byte(s1, 2)) ^
-           Te2(byte(s2, 1)) ^
-           Te3(byte(s3, 0)) ^
+           Te0(LTC_BYTE(s0, 3)) ^
+           Te1(LTC_BYTE(s1, 2)) ^
+           Te2(LTC_BYTE(s2, 1)) ^
+           Te3(LTC_BYTE(s3, 0)) ^
            rk[4];
        t1 =
-           Te0(byte(s1, 3)) ^
-           Te1(byte(s2, 2)) ^
-           Te2(byte(s3, 1)) ^
-           Te3(byte(s0, 0)) ^
+           Te0(LTC_BYTE(s1, 3)) ^
+           Te1(LTC_BYTE(s2, 2)) ^
+           Te2(LTC_BYTE(s3, 1)) ^
+           Te3(LTC_BYTE(s0, 0)) ^
            rk[5];
        t2 =
-           Te0(byte(s2, 3)) ^
-           Te1(byte(s3, 2)) ^
-           Te2(byte(s0, 1)) ^
-           Te3(byte(s1, 0)) ^
+           Te0(LTC_BYTE(s2, 3)) ^
+           Te1(LTC_BYTE(s3, 2)) ^
+           Te2(LTC_BYTE(s0, 1)) ^
+           Te3(LTC_BYTE(s1, 0)) ^
            rk[6];
        t3 =
-           Te0(byte(s3, 3)) ^
-           Te1(byte(s0, 2)) ^
-           Te2(byte(s1, 1)) ^
-           Te3(byte(s2, 0)) ^
+           Te0(LTC_BYTE(s3, 3)) ^
+           Te1(LTC_BYTE(s0, 2)) ^
+           Te2(LTC_BYTE(s1, 1)) ^
+           Te3(LTC_BYTE(s2, 0)) ^
            rk[7];
 
        rk += 8;
@@ -379,28 +379,28 @@ int ECB_ENC(const unsigned char *pt, unsigned char *ct, const symmetric_key *ske
        }
 
        s0 =
-           Te0(byte(t0, 3)) ^
-           Te1(byte(t1, 2)) ^
-           Te2(byte(t2, 1)) ^
-           Te3(byte(t3, 0)) ^
+           Te0(LTC_BYTE(t0, 3)) ^
+           Te1(LTC_BYTE(t1, 2)) ^
+           Te2(LTC_BYTE(t2, 1)) ^
+           Te3(LTC_BYTE(t3, 0)) ^
            rk[0];
        s1 =
-           Te0(byte(t1, 3)) ^
-           Te1(byte(t2, 2)) ^
-           Te2(byte(t3, 1)) ^
-           Te3(byte(t0, 0)) ^
+           Te0(LTC_BYTE(t1, 3)) ^
+           Te1(LTC_BYTE(t2, 2)) ^
+           Te2(LTC_BYTE(t3, 1)) ^
+           Te3(LTC_BYTE(t0, 0)) ^
            rk[1];
        s2 =
-           Te0(byte(t2, 3)) ^
-           Te1(byte(t3, 2)) ^
-           Te2(byte(t0, 1)) ^
-           Te3(byte(t1, 0)) ^
+           Te0(LTC_BYTE(t2, 3)) ^
+           Te1(LTC_BYTE(t3, 2)) ^
+           Te2(LTC_BYTE(t0, 1)) ^
+           Te3(LTC_BYTE(t1, 0)) ^
            rk[2];
        s3 =
-           Te0(byte(t3, 3)) ^
-           Te1(byte(t0, 2)) ^
-           Te2(byte(t1, 1)) ^
-           Te3(byte(t2, 0)) ^
+           Te0(LTC_BYTE(t3, 3)) ^
+           Te1(LTC_BYTE(t0, 2)) ^
+           Te2(LTC_BYTE(t1, 1)) ^
+           Te3(LTC_BYTE(t2, 0)) ^
            rk[3];
    }
@@ -411,31 +411,31 @@ int ECB_ENC(const unsigned char *pt, unsigned char *ct, const symmetric_key *ske
     * map cipher state to byte array block:
     */
    s0 =
-       (Te4_3[byte(t0, 3)]) ^
-       (Te4_2[byte(t1, 2)]) ^
-       (Te4_1[byte(t2, 1)]) ^
-       (Te4_0[byte(t3, 0)]) ^
+       (Te4_3[LTC_BYTE(t0, 3)]) ^
+       (Te4_2[LTC_BYTE(t1, 2)]) ^
+       (Te4_1[LTC_BYTE(t2, 1)]) ^
+       (Te4_0[LTC_BYTE(t3, 0)]) ^
        rk[0];
    STORE32H(s0, ct);
    s1 =
-       (Te4_3[byte(t1, 3)]) ^
-       (Te4_2[byte(t2, 2)]) ^
-       (Te4_1[byte(t3, 1)]) ^
-       (Te4_0[byte(t0, 0)]) ^
+       (Te4_3[LTC_BYTE(t1, 3)]) ^
+       (Te4_2[LTC_BYTE(t2, 2)]) ^
+       (Te4_1[LTC_BYTE(t3, 1)]) ^
+       (Te4_0[LTC_BYTE(t0, 0)]) ^
        rk[1];
    STORE32H(s1, ct+4);
    s2 =
-       (Te4_3[byte(t2, 3)]) ^
-       (Te4_2[byte(t3, 2)]) ^
-       (Te4_1[byte(t0, 1)]) ^
-       (Te4_0[byte(t1, 0)]) ^
+       (Te4_3[LTC_BYTE(t2, 3)]) ^
+       (Te4_2[LTC_BYTE(t3, 2)]) ^
+       (Te4_1[LTC_BYTE(t0, 1)]) ^
+       (Te4_0[LTC_BYTE(t1, 0)]) ^
        rk[2];
    STORE32H(s2, ct+8);
    s3 =
-       (Te4_3[byte(t3, 3)]) ^
-       (Te4_2[byte(t0, 2)]) ^
-       (Te4_1[byte(t1, 1)]) ^
-       (Te4_0[byte(t2, 0)]) ^
+       (Te4_3[LTC_BYTE(t3, 3)]) ^
+       (Te4_2[LTC_BYTE(t0, 2)]) ^
+       (Te4_1[LTC_BYTE(t1, 1)]) ^
+       (Te4_0[LTC_BYTE(t2, 0)]) ^
        rk[3];
    STORE32H(s3, ct+12);
@@ -490,28 +490,28 @@ int ECB_DEC(const unsigned char *ct, unsigned char *pt, const symmetric_key *ske
    for (r = 0; ; r++) {
        rk += 4;
        t0 =
-           Td0(byte(s0, 3)) ^
-           Td1(byte(s3, 2)) ^
-           Td2(byte(s2, 1)) ^
-           Td3(byte(s1, 0)) ^
+           Td0(LTC_BYTE(s0, 3)) ^
+           Td1(LTC_BYTE(s3, 2)) ^
+           Td2(LTC_BYTE(s2, 1)) ^
+           Td3(LTC_BYTE(s1, 0)) ^
            rk[0];
        t1 =
-           Td0(byte(s1, 3)) ^
-           Td1(byte(s0, 2)) ^
-           Td2(byte(s3, 1)) ^
-           Td3(byte(s2, 0)) ^
+           Td0(LTC_BYTE(s1, 3)) ^
+           Td1(LTC_BYTE(s0, 2)) ^
+           Td2(LTC_BYTE(s3, 1)) ^
+           Td3(LTC_BYTE(s2, 0)) ^
            rk[1];
        t2 =
-           Td0(byte(s2, 3)) ^
-           Td1(byte(s1, 2)) ^
-           Td2(byte(s0, 1)) ^
-           Td3(byte(s3, 0)) ^
+           Td0(LTC_BYTE(s2, 3)) ^
+           Td1(LTC_BYTE(s1, 2)) ^
+           Td2(LTC_BYTE(s0, 1)) ^
+           Td3(LTC_BYTE(s3, 0)) ^
            rk[2];
        t3 =
-           Td0(byte(s3, 3)) ^
-           Td1(byte(s2, 2)) ^
-           Td2(byte(s1, 1)) ^
-           Td3(byte(s0, 0)) ^
+           Td0(LTC_BYTE(s3, 3)) ^
+           Td1(LTC_BYTE(s2, 2)) ^
+           Td2(LTC_BYTE(s1, 1)) ^
+           Td3(LTC_BYTE(s0, 0)) ^
            rk[3];
        if (r == Nr-2) {
           break;
@@ -529,28 +529,28 @@ int ECB_DEC(const unsigned char *ct, unsigned char *pt, const symmetric_key *ske
    for (;;) {
        t0 =
-           Td0(byte(s0, 3)) ^
-           Td1(byte(s3, 2)) ^
-           Td2(byte(s2, 1)) ^
-           Td3(byte(s1, 0)) ^
+           Td0(LTC_BYTE(s0, 3)) ^
+           Td1(LTC_BYTE(s3, 2)) ^
+           Td2(LTC_BYTE(s2, 1)) ^
+           Td3(LTC_BYTE(s1, 0)) ^
            rk[4];
        t1 =
-           Td0(byte(s1, 3)) ^
-           Td1(byte(s0, 2)) ^
-           Td2(byte(s3, 1)) ^
-           Td3(byte(s2, 0)) ^
+           Td0(LTC_BYTE(s1, 3)) ^
+           Td1(LTC_BYTE(s0, 2)) ^
+           Td2(LTC_BYTE(s3, 1)) ^
+           Td3(LTC_BYTE(s2, 0)) ^
            rk[5];
        t2 =
-           Td0(byte(s2, 3)) ^
-           Td1(byte(s1, 2)) ^
-           Td2(byte(s0, 1)) ^
-           Td3(byte(s3, 0)) ^
+           Td0(LTC_BYTE(s2, 3)) ^
+           Td1(LTC_BYTE(s1, 2)) ^
+           Td2(LTC_BYTE(s0, 1)) ^
+           Td3(LTC_BYTE(s3, 0)) ^
            rk[6];
        t3 =
-           Td0(byte(s3, 3)) ^
-           Td1(byte(s2, 2)) ^
-           Td2(byte(s1, 1)) ^
-           Td3(byte(s0, 0)) ^
+           Td0(LTC_BYTE(s3, 3)) ^
+           Td1(LTC_BYTE(s2, 2)) ^
+           Td2(LTC_BYTE(s1, 1)) ^
+           Td3(LTC_BYTE(s0, 0)) ^
            rk[7];
 
        rk += 8;
@@ -560,28 +560,28 @@ int ECB_DEC(const unsigned char *ct, unsigned char *pt, const symmetric_key *ske
 
        s0 =
-           Td0(byte(t0, 3)) ^
-           Td1(byte(t3, 2)) ^
-           Td2(byte(t2, 1)) ^
-           Td3(byte(t1, 0)) ^
+           Td0(LTC_BYTE(t0, 3)) ^
+           Td1(LTC_BYTE(t3, 2)) ^
+           Td2(LTC_BYTE(t2, 1)) ^
+           Td3(LTC_BYTE(t1, 0)) ^
            rk[0];
        s1 =
-           Td0(byte(t1, 3)) ^
-           Td1(byte(t0, 2)) ^
-           Td2(byte(t3, 1)) ^
-           Td3(byte(t2, 0)) ^
+           Td0(LTC_BYTE(t1, 3)) ^
+           Td1(LTC_BYTE(t0, 2)) ^
+           Td2(LTC_BYTE(t3, 1)) ^
+           Td3(LTC_BYTE(t2, 0)) ^
            rk[1];
        s2 =
-           Td0(byte(t2, 3)) ^
-           Td1(byte(t1, 2)) ^
-           Td2(byte(t0, 1)) ^
-           Td3(byte(t3, 0)) ^
+           Td0(LTC_BYTE(t2, 3)) ^
+           Td1(LTC_BYTE(t1, 2)) ^
+           Td2(LTC_BYTE(t0, 1)) ^
+           Td3(LTC_BYTE(t3, 0)) ^
            rk[2];
        s3 =
-           Td0(byte(t3, 3)) ^
-           Td1(byte(t2, 2)) ^
-           Td2(byte(t1, 1)) ^
-           Td3(byte(t0, 0)) ^
+           Td0(LTC_BYTE(t3, 3)) ^
+           Td1(LTC_BYTE(t2, 2)) ^
+           Td2(LTC_BYTE(t1, 1)) ^
+           Td3(LTC_BYTE(t0, 0)) ^
            rk[3];
    }
 #endif
@@ -591,31 +591,31 @@ int ECB_DEC(const unsigned char *ct, unsigned char *pt, const symmetric_key *ske
     * map cipher state to byte array block:
     */
    s0 =
-       (Td4[byte(t0, 3)] & 0xff000000) ^
-       (Td4[byte(t3, 2)] & 0x00ff0000) ^
-       (Td4[byte(t2, 1)] & 0x0000ff00) ^
-       (Td4[byte(t1, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t0, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t3, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t2, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t1, 0)] & 0x000000ff) ^
        rk[0];
    STORE32H(s0, pt);
    s1 =
-       (Td4[byte(t1, 3)] & 0xff000000) ^
-       (Td4[byte(t0, 2)] & 0x00ff0000) ^
-       (Td4[byte(t3, 1)] & 0x0000ff00) ^
-       (Td4[byte(t2, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t1, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t0, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t3, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t2, 0)] & 0x000000ff) ^
        rk[1];
    STORE32H(s1, pt+4);
    s2 =
-       (Td4[byte(t2, 3)] & 0xff000000) ^
-       (Td4[byte(t1, 2)] & 0x00ff0000) ^
-       (Td4[byte(t0, 1)] & 0x0000ff00) ^
-       (Td4[byte(t3, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t2, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t1, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t0, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t3, 0)] & 0x000000ff) ^
        rk[2];
    STORE32H(s2, pt+8);
    s3 =
-       (Td4[byte(t3, 3)] & 0xff000000) ^
-       (Td4[byte(t2, 2)] & 0x00ff0000) ^
-       (Td4[byte(t1, 1)] & 0x0000ff00) ^
-       (Td4[byte(t0, 0)] & 0x000000ff) ^
+       (Td4[LTC_BYTE(t3, 3)] & 0xff000000) ^
+       (Td4[LTC_BYTE(t2, 2)] & 0x00ff0000) ^
+       (Td4[LTC_BYTE(t1, 1)] & 0x0000ff00) ^
+       (Td4[LTC_BYTE(t0, 0)] & 0x000000ff) ^
        rk[3];
    STORE32H(s3, pt+12);
diff --git a/src/ciphers/blowfish.c b/src/ciphers/blowfish.c
index 355a2359..b53e05bc 100644
--- a/src/ciphers/blowfish.c
+++ b/src/ciphers/blowfish.c
@@ -373,9 +373,9 @@ int blowfish_setup(const unsigned char *key, int keylen, int num_rounds,
 }
 
 #ifndef __GNUC__
-#define F(x) ((S1[byte(x,3)] + S2[byte(x,2)]) ^ S3[byte(x,1)]) + S4[byte(x,0)]
+#define F(x) ((S1[LTC_BYTE(x,3)] + S2[LTC_BYTE(x,2)]) ^ S3[LTC_BYTE(x,1)]) + S4[LTC_BYTE(x,0)]
 #else
-#define F(x) ((skey->blowfish.S[0][byte(x,3)] + skey->blowfish.S[1][byte(x,2)]) ^ skey->blowfish.S[2][byte(x,1)]) + skey->blowfish.S[3][byte(x,0)]
+#define F(x) ((skey->blowfish.S[0][LTC_BYTE(x,3)] + skey->blowfish.S[1][LTC_BYTE(x,2)]) ^ skey->blowfish.S[2][LTC_BYTE(x,1)]) + skey->blowfish.S[3][LTC_BYTE(x,0)]
 #endif
 
 /**
diff --git a/src/ciphers/cast5.c b/src/ciphers/cast5.c
index 712b57d5..d69dafcb 100644
--- a/src/ciphers/cast5.c
+++ b/src/ciphers/cast5.c
@@ -508,7 +508,7 @@ INLINE static ulong32 FI(ulong32 R, ulong32 Km, ulong32 Kr)
    ulong32 I;
    I = (Km + R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] ^ S2[byte(I,2)]) - S3[byte(I,1)]) + S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] ^ S2[LTC_BYTE(I,2)]) - S3[LTC_BYTE(I,1)]) + S4[LTC_BYTE(I,0)];
 }
 
 INLINE static ulong32 FII(ulong32 R, ulong32 Km, ulong32 Kr)
@@ -516,7 +516,7 @@ INLINE static ulong32 FII(ulong32 R, ulong32 Km, ulong32 Kr)
    ulong32 I;
    I = (Km ^ R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] - S2[byte(I,2)]) + S3[byte(I,1)]) ^ S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] - S2[LTC_BYTE(I,2)]) + S3[LTC_BYTE(I,1)]) ^ S4[LTC_BYTE(I,0)];
 }
 
 INLINE static ulong32 FIII(ulong32 R, ulong32 Km, ulong32 Kr)
@@ -524,7 +524,7 @@ INLINE static ulong32 FIII(ulong32 R, ulong32 Km, ulong32 Kr)
    ulong32 I;
    I = (Km - R);
    I = ROL(I, Kr);
-   return ((S1[byte(I, 3)] + S2[byte(I,2)]) ^ S3[byte(I,1)]) - S4[byte(I,0)];
+   return ((S1[LTC_BYTE(I, 3)] + S2[LTC_BYTE(I,2)]) ^ S3[LTC_BYTE(I,1)]) - S4[LTC_BYTE(I,0)];
 }
 
 /**
diff --git a/src/ciphers/des.c b/src/ciphers/des.c
index a5210306..44907dde 100644
--- a/src/ciphers/des.c
+++ b/src/ciphers/des.c
@@ -1432,14 +1432,14 @@ static void _desfunc(ulong32 *block, const ulong32 *keys)
 #else
    {
       ulong64 tmp;
-      tmp = des_ip[0][byte(leftt, 0)] ^
-            des_ip[1][byte(leftt, 1)] ^
-            des_ip[2][byte(leftt, 2)] ^
-            des_ip[3][byte(leftt, 3)] ^
-            des_ip[4][byte(right, 0)] ^
-            des_ip[5][byte(right, 1)] ^
-            des_ip[6][byte(right, 2)] ^
-            des_ip[7][byte(right, 3)];
+      tmp = des_ip[0][LTC_BYTE(leftt, 0)] ^
+            des_ip[1][LTC_BYTE(leftt, 1)] ^
+            des_ip[2][LTC_BYTE(leftt, 2)] ^
+            des_ip[3][LTC_BYTE(leftt, 3)] ^
+            des_ip[4][LTC_BYTE(right, 0)] ^
+            des_ip[5][LTC_BYTE(right, 1)] ^
+            des_ip[6][LTC_BYTE(right, 2)] ^
+            des_ip[7][LTC_BYTE(right, 3)];
       leftt = (ulong32)(tmp >> 32);
       right = (ulong32)(tmp & 0xFFFFFFFFUL);
    }
@@ -1491,14 +1491,14 @@ static void _desfunc(ulong32 *block, const ulong32 *keys)
 #else
    {
      ulong64 tmp;
-      tmp = des_fp[0][byte(leftt, 0)] ^
-            des_fp[1][byte(leftt, 1)] ^
-            des_fp[2][byte(leftt, 2)] ^
-            des_fp[3][byte(leftt, 3)] ^
-            des_fp[4][byte(right, 0)] ^
-            des_fp[5][byte(right, 1)] ^
-            des_fp[6][byte(right, 2)] ^
-            des_fp[7][byte(right, 3)];
+      tmp = des_fp[0][LTC_BYTE(leftt, 0)] ^
+            des_fp[1][LTC_BYTE(leftt, 1)] ^
+            des_fp[2][LTC_BYTE(leftt, 2)] ^
+            des_fp[3][LTC_BYTE(leftt, 3)] ^
+            des_fp[4][LTC_BYTE(right, 0)] ^
+            des_fp[5][LTC_BYTE(right, 1)] ^
+            des_fp[6][LTC_BYTE(right, 2)] ^
+            des_fp[7][LTC_BYTE(right, 3)];
       leftt = (ulong32)(tmp >> 32);
       right = (ulong32)(tmp & 0xFFFFFFFFUL);
    }
diff --git a/src/ciphers/twofish/twofish.c b/src/ciphers/twofish/twofish.c
index 0a52aefa..c3aa4591 100644
--- a/src/ciphers/twofish/twofish.c
+++ b/src/ciphers/twofish/twofish.c
@@ -278,8 +278,8 @@ static void h_func(const unsigned char *in, unsigned char *out, const unsigned c
 #endif
 
 /* the G function */
-#define g_func(x, dum) (S1[byte(x,0)] ^ S2[byte(x,1)] ^ S3[byte(x,2)] ^ S4[byte(x,3)])
-#define g1_func(x, dum) (S2[byte(x,0)] ^ S3[byte(x,1)] ^ S4[byte(x,2)] ^ S1[byte(x,3)])
+#define g_func(x, dum) (S1[LTC_BYTE(x,0)] ^ S2[LTC_BYTE(x,1)] ^ S3[LTC_BYTE(x,2)] ^ S4[LTC_BYTE(x,3)])
+#define g1_func(x, dum) (S2[LTC_BYTE(x,0)] ^ S3[LTC_BYTE(x,1)] ^ S4[LTC_BYTE(x,2)] ^ S1[LTC_BYTE(x,3)])
 
 #else
diff --git a/src/hashes/tiger.c b/src/hashes/tiger.c
index 0d3ba102..970582d4 100644
--- a/src/hashes/tiger.c
+++ b/src/hashes/tiger.c
@@ -564,8 +564,8 @@ INLINE static void tiger_round(ulong64 *a, ulong64 *b, ulong64 *c, ulong64 x, in
 {
    ulong64 tmp;
    tmp = (*c ^= x);
-   *a -= t1[byte(tmp, 0)] ^ t2[byte(tmp, 2)] ^ t3[byte(tmp, 4)] ^ t4[byte(tmp, 6)];
-   tmp = (*b += t4[byte(tmp, 1)] ^ t3[byte(tmp, 3)] ^ t2[byte(tmp,5)] ^ t1[byte(tmp,7)]);
+   *a -= t1[LTC_BYTE(tmp, 0)] ^ t2[LTC_BYTE(tmp, 2)] ^ t3[LTC_BYTE(tmp, 4)] ^ t4[LTC_BYTE(tmp, 6)];
+   tmp = (*b += t4[LTC_BYTE(tmp, 1)] ^ t3[LTC_BYTE(tmp, 3)] ^ t2[LTC_BYTE(tmp,5)] ^ t1[LTC_BYTE(tmp,7)]);
    switch (mul) {
       case 5:  *b = (tmp << 2) + tmp; break;
      case 7:  *b = (tmp << 3) - tmp; break;
diff --git a/src/headers/tomcrypt_macros.h b/src/headers/tomcrypt_macros.h
index 94e368f8..cbfaeef4 100644
--- a/src/headers/tomcrypt_macros.h
+++ b/src/headers/tomcrypt_macros.h
@@ -431,9 +431,9 @@ static inline ulong64 ROR64(ulong64 word, int i)
 
 /* extract a byte portably */
 #ifdef _MSC_VER
-   #define byte(x, n) ((unsigned char)((x) >> (8 * (n))))
+   #define LTC_BYTE(x, n) ((unsigned char)((x) >> (8 * (n))))
 #else
-   #define byte(x, n) (((x) >> (8 * (n))) & 255)
+   #define LTC_BYTE(x, n) (((x) >> (8 * (n))) & 255)
 #endif
 
 /* there is no snprintf before Visual C++ 2015 */
diff --git a/src/mac/pelican/pelican.c b/src/mac/pelican/pelican.c
index e8cea642..7c678d20 100644
--- a/src/mac/pelican/pelican.c
+++ b/src/mac/pelican/pelican.c
@@ -62,25 +62,25 @@ static void _four_rounds(pelican_state *pelmac)
    LOAD32H(s3, pelmac->state + 12);
    for (r = 0; r < 4; r++) {
        t0 =
-          Te0(byte(s0, 3)) ^
-          Te1(byte(s1, 2)) ^
-          Te2(byte(s2, 1)) ^
-          Te3(byte(s3, 0));
+          Te0(LTC_BYTE(s0, 3)) ^
+          Te1(LTC_BYTE(s1, 2)) ^
+          Te2(LTC_BYTE(s2, 1)) ^
+          Te3(LTC_BYTE(s3, 0));
        t1 =
-          Te0(byte(s1, 3)) ^
-          Te1(byte(s2, 2)) ^
-          Te2(byte(s3, 1)) ^
-          Te3(byte(s0, 0));
+          Te0(LTC_BYTE(s1, 3)) ^
+          Te1(LTC_BYTE(s2, 2)) ^
+          Te2(LTC_BYTE(s3, 1)) ^
+          Te3(LTC_BYTE(s0, 0));
        t2 =
-          Te0(byte(s2, 3)) ^
-          Te1(byte(s3, 2)) ^
-          Te2(byte(s0, 1)) ^
-          Te3(byte(s1, 0));
+          Te0(LTC_BYTE(s2, 3)) ^
+          Te1(LTC_BYTE(s3, 2)) ^
+          Te2(LTC_BYTE(s0, 1)) ^
+          Te3(LTC_BYTE(s1, 0));
        t3 =
-          Te0(byte(s3, 3)) ^
-          Te1(byte(s0, 2)) ^
-          Te2(byte(s1, 1)) ^
-          Te3(byte(s2, 0));
+          Te0(LTC_BYTE(s3, 3)) ^
+          Te1(LTC_BYTE(s0, 2)) ^
+          Te2(LTC_BYTE(s1, 1)) ^
+          Te3(LTC_BYTE(s2, 0));
        s0 = t0; s1 = t1; s2 = t2; s3 = t3;
    }
    STORE32H(s0, pelmac->state      );
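
Reviewer note (not part of the patch): the hunk in src/headers/tomcrypt_macros.h is the definition being renamed; every other hunk is a mechanical byte -> LTC_BYTE substitution at the call sites. Below is a minimal, self-contained sketch of the byte-extraction semantics, using the non-MSVC definition from that hunk. The demo file name, variable names, and printed values are illustrative assumptions only, not libtomcrypt code.

/* demo_ltc_byte.c -- standalone illustration, not part of libtomcrypt */
#include <stdio.h>

/* Portable (non-MSVC) variant from the tomcrypt_macros.h hunk above:
 * selects byte n of x, where n = 0 is the least significant byte. */
#define LTC_BYTE(x, n) (((x) >> (8 * (n))) & 255)

int main(void)
{
   unsigned long w = 0x11223344UL;

   /* Extract bytes from most significant (n = 3) to least significant (n = 0). */
   printf("%02lx %02lx %02lx %02lx\n",
          LTC_BYTE(w, 3), LTC_BYTE(w, 2), LTC_BYTE(w, 1), LTC_BYTE(w, 0));
   /* expected output: 11 22 33 44 */
   return 0;
}

The MSVC branch casts the shifted word to unsigned char instead of masking, but both forms yield a value in 0..255, which is all the table-lookup call sites (Te*/Td*/Tks*, S1..S4, des_ip/des_fp, t1..t4) rely on when using it as an index.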