PCRE2: upgrade to version 10.30

Minimal adjustments to config.h are necessary.

[ChangeLog][Third-Party Code] PCRE2 has been updated to version 10.30.

Change-Id: Iaca6a5ceffe4f5029212411eca8e2965ca7d9410
Reviewed-by: Kai Koehne <kai.koehne@qt.io>
Author: Giuseppe D'Angelo
Date:   2017-08-22 19:20:11 +02:00
parent 189e9c93d7
commit f537dc0da2
50 changed files with 22657 additions and 20081 deletions

View File

@ -8,7 +8,7 @@ Email domain: cam.ac.uk
University of Cambridge Computing Service,
Cambridge, England.
Copyright (c) 1997-2016 University of Cambridge
Copyright (c) 1997-2017 University of Cambridge
All rights reserved
@ -19,7 +19,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
Copyright(c) 2010-2016 Zoltan Herczeg
Copyright(c) 2010-2017 Zoltan Herczeg
All rights reserved.
@ -30,7 +30,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
Copyright(c) 2009-2016 Zoltan Herczeg
Copyright(c) 2009-2017 Zoltan Herczeg
All rights reserved.
####

View File

@ -5,9 +5,10 @@ PCRE2 is a library of functions to support regular expressions whose syntax
and semantics are as close as possible to those of the Perl 5 language.
Release 10 of PCRE2 is distributed under the terms of the "BSD" licence, as
specified below. The documentation for PCRE2, supplied in the "doc"
directory, is distributed under the same terms as the software itself. The data
in the testdata directory is not copyrighted and is in the public domain.
specified below, with one exemption for certain binary redistributions. The
documentation for PCRE2, supplied in the "doc" directory, is distributed under
the same terms as the software itself. The data in the testdata directory is
not copyrighted and is in the public domain.
The basic library functions are written in C and are freestanding. Also
included in the distribution is a just-in-time compiler that can be used to
@ -25,7 +26,7 @@ Email domain: cam.ac.uk
University of Cambridge Computing Service,
Cambridge, England.
Copyright (c) 1997-2016 University of Cambridge
Copyright (c) 1997-2017 University of Cambridge
All rights reserved.
@ -36,7 +37,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
Copyright(c) 2010-2016 Zoltan Herczeg
Copyright(c) 2010-2017 Zoltan Herczeg
All rights reserved.
@ -47,7 +48,7 @@ Written by: Zoltan Herczeg
Email local part: hzmester
Emain domain: freemail.hu
Copyright(c) 2009-2016 Zoltan Herczeg
Copyright(c) 2009-2017 Zoltan Herczeg
All rights reserved.
@ -57,11 +58,11 @@ THE "BSD" LICENCE
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
* Redistributions of source code must retain the above copyright notices,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
notices, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of Cambridge nor the names of any
@ -80,4 +81,14 @@ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
EXEMPTION FOR BINARY LIBRARY-LIKE PACKAGES
------------------------------------------
The second condition in the BSD licence (covering binary redistributions) does
not apply all the way down a chain of software. If binary package A includes
PCRE2, it must respect the condition, but if package B is software that
includes package A, the condition is not imposed on package B unless it uses
PCRE2 independently.
End

View File

@ -6,12 +6,12 @@
"Description": "The PCRE library is a set of functions that implement regular expression pattern matching using the same syntax and semantics as Perl 5.",
"Homepage": "http://www.pcre.org/",
"Version": "10.22",
"Version": "10.30",
"License": "BSD 3-clause \"New\" or \"Revised\" License",
"LicenseId": "BSD-3-Clause",
"LicenseFile": "LICENCE",
"Copyright": "Copyright (c) 1997-2016 University of Cambridge
Copyright (c) 2009-2016 Zoltan Herczeg
"Copyright": "Copyright (c) 1997-2017 University of Cambridge
Copyright (c) 2009-2017 Zoltan Herczeg
Copyright (c) 2007-2012 Google Inc.
Copyright (c) 2013-2013 Tilera Corporation (jiwang@tilera.com)"
}

View File

@ -7,8 +7,9 @@
#define HAVE_STRING_H 1
#define LINK_SIZE 2
#define HEAP_LIMIT 20000000
#define MATCH_LIMIT 10000000
#define MATCH_LIMIT_RECURSION MATCH_LIMIT
#define MATCH_LIMIT_DEPTH MATCH_LIMIT
#define MAX_NAME_COUNT 10000
#define MAX_NAME_SIZE 32
#define NEWLINE_DEFAULT 2
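(Editorial sketch, not part of this change: the MATCH_LIMIT_DEPTH and HEAP_LIMIT defaults defined above can be read back at run time through pcre2_config(), using the PCRE2_CONFIG_* constants that pcre2.h introduces further down. The code-unit width and the function name are chosen only for the example.)

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

static void show_default_limits(void)
{
uint32_t heap_limit, match_limit, depth_limit;
pcre2_config(PCRE2_CONFIG_HEAPLIMIT, &heap_limit);   /* default heap limit (kibibytes) */
pcre2_config(PCRE2_CONFIG_MATCHLIMIT, &match_limit); /* default match limit */
pcre2_config(PCRE2_CONFIG_DEPTHLIMIT, &depth_limit); /* default backtracking depth limit */
printf("heap=%u match=%u depth=%u\n", (unsigned)heap_limit, (unsigned)match_limit,
  (unsigned)depth_limit);
}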

View File

@ -5,7 +5,7 @@
/* This is the public header file for the PCRE library, second API, to be
#included by applications that call PCRE2 functions.
Copyright (c) 2016 University of Cambridge
Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -36,15 +36,15 @@ POSSIBILITY OF SUCH DAMAGE.
-----------------------------------------------------------------------------
*/
#ifndef _PCRE2_H
#define _PCRE2_H
#ifndef PCRE2_H_IDEMPOTENT_GUARD
#define PCRE2_H_IDEMPOTENT_GUARD
/* The current PCRE version information. */
#define PCRE2_MAJOR 10
#define PCRE2_MINOR 22
#define PCRE2_MINOR 30
#define PCRE2_PRERELEASE
#define PCRE2_DATE 2016-07-29
#define PCRE2_DATE 2017-08-14
/* When an application links to a PCRE DLL in Windows, the symbols that are
imported have to be identified as such. When building PCRE2, the appropriate
@ -67,6 +67,20 @@ don't change existing definitions of PCRE2_EXP_DECL. */
# endif
#endif
/* When compiling with the MSVC compiler, it is sometimes necessary to include
a "calling convention" before exported function names. (This is secondhand
information; I know nothing about MSVC myself). For example, something like
void __cdecl function(....)
might be needed. In order so make this easy, all the exported functions have
PCRE2_CALL_CONVENTION just before their names. It is rarely needed; if not
set, we ensure here that it has no effect. */
#ifndef PCRE2_CALL_CONVENTION
#define PCRE2_CALL_CONVENTION
#endif
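/* Illustrative note, not part of the upstream header: a build that needs a
specific MSVC calling convention defines the macro before including pcre2.h,
for example

  #define PCRE2_CALL_CONVENTION __cdecl
  #define PCRE2_CODE_UNIT_WIDTH 8
  #include <pcre2.h>

When the macro is left undefined, the empty definition above makes it a
no-op. */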
/* Have to include limits.h, stdlib.h and stdint.h to ensure that size_t and
uint8_t, UCHAR_MAX, etc are defined. */
@ -87,6 +101,7 @@ others can be added next to them */
#define PCRE2_ANCHORED 0x80000000u
#define PCRE2_NO_UTF_CHECK 0x40000000u
#define PCRE2_ENDANCHORED 0x20000000u
/* The following option bits can be passed only to pcre2_compile(). However,
they may affect compilation, JIT compilation, and/or interpretive execution.
@ -122,6 +137,15 @@ D is inspected during pcre2_dfa_match() execution
#define PCRE2_ALT_CIRCUMFLEX 0x00200000u /* J M D */
#define PCRE2_ALT_VERBNAMES 0x00400000u /* C */
#define PCRE2_USE_OFFSET_LIMIT 0x00800000u /* J M D */
#define PCRE2_EXTENDED_MORE 0x01000000u /* C */
#define PCRE2_LITERAL 0x02000000u /* C */
/* An additional compile options word is available in the compile context. */
#define PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES 0x00000001u /* C */
#define PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL 0x00000002u /* C */
#define PCRE2_EXTRA_MATCH_WORD 0x00000004u /* C */
#define PCRE2_EXTRA_MATCH_LINE 0x00000008u /* C */
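/* Usage sketch (illustrative, not part of pcre2.h): unlike the main option
bits, these extra options are set on a compile context through
pcre2_set_compile_extra_options(), which is declared further down, e.g.

  pcre2_compile_context *cctx = pcre2_compile_context_create(NULL);
  pcre2_set_compile_extra_options(cctx, PCRE2_EXTRA_BAD_ESCAPE_IS_LITERAL);
  ... pass cctx as the last argument of pcre2_compile() ...
  pcre2_compile_context_free(cctx);
*/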
/* These are for pcre2_jit_compile(). */
@ -160,6 +184,16 @@ ignored for pcre2_jit_match(). */
#define PCRE2_NO_JIT 0x00002000u
/* Options for pcre2_pattern_convert(). */
#define PCRE2_CONVERT_UTF 0x00000001u
#define PCRE2_CONVERT_NO_UTF_CHECK 0x00000002u
#define PCRE2_CONVERT_POSIX_BASIC 0x00000004u
#define PCRE2_CONVERT_POSIX_EXTENDED 0x00000008u
#define PCRE2_CONVERT_GLOB 0x00000010u
#define PCRE2_CONVERT_GLOB_NO_WILD_SEPARATOR 0x00000030u
#define PCRE2_CONVERT_GLOB_NO_STARSTAR 0x00000050u
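/* Usage sketch (illustrative, not part of pcre2.h; 8-bit library assumed for
the string literal): convert a glob into an equivalent regular expression
pattern, then free the converted string. Passing NULL for the convert context
uses the defaults.

  PCRE2_UCHAR *converted = NULL;
  PCRE2_SIZE converted_length = 0;
  int rc = pcre2_pattern_convert((PCRE2_SPTR)"*.txt", 5, PCRE2_CONVERT_GLOB,
    &converted, &converted_length, NULL);
  if (rc == 0) pcre2_converted_pattern_free(converted);
*/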
/* Newline and \R settings, for use in compile contexts. The newline values
must be kept in step with values set in config.h and both sets must all be
greater than zero. */
@ -169,6 +203,7 @@ greater than zero. */
#define PCRE2_NEWLINE_CRLF 3
#define PCRE2_NEWLINE_ANY 4
#define PCRE2_NEWLINE_ANYCRLF 5
#define PCRE2_NEWLINE_NUL 6
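/* Illustrative note, not part of pcre2.h: the new NUL convention is selected
like any other newline convention, e.g. pcre2_set_newline(cctx, PCRE2_NEWLINE_NUL)
on a compile context "cctx" created by the caller. */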
#define PCRE2_BSR_UNICODE 1
#define PCRE2_BSR_ANYCRLF 2
@ -242,7 +277,8 @@ numbers must not be changed. */
#define PCRE2_ERROR_NOUNIQUESUBSTRING (-50)
#define PCRE2_ERROR_NULL (-51)
#define PCRE2_ERROR_RECURSELOOP (-52)
#define PCRE2_ERROR_RECURSIONLIMIT (-53)
#define PCRE2_ERROR_DEPTHLIMIT (-53)
#define PCRE2_ERROR_RECURSIONLIMIT (-53) /* Obsolete synonym */
#define PCRE2_ERROR_UNAVAILABLE (-54)
#define PCRE2_ERROR_UNSET (-55)
#define PCRE2_ERROR_BADOFFSETLIMIT (-56)
@ -252,6 +288,9 @@ numbers must not be changed. */
#define PCRE2_ERROR_BADSUBSPATTERN (-60)
#define PCRE2_ERROR_TOOMANYREPLACE (-61)
#define PCRE2_ERROR_BADSERIALIZEDDATA (-62)
#define PCRE2_ERROR_HEAPLIMIT (-63)
#define PCRE2_ERROR_CONVERT_SYNTAX (-64)
/* Request types for pcre2_pattern_info() */
@ -276,9 +315,12 @@ numbers must not be changed. */
#define PCRE2_INFO_NAMEENTRYSIZE 18
#define PCRE2_INFO_NAMETABLE 19
#define PCRE2_INFO_NEWLINE 20
#define PCRE2_INFO_RECURSIONLIMIT 21
#define PCRE2_INFO_DEPTHLIMIT 21
#define PCRE2_INFO_RECURSIONLIMIT 21 /* Obsolete synonym */
#define PCRE2_INFO_SIZE 22
#define PCRE2_INFO_HASBACKSLASHC 23
#define PCRE2_INFO_FRAMESIZE 24
#define PCRE2_INFO_HEAPLIMIT 25
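/* Usage sketch (illustrative, not part of pcre2.h): the new requests are read
with pcre2_pattern_info() like the existing ones; "re" stands for a previously
compiled pattern and is only a name for the example.

  PCRE2_SIZE frame_size;
  uint32_t heap_limit;
  pcre2_pattern_info(re, PCRE2_INFO_FRAMESIZE, &frame_size);
  int rc = pcre2_pattern_info(re, PCRE2_INFO_HEAPLIMIT, &heap_limit);

For the limit requests, a pattern that sets no limit of its own is expected to
yield PCRE2_ERROR_UNSET rather than a value. */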
/* Request types for pcre2_config(). */
@ -289,11 +331,13 @@ numbers must not be changed. */
#define PCRE2_CONFIG_MATCHLIMIT 4
#define PCRE2_CONFIG_NEWLINE 5
#define PCRE2_CONFIG_PARENSLIMIT 6
#define PCRE2_CONFIG_RECURSIONLIMIT 7
#define PCRE2_CONFIG_STACKRECURSE 8
#define PCRE2_CONFIG_DEPTHLIMIT 7
#define PCRE2_CONFIG_RECURSIONLIMIT 7 /* Obsolete synonym */
#define PCRE2_CONFIG_STACKRECURSE 8 /* Obsolete */
#define PCRE2_CONFIG_UNICODE 9
#define PCRE2_CONFIG_UNICODE_VERSION 10
#define PCRE2_CONFIG_VERSION 11
#define PCRE2_CONFIG_HEAPLIMIT 12
/* Types for code units in patterns and subject strings. */
@ -328,6 +372,9 @@ typedef struct pcre2_real_compile_context pcre2_compile_context; \
struct pcre2_real_match_context; \
typedef struct pcre2_real_match_context pcre2_match_context; \
\
struct pcre2_real_convert_context; \
typedef struct pcre2_real_convert_context pcre2_convert_context; \
\
struct pcre2_real_code; \
typedef struct pcre2_real_code pcre2_code; \
\
@ -386,170 +433,220 @@ expanded for each width below. Start with functions that give general
information. */
#define PCRE2_GENERAL_INFO_FUNCTIONS \
PCRE2_EXP_DECL int pcre2_config(uint32_t, void *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION pcre2_config(uint32_t, void *);
/* Functions for manipulating contexts. */
#define PCRE2_GENERAL_CONTEXT_FUNCTIONS \
PCRE2_EXP_DECL \
pcre2_general_context *pcre2_general_context_copy(pcre2_general_context *); \
PCRE2_EXP_DECL \
pcre2_general_context *pcre2_general_context_create( \
void *(*)(PCRE2_SIZE, void *), \
PCRE2_EXP_DECL pcre2_general_context PCRE2_CALL_CONVENTION \
*pcre2_general_context_copy(pcre2_general_context *); \
PCRE2_EXP_DECL pcre2_general_context PCRE2_CALL_CONVENTION \
*pcre2_general_context_create(void *(*)(PCRE2_SIZE, void *), \
void (*)(void *, void *), void *); \
PCRE2_EXP_DECL void pcre2_general_context_free(pcre2_general_context *);
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_general_context_free(pcre2_general_context *);
#define PCRE2_COMPILE_CONTEXT_FUNCTIONS \
PCRE2_EXP_DECL \
pcre2_compile_context *pcre2_compile_context_copy(pcre2_compile_context *); \
PCRE2_EXP_DECL \
pcre2_compile_context *pcre2_compile_context_create(pcre2_general_context *);\
PCRE2_EXP_DECL void pcre2_compile_context_free(pcre2_compile_context *); \
PCRE2_EXP_DECL int pcre2_set_bsr(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int pcre2_set_character_tables(pcre2_compile_context *, \
const unsigned char *); \
PCRE2_EXP_DECL int pcre2_set_max_pattern_length(pcre2_compile_context *, \
PCRE2_SIZE); \
PCRE2_EXP_DECL int pcre2_set_newline(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int pcre2_set_parens_nest_limit(pcre2_compile_context *, \
uint32_t); \
PCRE2_EXP_DECL int pcre2_set_compile_recursion_guard(\
pcre2_compile_context *, int (*)(uint32_t, void *), \
void *);
PCRE2_EXP_DECL pcre2_compile_context PCRE2_CALL_CONVENTION \
*pcre2_compile_context_copy(pcre2_compile_context *); \
PCRE2_EXP_DECL pcre2_compile_context PCRE2_CALL_CONVENTION \
*pcre2_compile_context_create(pcre2_general_context *);\
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_compile_context_free(pcre2_compile_context *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_bsr(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_character_tables(pcre2_compile_context *, const unsigned char *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_compile_extra_options(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_max_pattern_length(pcre2_compile_context *, PCRE2_SIZE); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_newline(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_parens_nest_limit(pcre2_compile_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_compile_recursion_guard(pcre2_compile_context *, \
int (*)(uint32_t, void *), void *);
#define PCRE2_MATCH_CONTEXT_FUNCTIONS \
PCRE2_EXP_DECL \
pcre2_match_context *pcre2_match_context_copy(pcre2_match_context *); \
PCRE2_EXP_DECL \
pcre2_match_context *pcre2_match_context_create(pcre2_general_context *); \
PCRE2_EXP_DECL void pcre2_match_context_free(pcre2_match_context *); \
PCRE2_EXP_DECL int pcre2_set_callout(pcre2_match_context *, \
PCRE2_EXP_DECL pcre2_match_context PCRE2_CALL_CONVENTION \
*pcre2_match_context_copy(pcre2_match_context *); \
PCRE2_EXP_DECL pcre2_match_context PCRE2_CALL_CONVENTION \
*pcre2_match_context_create(pcre2_general_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_match_context_free(pcre2_match_context *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_callout(pcre2_match_context *, \
int (*)(pcre2_callout_block *, void *), void *); \
PCRE2_EXP_DECL int pcre2_set_match_limit(pcre2_match_context *, \
uint32_t); \
PCRE2_EXP_DECL int pcre2_set_offset_limit(pcre2_match_context *, \
PCRE2_SIZE); \
PCRE2_EXP_DECL int pcre2_set_recursion_limit(pcre2_match_context *, \
uint32_t); \
PCRE2_EXP_DECL int pcre2_set_recursion_memory_management( \
pcre2_match_context *, void *(*)(PCRE2_SIZE, void *), \
void (*)(void *, void *), void *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_depth_limit(pcre2_match_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_heap_limit(pcre2_match_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_match_limit(pcre2_match_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_offset_limit(pcre2_match_context *, PCRE2_SIZE); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_recursion_limit(pcre2_match_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_recursion_memory_management(pcre2_match_context *, \
void *(*)(PCRE2_SIZE, void *), void (*)(void *, void *), void *);
#define PCRE2_CONVERT_CONTEXT_FUNCTIONS \
PCRE2_EXP_DECL pcre2_convert_context PCRE2_CALL_CONVENTION \
*pcre2_convert_context_copy(pcre2_convert_context *); \
PCRE2_EXP_DECL pcre2_convert_context PCRE2_CALL_CONVENTION \
*pcre2_convert_context_create(pcre2_general_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_convert_context_free(pcre2_convert_context *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_glob_escape(pcre2_convert_context *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_set_glob_separator(pcre2_convert_context *, uint32_t);
/* Functions concerned with compiling a pattern to PCRE internal code. */
#define PCRE2_COMPILE_FUNCTIONS \
PCRE2_EXP_DECL \
pcre2_code *pcre2_compile(PCRE2_SPTR, PCRE2_SIZE, uint32_t, \
int *, PCRE2_SIZE *, pcre2_compile_context *); \
PCRE2_EXP_DECL void pcre2_code_free(pcre2_code *); \
PCRE2_EXP_DECL \
pcre2_code *pcre2_code_copy(const pcre2_code *);
PCRE2_EXP_DECL pcre2_code PCRE2_CALL_CONVENTION \
*pcre2_compile(PCRE2_SPTR, PCRE2_SIZE, uint32_t, int *, PCRE2_SIZE *, \
pcre2_compile_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_code_free(pcre2_code *); \
PCRE2_EXP_DECL pcre2_code PCRE2_CALL_CONVENTION \
*pcre2_code_copy(const pcre2_code *); \
PCRE2_EXP_DECL pcre2_code PCRE2_CALL_CONVENTION \
*pcre2_code_copy_with_tables(const pcre2_code *);
/* Functions that give information about a compiled pattern. */
#define PCRE2_PATTERN_INFO_FUNCTIONS \
PCRE2_EXP_DECL int pcre2_pattern_info(const pcre2_code *, uint32_t, \
void *); \
PCRE2_EXP_DECL int pcre2_callout_enumerate(const pcre2_code *, \
int (*)(pcre2_callout_enumerate_block *, void *), \
void *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_pattern_info(const pcre2_code *, uint32_t, void *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_callout_enumerate(const pcre2_code *, \
int (*)(pcre2_callout_enumerate_block *, void *), void *);
/* Functions for running a match and inspecting the result. */
#define PCRE2_MATCH_FUNCTIONS \
PCRE2_EXP_DECL \
pcre2_match_data *pcre2_match_data_create(uint32_t, \
PCRE2_EXP_DECL pcre2_match_data PCRE2_CALL_CONVENTION \
*pcre2_match_data_create(uint32_t, pcre2_general_context *); \
PCRE2_EXP_DECL pcre2_match_data PCRE2_CALL_CONVENTION \
*pcre2_match_data_create_from_pattern(const pcre2_code *, \
pcre2_general_context *); \
PCRE2_EXP_DECL \
pcre2_match_data *pcre2_match_data_create_from_pattern(\
const pcre2_code *, \
pcre2_general_context *); \
PCRE2_EXP_DECL int pcre2_dfa_match(const pcre2_code *, PCRE2_SPTR, \
PCRE2_SIZE, PCRE2_SIZE, uint32_t, \
pcre2_match_data *, pcre2_match_context *, int *, \
PCRE2_SIZE); \
PCRE2_EXP_DECL int pcre2_match(const pcre2_code *, \
PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, uint32_t, \
pcre2_match_data *, pcre2_match_context *); \
PCRE2_EXP_DECL void pcre2_match_data_free(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SPTR pcre2_get_mark(pcre2_match_data *); \
PCRE2_EXP_DECL uint32_t pcre2_get_ovector_count(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE *pcre2_get_ovector_pointer(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE pcre2_get_startchar(pcre2_match_data *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_dfa_match(const pcre2_code *, PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, \
uint32_t, pcre2_match_data *, pcre2_match_context *, int *, PCRE2_SIZE); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_match(const pcre2_code *, PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, \
uint32_t, pcre2_match_data *, pcre2_match_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_match_data_free(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SPTR PCRE2_CALL_CONVENTION \
pcre2_get_mark(pcre2_match_data *); \
PCRE2_EXP_DECL uint32_t PCRE2_CALL_CONVENTION \
pcre2_get_ovector_count(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE PCRE2_CALL_CONVENTION \
*pcre2_get_ovector_pointer(pcre2_match_data *); \
PCRE2_EXP_DECL PCRE2_SIZE PCRE2_CALL_CONVENTION \
pcre2_get_startchar(pcre2_match_data *);
/* Convenience functions for handling matched substrings. */
#define PCRE2_SUBSTRING_FUNCTIONS \
PCRE2_EXP_DECL int pcre2_substring_copy_byname(pcre2_match_data *, \
PCRE2_SPTR, PCRE2_UCHAR *, PCRE2_SIZE *); \
PCRE2_EXP_DECL int pcre2_substring_copy_bynumber(pcre2_match_data *, \
uint32_t, PCRE2_UCHAR *, PCRE2_SIZE *); \
PCRE2_EXP_DECL void pcre2_substring_free(PCRE2_UCHAR *); \
PCRE2_EXP_DECL int pcre2_substring_get_byname(pcre2_match_data *, \
PCRE2_SPTR, PCRE2_UCHAR **, PCRE2_SIZE *); \
PCRE2_EXP_DECL int pcre2_substring_get_bynumber(pcre2_match_data *, \
uint32_t, PCRE2_UCHAR **, PCRE2_SIZE *); \
PCRE2_EXP_DECL int pcre2_substring_length_byname(pcre2_match_data *, \
PCRE2_SPTR, PCRE2_SIZE *); \
PCRE2_EXP_DECL int pcre2_substring_length_bynumber(pcre2_match_data *, \
uint32_t, PCRE2_SIZE *); \
PCRE2_EXP_DECL int pcre2_substring_nametable_scan(const pcre2_code *, \
PCRE2_SPTR, PCRE2_SPTR *, PCRE2_SPTR *); \
PCRE2_EXP_DECL int pcre2_substring_number_from_name(\
const pcre2_code *, PCRE2_SPTR); \
PCRE2_EXP_DECL void pcre2_substring_list_free(PCRE2_SPTR *); \
PCRE2_EXP_DECL int pcre2_substring_list_get(pcre2_match_data *, \
PCRE2_UCHAR ***, PCRE2_SIZE **);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_copy_byname(pcre2_match_data *, PCRE2_SPTR, PCRE2_UCHAR *, \
PCRE2_SIZE *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_copy_bynumber(pcre2_match_data *, uint32_t, PCRE2_UCHAR *, \
PCRE2_SIZE *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_substring_free(PCRE2_UCHAR *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_get_byname(pcre2_match_data *, PCRE2_SPTR, PCRE2_UCHAR **, \
PCRE2_SIZE *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_get_bynumber(pcre2_match_data *, uint32_t, PCRE2_UCHAR **, \
PCRE2_SIZE *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_length_byname(pcre2_match_data *, PCRE2_SPTR, PCRE2_SIZE *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_length_bynumber(pcre2_match_data *, uint32_t, PCRE2_SIZE *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_nametable_scan(const pcre2_code *, PCRE2_SPTR, PCRE2_SPTR *, \
PCRE2_SPTR *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_number_from_name(const pcre2_code *, PCRE2_SPTR); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_substring_list_free(PCRE2_SPTR *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substring_list_get(pcre2_match_data *, PCRE2_UCHAR ***, PCRE2_SIZE **);
/* Functions for serializing / deserializing compiled patterns. */
#define PCRE2_SERIALIZE_FUNCTIONS \
PCRE2_EXP_DECL int32_t pcre2_serialize_encode(const pcre2_code **, \
int32_t, uint8_t **, PCRE2_SIZE *, \
PCRE2_EXP_DECL int32_t PCRE2_CALL_CONVENTION \
pcre2_serialize_encode(const pcre2_code **, int32_t, uint8_t **, \
PCRE2_SIZE *, pcre2_general_context *); \
PCRE2_EXP_DECL int32_t PCRE2_CALL_CONVENTION \
pcre2_serialize_decode(pcre2_code **, int32_t, const uint8_t *, \
pcre2_general_context *); \
PCRE2_EXP_DECL int32_t pcre2_serialize_decode(pcre2_code **, int32_t, \
const uint8_t *, pcre2_general_context *); \
PCRE2_EXP_DECL int32_t pcre2_serialize_get_number_of_codes(const uint8_t *); \
PCRE2_EXP_DECL void pcre2_serialize_free(uint8_t *);
PCRE2_EXP_DECL int32_t PCRE2_CALL_CONVENTION \
pcre2_serialize_get_number_of_codes(const uint8_t *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_serialize_free(uint8_t *);
/* Convenience function for match + substitute. */
#define PCRE2_SUBSTITUTE_FUNCTION \
PCRE2_EXP_DECL int pcre2_substitute(const pcre2_code *, \
PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, uint32_t, \
pcre2_match_data *, pcre2_match_context *, \
PCRE2_SPTR, PCRE2_SIZE, PCRE2_UCHAR *, \
PCRE2_SIZE *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_substitute(const pcre2_code *, PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, \
uint32_t, pcre2_match_data *, pcre2_match_context *, PCRE2_SPTR, \
PCRE2_SIZE, PCRE2_UCHAR *, PCRE2_SIZE *);
/* Functions for converting pattern source strings. */
#define PCRE2_CONVERT_FUNCTIONS \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_pattern_convert(PCRE2_SPTR, PCRE2_SIZE, uint32_t, PCRE2_UCHAR **, \
PCRE2_SIZE *, pcre2_convert_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_converted_pattern_free(PCRE2_UCHAR *);
/* Functions for JIT processing */
#define PCRE2_JIT_FUNCTIONS \
PCRE2_EXP_DECL int pcre2_jit_compile(pcre2_code *, uint32_t); \
PCRE2_EXP_DECL int pcre2_jit_match(const pcre2_code *, \
PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, uint32_t, \
pcre2_match_data *, pcre2_match_context *); \
PCRE2_EXP_DECL void pcre2_jit_free_unused_memory(pcre2_general_context *); \
PCRE2_EXP_DECL \
pcre2_jit_stack *pcre2_jit_stack_create(PCRE2_SIZE, PCRE2_SIZE, \
pcre2_general_context *); \
PCRE2_EXP_DECL void pcre2_jit_stack_assign(pcre2_match_context *, \
pcre2_jit_callback, void *); \
PCRE2_EXP_DECL void pcre2_jit_stack_free(pcre2_jit_stack *);
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_jit_compile(pcre2_code *, uint32_t); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_jit_match(const pcre2_code *, PCRE2_SPTR, PCRE2_SIZE, PCRE2_SIZE, \
uint32_t, pcre2_match_data *, pcre2_match_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_jit_free_unused_memory(pcre2_general_context *); \
PCRE2_EXP_DECL pcre2_jit_stack PCRE2_CALL_CONVENTION \
*pcre2_jit_stack_create(PCRE2_SIZE, PCRE2_SIZE, pcre2_general_context *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_jit_stack_assign(pcre2_match_context *, pcre2_jit_callback, void *); \
PCRE2_EXP_DECL void PCRE2_CALL_CONVENTION \
pcre2_jit_stack_free(pcre2_jit_stack *);
/* Other miscellaneous functions. */
#define PCRE2_OTHER_FUNCTIONS \
PCRE2_EXP_DECL int pcre2_get_error_message(int, PCRE2_UCHAR *, PCRE2_SIZE); \
PCRE2_EXP_DECL \
const uint8_t *pcre2_maketables(pcre2_general_context *); \
PCRE2_EXP_DECL int PCRE2_CALL_CONVENTION \
pcre2_get_error_message(int, PCRE2_UCHAR *, PCRE2_SIZE); \
PCRE2_EXP_DECL const uint8_t PCRE2_CALL_CONVENTION \
*pcre2_maketables(pcre2_general_context *); \
/* Define macros that generate width-specific names from generic versions. The
@ -576,6 +673,7 @@ pcre2_compile are called by application code. */
#define pcre2_real_code PCRE2_SUFFIX(pcre2_real_code_)
#define pcre2_real_general_context PCRE2_SUFFIX(pcre2_real_general_context_)
#define pcre2_real_compile_context PCRE2_SUFFIX(pcre2_real_compile_context_)
#define pcre2_real_convert_context PCRE2_SUFFIX(pcre2_real_convert_context_)
#define pcre2_real_match_context PCRE2_SUFFIX(pcre2_real_match_context_)
#define pcre2_real_jit_stack PCRE2_SUFFIX(pcre2_real_jit_stack_)
#define pcre2_real_match_data PCRE2_SUFFIX(pcre2_real_match_data_)
@ -587,6 +685,7 @@ pcre2_compile are called by application code. */
#define pcre2_callout_enumerate_block PCRE2_SUFFIX(pcre2_callout_enumerate_block_)
#define pcre2_general_context PCRE2_SUFFIX(pcre2_general_context_)
#define pcre2_compile_context PCRE2_SUFFIX(pcre2_compile_context_)
#define pcre2_convert_context PCRE2_SUFFIX(pcre2_convert_context_)
#define pcre2_match_context PCRE2_SUFFIX(pcre2_match_context_)
#define pcre2_match_data PCRE2_SUFFIX(pcre2_match_data_)
@ -595,12 +694,17 @@ pcre2_compile are called by application code. */
#define pcre2_callout_enumerate PCRE2_SUFFIX(pcre2_callout_enumerate_)
#define pcre2_code_copy PCRE2_SUFFIX(pcre2_code_copy_)
#define pcre2_code_copy_with_tables PCRE2_SUFFIX(pcre2_code_copy_with_tables_)
#define pcre2_code_free PCRE2_SUFFIX(pcre2_code_free_)
#define pcre2_compile PCRE2_SUFFIX(pcre2_compile_)
#define pcre2_compile_context_copy PCRE2_SUFFIX(pcre2_compile_context_copy_)
#define pcre2_compile_context_create PCRE2_SUFFIX(pcre2_compile_context_create_)
#define pcre2_compile_context_free PCRE2_SUFFIX(pcre2_compile_context_free_)
#define pcre2_config PCRE2_SUFFIX(pcre2_config_)
#define pcre2_convert_context_copy PCRE2_SUFFIX(pcre2_convert_context_copy_)
#define pcre2_convert_context_create PCRE2_SUFFIX(pcre2_convert_context_create_)
#define pcre2_convert_context_free PCRE2_SUFFIX(pcre2_convert_context_free_)
#define pcre2_converted_pattern_free PCRE2_SUFFIX(pcre2_converted_pattern_free_)
#define pcre2_dfa_match PCRE2_SUFFIX(pcre2_dfa_match_)
#define pcre2_general_context_copy PCRE2_SUFFIX(pcre2_general_context_copy_)
#define pcre2_general_context_create PCRE2_SUFFIX(pcre2_general_context_create_)
@ -624,6 +728,7 @@ pcre2_compile are called by application code. */
#define pcre2_match_data_create PCRE2_SUFFIX(pcre2_match_data_create_)
#define pcre2_match_data_create_from_pattern PCRE2_SUFFIX(pcre2_match_data_create_from_pattern_)
#define pcre2_match_data_free PCRE2_SUFFIX(pcre2_match_data_free_)
#define pcre2_pattern_convert PCRE2_SUFFIX(pcre2_pattern_convert_)
#define pcre2_pattern_info PCRE2_SUFFIX(pcre2_pattern_info_)
#define pcre2_serialize_decode PCRE2_SUFFIX(pcre2_serialize_decode_)
#define pcre2_serialize_encode PCRE2_SUFFIX(pcre2_serialize_encode_)
@ -632,14 +737,17 @@ pcre2_compile are called by application code. */
#define pcre2_set_bsr PCRE2_SUFFIX(pcre2_set_bsr_)
#define pcre2_set_callout PCRE2_SUFFIX(pcre2_set_callout_)
#define pcre2_set_character_tables PCRE2_SUFFIX(pcre2_set_character_tables_)
#define pcre2_set_compile_extra_options PCRE2_SUFFIX(pcre2_set_compile_extra_options_)
#define pcre2_set_compile_recursion_guard PCRE2_SUFFIX(pcre2_set_compile_recursion_guard_)
#define pcre2_set_depth_limit PCRE2_SUFFIX(pcre2_set_depth_limit_)
#define pcre2_set_glob_escape PCRE2_SUFFIX(pcre2_set_glob_escape_)
#define pcre2_set_glob_separator PCRE2_SUFFIX(pcre2_set_glob_separator_)
#define pcre2_set_heap_limit PCRE2_SUFFIX(pcre2_set_heap_limit_)
#define pcre2_set_match_limit PCRE2_SUFFIX(pcre2_set_match_limit_)
#define pcre2_set_max_pattern_length PCRE2_SUFFIX(pcre2_set_max_pattern_length_)
#define pcre2_set_newline PCRE2_SUFFIX(pcre2_set_newline_)
#define pcre2_set_parens_nest_limit PCRE2_SUFFIX(pcre2_set_parens_nest_limit_)
#define pcre2_set_offset_limit PCRE2_SUFFIX(pcre2_set_offset_limit_)
#define pcre2_set_recursion_limit PCRE2_SUFFIX(pcre2_set_recursion_limit_)
#define pcre2_set_recursion_memory_management PCRE2_SUFFIX(pcre2_set_recursion_memory_management_)
#define pcre2_substitute PCRE2_SUFFIX(pcre2_substitute_)
#define pcre2_substring_copy_byname PCRE2_SUFFIX(pcre2_substring_copy_byname_)
#define pcre2_substring_copy_bynumber PCRE2_SUFFIX(pcre2_substring_copy_bynumber_)
@ -653,6 +761,11 @@ pcre2_compile are called by application code. */
#define pcre2_substring_nametable_scan PCRE2_SUFFIX(pcre2_substring_nametable_scan_)
#define pcre2_substring_number_from_name PCRE2_SUFFIX(pcre2_substring_number_from_name_)
/* Keep this old function name for backwards compatibility */
#define pcre2_set_recursion_limit PCRE2_SUFFIX(pcre2_set_recursion_limit_)
/* Keep this obsolete function for backwards compatibility: it is now a noop. */
#define pcre2_set_recursion_memory_management PCRE2_SUFFIX(pcre2_set_recursion_memory_management_)
/* Now generate all three sets of width-specific structures and function
prototypes. */
@ -663,6 +776,8 @@ PCRE2_STRUCTURE_LIST \
PCRE2_GENERAL_INFO_FUNCTIONS \
PCRE2_GENERAL_CONTEXT_FUNCTIONS \
PCRE2_COMPILE_CONTEXT_FUNCTIONS \
PCRE2_CONVERT_CONTEXT_FUNCTIONS \
PCRE2_CONVERT_FUNCTIONS \
PCRE2_MATCH_CONTEXT_FUNCTIONS \
PCRE2_COMPILE_FUNCTIONS \
PCRE2_PATTERN_INFO_FUNCTIONS \
@ -692,6 +807,7 @@ PCRE2_TYPES_STRUCTURES_AND_FUNCTIONS
#undef PCRE2_GENERAL_INFO_FUNCTIONS
#undef PCRE2_GENERAL_CONTEXT_FUNCTIONS
#undef PCRE2_COMPILE_CONTEXT_FUNCTIONS
#undef PCRE2_CONVERT_CONTEXT_FUNCTIONS
#undef PCRE2_MATCH_CONTEXT_FUNCTIONS
#undef PCRE2_COMPILE_FUNCTIONS
#undef PCRE2_PATTERN_INFO_FUNCTIONS
@ -729,4 +845,6 @@ PCRE2_SUFFIX a no-op. Otherwise, generate an error. */
} /* extern "C" */
#endif
#endif /* End of pcre2.h */
#endif /* PCRE2_H_IDEMPOTENT_GUARD */
/* End of pcre2.h */

View File

@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -588,7 +588,7 @@ for(;;)
case OP_ASSERTBACK:
case OP_ASSERTBACK_NOT:
case OP_ONCE:
case OP_ONCE_NC:
/* Atomic sub-patterns and assertions can always auto-possessify their
last iterator. However, if the group was entered as a result of checking
a previous iterator, this is not possible. */
@ -600,12 +600,14 @@ for(;;)
continue;
case OP_ONCE:
case OP_ONCE_NC:
case OP_BRA:
case OP_CBRA:
next_code = code + GET(code, 1);
code += PRIV(OP_lengths)[c];
/* Check each branch. We have to recurse a level for all but the last
branch. */
while (*next_code == OP_ALT)
{
if (!compare_opcodes(code, utf, cb, base_list, base_end, rec_limit))
@ -621,8 +623,8 @@ for(;;)
case OP_BRAMINZERO:
next_code = code + 1;
if (*next_code != OP_BRA && *next_code != OP_CBRA
&& *next_code != OP_ONCE && *next_code != OP_ONCE_NC) return FALSE;
if (*next_code != OP_BRA && *next_code != OP_CBRA &&
*next_code != OP_ONCE) return FALSE;
do next_code += GET(next_code, 1); while (*next_code == OP_ALT);
@ -1046,8 +1048,10 @@ but some compilers complain about an unreachable statement. */
/* Replaces single character iterations with their possessive alternatives
if appropriate. This function modifies the compiled opcode! Hitting a
non-existant opcode may indicate a bug in PCRE2, but it can also be caused if a
bad UTF string was compiled with PCRE2_NO_UTF_CHECK.
non-existent opcode may indicate a bug in PCRE2, but it can also be caused if a
bad UTF string was compiled with PCRE2_NO_UTF_CHECK. The rec_limit catches
overly complicated or large patterns. In these cases, the check just stops,
leaving the remainder of the pattern unpossessified.
Arguments:
code points to start of the byte code
@ -1061,17 +1065,17 @@ Returns: 0 for success
int
PRIV(auto_possessify)(PCRE2_UCHAR *code, BOOL utf, const compile_block *cb)
{
register PCRE2_UCHAR c;
PCRE2_UCHAR c;
PCRE2_SPTR end;
PCRE2_UCHAR *repeat_opcode;
uint32_t list[8];
int rec_limit;
int rec_limit = 1000; /* Was 10,000 but clang+ASAN uses a lot of stack. */
for (;;)
{
c = *code;
if (c > OP_TABLE_LENGTH) return -1; /* Something gone wrong */
if (c >= OP_TABLE_LENGTH) return -1; /* Something gone wrong */
if (c >= OP_STAR && c <= OP_TYPEPOSUPTO)
{
@ -1080,7 +1084,6 @@ for (;;)
get_chr_property_list(code, utf, cb->fcc, list) : NULL;
list[1] = c == OP_STAR || c == OP_PLUS || c == OP_QUERY || c == OP_UPTO;
rec_limit = 1000;
if (end != NULL && compare_opcodes(end, utf, cb, list, end, &rec_limit))
{
switch(c)
@ -1137,7 +1140,6 @@ for (;;)
list[1] = (c & 1) == 0;
rec_limit = 1000;
if (compare_opcodes(end, utf, cb, list, end, &rec_limit))
{
switch (c)

File diff suppressed because it is too large.

View File

@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -84,13 +84,14 @@ if (where == NULL) /* Requests a length */
return PCRE2_ERROR_BADOPTION;
case PCRE2_CONFIG_BSR:
case PCRE2_CONFIG_HEAPLIMIT:
case PCRE2_CONFIG_JIT:
case PCRE2_CONFIG_LINKSIZE:
case PCRE2_CONFIG_MATCHLIMIT:
case PCRE2_CONFIG_DEPTHLIMIT:
case PCRE2_CONFIG_NEWLINE:
case PCRE2_CONFIG_PARENSLIMIT:
case PCRE2_CONFIG_RECURSIONLIMIT:
case PCRE2_CONFIG_STACKRECURSE:
case PCRE2_CONFIG_STACKRECURSE: /* Obsolete */
case PCRE2_CONFIG_UNICODE:
return sizeof(uint32_t);
@ -116,6 +117,10 @@ switch (what)
#endif
break;
case PCRE2_CONFIG_HEAPLIMIT:
*((uint32_t *)where) = HEAP_LIMIT;
break;
case PCRE2_CONFIG_JIT:
#ifdef SUPPORT_JIT
*((uint32_t *)where) = 1;
@ -143,6 +148,10 @@ switch (what)
*((uint32_t *)where) = MATCH_LIMIT;
break;
case PCRE2_CONFIG_DEPTHLIMIT:
*((uint32_t *)where) = MATCH_LIMIT_DEPTH;
break;
case PCRE2_CONFIG_NEWLINE:
*((uint32_t *)where) = NEWLINE_DEFAULT;
break;
@ -151,16 +160,11 @@ switch (what)
*((uint32_t *)where) = PARENS_NEST_LIMIT;
break;
case PCRE2_CONFIG_RECURSIONLIMIT:
*((uint32_t *)where) = MATCH_LIMIT_RECURSION;
break;
/* This is now obsolete. The stack is no longer used via recursion for
handling backtracking in pcre2_match(). */
case PCRE2_CONFIG_STACKRECURSE:
#ifdef HEAP_MATCH_RECURSE
*((uint32_t *)where) = 0;
#else
*((uint32_t *)where) = 1;
#endif
break;
case PCRE2_CONFIG_UNICODE_VERSION:

View File

@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -138,7 +138,8 @@ const pcre2_compile_context PRIV(default_compile_context) = {
PCRE2_UNSET, /* Max pattern length */
BSR_DEFAULT, /* Backslash R default */
NEWLINE_DEFAULT, /* Newline convention */
PARENS_NEST_LIMIT }; /* As it says */
PARENS_NEST_LIMIT, /* As it says */
0 }; /* Extra options */
/* The create function copies the default into the new memory, but must
override the default memory handling functions if a gcontext was provided. */
@ -161,9 +162,6 @@ when no context is supplied to a match function. */
const pcre2_match_context PRIV(default_match_context) = {
{ default_malloc, default_free, NULL },
#ifdef HEAP_MATCH_RECURSE
{ default_malloc, default_free, NULL },
#endif
#ifdef SUPPORT_JIT
NULL,
NULL,
@ -171,8 +169,9 @@ const pcre2_match_context PRIV(default_match_context) = {
NULL,
NULL,
PCRE2_UNSET, /* Offset limit */
HEAP_LIMIT,
MATCH_LIMIT,
MATCH_LIMIT_RECURSION };
MATCH_LIMIT_DEPTH };
/* The create function copies the default into the new memory, but must
override the default memory handling functions if a gcontext was provided. */
@ -190,6 +189,36 @@ return mcontext;
}
/* A default convert context is set up to save having to initialize at run time
when no context is supplied to the convert function. */
const pcre2_convert_context PRIV(default_convert_context) = {
{ default_malloc, default_free, NULL }, /* Default memory handling */
#ifdef _WIN32
CHAR_BACKSLASH, /* Default path separator */
CHAR_GRAVE_ACCENT /* Default escape character */
#else /* Not Windows */
CHAR_SLASH, /* Default path separator */
CHAR_BACKSLASH /* Default escape character */
#endif
};
/* The create function copies the default into the new memory, but must
override the default memory handling functions if a gcontext was provided. */
PCRE2_EXP_DEFN pcre2_convert_context * PCRE2_CALL_CONVENTION
pcre2_convert_context_create(pcre2_general_context *gcontext)
{
pcre2_convert_context *ccontext = PRIV(memctl_malloc)(
sizeof(pcre2_real_convert_context), (pcre2_memctl *)gcontext);
if (ccontext == NULL) return NULL;
*ccontext = PRIV(default_convert_context);
if (gcontext != NULL)
*((pcre2_memctl *)ccontext) = *((pcre2_memctl *)gcontext);
return ccontext;
}
/*************************************************
* Context copy functions *
*************************************************/
@ -231,11 +260,22 @@ return new;
PCRE2_EXP_DEFN pcre2_convert_context * PCRE2_CALL_CONVENTION
pcre2_convert_context_copy(pcre2_convert_context *ccontext)
{
pcre2_convert_context *new =
ccontext->memctl.malloc(sizeof(pcre2_real_convert_context),
ccontext->memctl.memory_data);
if (new == NULL) return NULL;
memcpy(new, ccontext, sizeof(pcre2_real_convert_context));
return new;
}
/*************************************************
* Context free functions *
*************************************************/
PCRE2_EXP_DEFN void PCRE2_CALL_CONVENTION
pcre2_general_context_free(pcre2_general_context *gcontext)
{
@ -260,6 +300,12 @@ if (mcontext != NULL)
}
PCRE2_EXP_DEFN void PCRE2_CALL_CONVENTION
pcre2_convert_context_free(pcre2_convert_context *ccontext)
{
if (ccontext != NULL)
ccontext->memctl.free(ccontext, ccontext->memctl.memory_data);
}
/*************************************************
@ -271,7 +317,7 @@ data is given. Only some of the functions are able to test the validity of the
data. */
/* ------------ Compile contexts ------------ */
/* ------------ Compile context ------------ */
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_character_tables(pcre2_compile_context *ccontext,
@ -313,6 +359,7 @@ switch(newline)
case PCRE2_NEWLINE_CRLF:
case PCRE2_NEWLINE_ANY:
case PCRE2_NEWLINE_ANYCRLF:
case PCRE2_NEWLINE_NUL:
ccontext->newline_convention = newline;
return 0;
@ -328,6 +375,13 @@ ccontext->parens_nest_limit = limit;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_compile_extra_options(pcre2_compile_context *ccontext, uint32_t options)
{
ccontext->extra_options = options;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_compile_recursion_guard(pcre2_compile_context *ccontext,
int (*guard)(uint32_t, void *), void *user_data)
@ -338,7 +392,7 @@ return 0;
}
/* ------------ Match contexts ------------ */
/* ------------ Match context ------------ */
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_callout(pcre2_match_context *mcontext,
@ -349,6 +403,13 @@ mcontext->callout_data = callout_data;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_heap_limit(pcre2_match_context *mcontext, uint32_t limit)
{
mcontext->heap_limit = limit;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_match_limit(pcre2_match_context *mcontext, uint32_t limit)
{
@ -356,6 +417,13 @@ mcontext->match_limit = limit;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_depth_limit(pcre2_match_context *mcontext, uint32_t limit)
{
mcontext->depth_limit = limit;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_offset_limit(pcre2_match_context *mcontext, PCRE2_SIZE limit)
{
@ -363,11 +431,13 @@ mcontext->offset_limit = limit;
return 0;
}
/* This function became obsolete at release 10.30. It is kept as a synonym for
backwards compatibility. */
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_recursion_limit(pcre2_match_context *mcontext, uint32_t limit)
{
mcontext->recursion_limit = limit;
return 0;
return pcre2_set_depth_limit(mcontext, limit);
}
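/* Illustrative sketch, not part of pcre2_context.c: new code is expected to
use the 10.30 names directly when limiting a match, e.g.

  pcre2_match_context *mctx = pcre2_match_context_create(NULL);
  pcre2_set_heap_limit(mctx, 1024);    (heap limit is given in kibibytes)
  pcre2_set_depth_limit(mctx, 10000);  (replaces pcre2_set_recursion_limit)
  ... pass mctx to pcre2_match() or pcre2_dfa_match() ...
  pcre2_match_context_free(mctx);
*/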
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
@ -375,17 +445,32 @@ pcre2_set_recursion_memory_management(pcre2_match_context *mcontext,
void *(*mymalloc)(size_t, void *), void (*myfree)(void *, void *),
void *mydata)
{
#ifdef HEAP_MATCH_RECURSE
mcontext->stack_memctl.malloc = mymalloc;
mcontext->stack_memctl.free = myfree;
mcontext->stack_memctl.memory_data = mydata;
#else
(void)mcontext;
(void)mymalloc;
(void)myfree;
(void)mydata;
#endif
return 0;
}
/* ------------ Convert context ------------ */
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_glob_separator(pcre2_convert_context *ccontext, uint32_t separator)
{
if (separator != CHAR_SLASH && separator != CHAR_BACKSLASH &&
separator != CHAR_DOT) return PCRE2_ERROR_BADDATA;
ccontext->glob_separator = separator;
return 0;
}
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_set_glob_escape(pcre2_convert_context *ccontext, uint32_t escape)
{
if (escape > 255 || (escape != 0 && !ispunct(escape)))
return PCRE2_ERROR_BADDATA;
ccontext->glob_escape = escape;
return 0;
}
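/* Illustrative sketch, not part of pcre2_context.c: a caller that wants
Windows-style glob syntax regardless of platform could set the separator and
escape explicitly, e.g.

  pcre2_convert_context *cvctx = pcre2_convert_context_create(NULL);
  pcre2_set_glob_separator(cvctx, '\\');   (must be slash, backslash or dot)
  pcre2_set_glob_escape(cvctx, '`');       (0 or a punctuation character < 256)
  ... pass cvctx to pcre2_pattern_convert() ...
  pcre2_convert_context_free(cvctx);
*/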
/* End of pcre2_context.c */

View File

@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -83,7 +83,7 @@ in others, so I abandoned this code. */
#include "pcre2_internal.h"
#define PUBLIC_DFA_MATCH_OPTIONS \
(PCRE2_ANCHORED|PCRE2_NOTBOL|PCRE2_NOTEOL|PCRE2_NOTEMPTY| \
(PCRE2_ANCHORED|PCRE2_ENDANCHORED|PCRE2_NOTBOL|PCRE2_NOTEOL|PCRE2_NOTEMPTY| \
PCRE2_NOTEMPTY_ATSTART|PCRE2_NO_UTF_CHECK|PCRE2_PARTIAL_HARD| \
PCRE2_PARTIAL_SOFT|PCRE2_DFA_SHORTEST|PCRE2_DFA_RESTART)
@ -172,7 +172,7 @@ static const uint8_t coptable[] = {
0, /* Assert not */
0, /* Assert behind */
0, /* Assert behind not */
0, 0, /* ONCE, ONCE_NC */
0, /* ONCE */
0, 0, 0, 0, 0, /* BRA, BRAPOS, CBRA, CBRAPOS, COND */
0, 0, 0, 0, 0, /* SBRA, SBRAPOS, SCBRA, SCBRAPOS, SCOND */
0, 0, /* CREF, DNCREF */
@ -245,7 +245,7 @@ static const uint8_t poptable[] = {
0, /* Assert not */
0, /* Assert behind */
0, /* Assert behind not */
0, 0, /* ONCE, ONCE_NC */
0, /* ONCE */
0, 0, 0, 0, 0, /* BRA, BRAPOS, CBRA, CBRAPOS, COND */
0, 0, 0, 0, 0, /* SBRA, SBRAPOS, SCBRA, SCBRAPOS, SCOND */
0, 0, /* CREF, DNCREF */
@ -371,18 +371,14 @@ internal_dfa_match(
uint32_t offsetcount,
int *workspace,
int wscount,
int rlevel)
uint32_t rlevel)
{
stateblock *active_states, *new_states, *temp_states;
stateblock *next_active_state, *next_new_state;
const uint8_t *ctypes, *lcc, *fcc;
PCRE2_SPTR ptr;
PCRE2_SPTR end_code;
PCRE2_SPTR first_op;
dfa_recursion_info new_recursive;
int active_count, new_count, match_count;
/* Some fields in the mb block are frequently referenced, so we load them into
@ -400,7 +396,8 @@ BOOL utf = FALSE;
BOOL reset_could_continue = FALSE;
rlevel++;
if (mb->match_call_count++ >= mb->match_limit) return PCRE2_ERROR_MATCHLIMIT;
if (rlevel++ > mb->match_limit_depth) return PCRE2_ERROR_DEPTHLIMIT;
offsetcount &= (uint32_t)(-2); /* Round down */
wscount -= 2;
@ -417,21 +414,15 @@ active_states = (stateblock *)(workspace + 2);
next_new_state = new_states = active_states + wscount;
new_count = 0;
first_op = this_start_code + 1 + LINK_SIZE +
((*this_start_code == OP_CBRA || *this_start_code == OP_SCBRA ||
*this_start_code == OP_CBRAPOS || *this_start_code == OP_SCBRAPOS)
? IMM2_SIZE:0);
/* The first thing in any (sub) pattern is a bracket of some sort. Push all
the alternative states onto the list, and find out where the end is. This
makes is possible to use this function recursively, when we want to stop at a
matching internal ket rather than at the end.
If the first opcode in the first alternative is OP_REVERSE, we are dealing with
a backward assertion. In that case, we have to find out the maximum amount to
move back, and set up each alternative appropriately. */
If we are dealing with a backward assertion we have to find out the maximum
amount to move back, and set up each alternative appropriately. */
if (*first_op == OP_REVERSE)
if (*this_start_code == OP_ASSERTBACK || *this_start_code == OP_ASSERTBACK_NOT)
{
size_t max_back = 0;
size_t gone_back;
@ -476,15 +467,17 @@ if (*first_op == OP_REVERSE)
if (current_subject < mb->start_used_ptr)
mb->start_used_ptr = current_subject;
/* Now we can process the individual branches. */
/* Now we can process the individual branches. There will be an OP_REVERSE at
the start of each branch, except when the length of the branch is zero. */
end_code = this_start_code;
do
{
size_t back = (size_t)GET(end_code, 2+LINK_SIZE);
uint32_t revlen = (end_code[1+LINK_SIZE] == OP_REVERSE)? 1 + LINK_SIZE : 0;
size_t back = (revlen == 0)? 0 : (size_t)GET(end_code, 2+LINK_SIZE);
if (back <= gone_back)
{
int bstate = (int)(end_code - start_code + 2 + 2*LINK_SIZE);
int bstate = (int)(end_code - start_code + 1 + LINK_SIZE + revlen);
ADD_NEW_DATA(-bstate, 0, (int)(gone_back - back));
}
end_code += GET(end_code, 1);
@ -697,7 +690,7 @@ for (;;)
case OP_TABLE_LENGTH +
((sizeof(coptable) == OP_TABLE_LENGTH) &&
(sizeof(poptable) == OP_TABLE_LENGTH)):
break;
return 0;
/* ========================================================================== */
/* Reached a closing bracket. If not at the end of the pattern, carry
@ -1386,8 +1379,46 @@ for (;;)
if (!utf) d = *nptr; else { GETCHARLEN(d, nptr, dlen); }
rgb = UCD_GRAPHBREAK(d);
if ((PRIV(ucp_gbtable)[lgb] & (1u << rgb)) == 0) break;
ncount++;
/* Not breaking between Regional Indicators is allowed only if
there are an even number of preceding RIs. */
if (lgb == ucp_gbRegionalIndicator &&
rgb == ucp_gbRegionalIndicator)
{
int ricount = 0;
PCRE2_SPTR bptr = nptr - 1;
#ifdef SUPPORT_UNICODE
if (utf) BACKCHAR(bptr);
#endif
/* bptr is pointing to the left-hand character */
while (bptr > mb->start_subject)
{
bptr--;
#ifdef SUPPORT_UNICODE
if (utf)
{
BACKCHAR(bptr);
GETCHAR(d, bptr);
}
else
#endif
d = *bptr;
if (UCD_GRAPHBREAK(d) != ucp_gbRegionalIndicator) break;
ricount++;
}
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
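/* Worked example (editorial note, not part of the source): for a two-character
flag sequence such as U+1F1EF U+1F1F5, the boundary between the two Regional
Indicators has zero preceding RIs (even), so no break is made and they form one
cluster; a third RI would see one preceding RI (odd), forcing a break so that
it starts the next flag. */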
/* If Extend follows E_Base[_GAZ] do not update lgb; this allows
any number of Extend before a following E_Modifier. */
if (rgb != ucp_gbExtend ||
(lgb != ucp_gbE_Base && lgb != ucp_gbE_Base_GAZ))
lgb = rgb;
ncount++;
nptr += dlen;
}
count++;
@ -1648,8 +1679,46 @@ for (;;)
if (!utf) d = *nptr; else { GETCHARLEN(d, nptr, dlen); }
rgb = UCD_GRAPHBREAK(d);
if ((PRIV(ucp_gbtable)[lgb] & (1u << rgb)) == 0) break;
ncount++;
/* Not breaking between Regional Indicators is allowed only if
there are an even number of preceding RIs. */
if (lgb == ucp_gbRegionalIndicator &&
rgb == ucp_gbRegionalIndicator)
{
int ricount = 0;
PCRE2_SPTR bptr = nptr - 1;
#ifdef SUPPORT_UNICODE
if (utf) BACKCHAR(bptr);
#endif
/* bptr is pointing to the left-hand character */
while (bptr > mb->start_subject)
{
bptr--;
#ifdef SUPPORT_UNICODE
if (utf)
{
BACKCHAR(bptr);
GETCHAR(d, bptr);
}
else
#endif
d = *bptr;
if (UCD_GRAPHBREAK(d) != ucp_gbRegionalIndicator) break;
ricount++;
}
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
/* If Extend follows E_Base[_GAZ] do not update lgb; this allows
any number of Extend before a following E_Modifier. */
if (rgb != ucp_gbExtend ||
(lgb != ucp_gbE_Base && lgb != ucp_gbE_Base_GAZ))
lgb = rgb;
ncount++;
nptr += dlen;
}
ADD_NEW_DATA(-(state_offset + count), 0, ncount);
@ -1919,8 +1988,46 @@ for (;;)
if (!utf) d = *nptr; else { GETCHARLEN(d, nptr, dlen); }
rgb = UCD_GRAPHBREAK(d);
if ((PRIV(ucp_gbtable)[lgb] & (1u << rgb)) == 0) break;
ncount++;
/* Not breaking between Regional Indicators is allowed only if
there are an even number of preceding RIs. */
if (lgb == ucp_gbRegionalIndicator &&
rgb == ucp_gbRegionalIndicator)
{
int ricount = 0;
PCRE2_SPTR bptr = nptr - 1;
#ifdef SUPPORT_UNICODE
if (utf) BACKCHAR(bptr);
#endif
/* bptr is pointing to the left-hand character */
while (bptr > mb->start_subject)
{
bptr--;
#ifdef SUPPORT_UNICODE
if (utf)
{
BACKCHAR(bptr);
GETCHAR(d, bptr);
}
else
#endif
d = *bptr;
if (UCD_GRAPHBREAK(d) != ucp_gbRegionalIndicator) break;
ricount++;
}
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
/* If Extend follows E_Base[_GAZ] do not update lgb; this allows
any number of Extend before a following E_Modifier. */
if (rgb != ucp_gbExtend ||
(lgb != ucp_gbE_Base && lgb != ucp_gbE_Base_GAZ))
lgb = rgb;
ncount++;
nptr += dlen;
}
if (nptr >= end_subject && (mb->moptions & PCRE2_PARTIAL_HARD) != 0)
@ -2109,8 +2216,46 @@ for (;;)
if (!utf) d = *nptr; else { GETCHARLEN(d, nptr, dlen); }
rgb = UCD_GRAPHBREAK(d);
if ((PRIV(ucp_gbtable)[lgb] & (1u << rgb)) == 0) break;
ncount++;
/* Not breaking between Regional Indicators is allowed only if
there are an even number of preceding RIs. */
if (lgb == ucp_gbRegionalIndicator &&
rgb == ucp_gbRegionalIndicator)
{
int ricount = 0;
PCRE2_SPTR bptr = nptr - 1;
#ifdef SUPPORT_UNICODE
if (utf) BACKCHAR(bptr);
#endif
/* bptr is pointing to the left-hand character */
while (bptr > mb->start_subject)
{
bptr--;
#ifdef SUPPORT_UNICODE
if (utf)
{
BACKCHAR(bptr);
GETCHAR(d, bptr);
}
else
#endif
d = *bptr;
if (UCD_GRAPHBREAK(d) != ucp_gbRegionalIndicator) break;
ricount++;
}
if ((ricount & 1) != 0) break; /* Grapheme break required */
}
/* If Extend follows E_Base[_GAZ] do not update lgb; this allows
any number of Extend before a following E_Modifier. */
if (rgb != ucp_gbExtend ||
(lgb != ucp_gbE_Base && lgb != ucp_gbE_Base_GAZ))
lgb = rgb;
ncount++;
nptr += dlen;
}
if (nptr >= end_subject && (mb->moptions & PCRE2_PARTIAL_HARD) != 0)
@ -2136,6 +2281,7 @@ for (;;)
case 0x2029:
#endif /* Not EBCDIC */
if (mb->bsr_convention == PCRE2_BSR_ANYCRLF) break;
/* Fall through */
case CHAR_LF:
ADD_NEW(state_offset + 1, 0);
@ -2539,11 +2685,13 @@ for (;;)
if (isinclass)
{
int max = (int)GET2(ecode, 1 + IMM2_SIZE);
if (*ecode == OP_CRPOSRANGE)
if (*ecode == OP_CRPOSRANGE && count >= (int)GET2(ecode, 1))
{
active_count--; /* Remove non-match possibility */
next_active_state--;
}
if (++count >= max && max != 0) /* Max 0 => no limit */
{ ADD_NEW(next_state_offset + 1 + 2 * IMM2_SIZE, 0); }
else
@ -2591,7 +2739,7 @@ for (;;)
sizeof(local_workspace)/sizeof(int), /* size of same */
rlevel); /* function recursion level */
if (rc == PCRE2_ERROR_DFA_UITEM) return rc;
if (rc < 0 && rc != PCRE2_ERROR_NOMATCH) return rc;
if ((rc >= 0) == (codevalue == OP_ASSERT || codevalue == OP_ASSERTBACK))
{ ADD_ACTIVE((int)(endasscode + LINK_SIZE + 1 - start_code), 0); }
}
@ -2710,7 +2858,7 @@ for (;;)
sizeof(local_workspace)/sizeof(int), /* size of same */
rlevel); /* function recursion level */
if (rc == PCRE2_ERROR_DFA_UITEM) return rc;
if (rc < 0 && rc != PCRE2_ERROR_NOMATCH) return rc;
if ((rc >= 0) ==
(condcode == OP_ASSERT || condcode == OP_ASSERTBACK))
{ ADD_ACTIVE((int)(endasscode + LINK_SIZE + 1 - start_code), 0); }
@ -2889,7 +3037,6 @@ for (;;)
/*-----------------------------------------------------------------*/
case OP_ONCE:
case OP_ONCE_NC:
{
PCRE2_SIZE local_offsets[2];
int local_workspace[1000];
@ -3069,7 +3216,7 @@ for (;;)
)
)
match_count = PCRE2_ERROR_PARTIAL;
break; /* In effect, "return", but see the comment below */
break; /* Exit from loop along the subject string */
}
/* One or more states are active for the next character. */
@ -3077,11 +3224,13 @@ for (;;)
ptr += clen; /* Advance to next subject character */
} /* Loop to move along the subject string */
/* Control gets here from "break" a few lines above. We do it this way because
if we use "return" above, we have compiler trouble. Some compilers warn if
there's nothing here because they think the function doesn't return a value. On
the other hand, if we put a dummy statement here, some more clever compilers
complain that it can't be reached. Sigh. */
/* Control gets here from "break" a few lines above. If we have a match and
PCRE2_ENDANCHORED is set, the match fails. */
if (match_count >= 0 &&
((mb->moptions | mb->poptions) & PCRE2_ENDANCHORED) != 0 &&
ptr < end_subject)
match_count = PCRE2_ERROR_NOMATCH;
return match_count;
}
@ -3115,7 +3264,7 @@ Returns: > 0 => number of match offset pairs placed in offsets
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_dfa_match(const pcre2_code *code, PCRE2_SPTR subject, PCRE2_SIZE length,
PCRE2_SIZE start_offset, uint32_t options, pcre2_match_data *match_data,
pcre2_match_context *mcontext, int *workspace, size_t wscount)
pcre2_match_context *mcontext, int *workspace, PCRE2_SIZE wscount)
{
const pcre2_real_code *re = (const pcre2_real_code *)code;
@ -3154,6 +3303,13 @@ if (re == NULL || subject == NULL || workspace == NULL || match_data == NULL)
if (wscount < 20) return PCRE2_ERROR_DFA_WSSIZE;
if (start_offset > length) return PCRE2_ERROR_BADOFFSET;
/* Partial matching and PCRE2_ENDANCHORED are currently not allowed at the same
time. */
if ((options & (PCRE2_PARTIAL_HARD|PCRE2_PARTIAL_SOFT)) != 0 &&
((re->overall_options | options) & PCRE2_ENDANCHORED) != 0)
return PCRE2_ERROR_BADOPTION;
/* Check that the first field in the block is the magic number. If it is not,
return with PCRE2_ERROR_BADMAGIC. */
@ -3216,6 +3372,8 @@ if (mcontext == NULL)
{
mb->callout = NULL;
mb->memctl = re->memctl;
mb->match_limit = PRIV(default_match_context).match_limit;
mb->match_limit_depth = PRIV(default_match_context).depth_limit;
}
else
{
@ -3228,8 +3386,16 @@ else
mb->callout = mcontext->callout;
mb->callout_data = mcontext->callout_data;
mb->memctl = mcontext->memctl;
mb->match_limit = mcontext->match_limit;
mb->match_limit_depth = mcontext->depth_limit;
}
if (mb->match_limit > re->limit_match)
mb->match_limit = re->limit_match;
if (mb->match_limit_depth > re->limit_depth)
mb->match_limit_depth = re->limit_depth;
mb->start_code = (PCRE2_UCHAR *)((uint8_t *)re + sizeof(pcre2_real_code)) +
re->name_count * re->name_entry_size;
mb->tables = re->tables;
@ -3238,6 +3404,7 @@ mb->end_subject = end_subject;
mb->start_offset = start_offset;
mb->moptions = options;
mb->poptions = re->overall_options;
mb->match_call_count = 0;
/* Process the \R and newline settings. */
@ -3255,6 +3422,11 @@ switch(re->newline_convention)
mb->nl[0] = CHAR_NL;
break;
case PCRE2_NEWLINE_NUL:
mb->nllen = 1;
mb->nl[0] = CHAR_NUL;
break;
case PCRE2_NEWLINE_CRLF:
mb->nllen = 2;
mb->nl[0] = CHAR_CR;
@ -3321,15 +3493,10 @@ if (utf && (options & PCRE2_NO_UTF_CHECK) == 0)
}
#endif /* SUPPORT_UNICODE */
/* Set up the first code unit to match, if available. The first_codeunit value
is never set for an anchored regular expression, but the anchoring may be
forced at run time, so we have to test for anchoring. The first code unit may
be unset for an unanchored pattern, of course. If there's no first code unit
there may be a bitmap of possible first characters. */
/* Set up the first code unit to match, if available. If there's no first code
unit there may be a bitmap of possible first characters. */
if (!anchored)
{
if ((re->flags & PCRE2_FIRSTSET) != 0)
if ((re->flags & PCRE2_FIRSTSET) != 0)
{
has_first_cu = TRUE;
first_cu = first_cu2 = (PCRE2_UCHAR)(re->first_codeunit);
@ -3342,13 +3509,11 @@ if (!anchored)
#endif
}
}
else
else
if (!startline && (re->flags & PCRE2_FIRSTMAPSET) != 0)
start_bits = re->start_bitmap;
}
/* For anchored or unanchored matches, there may be a "last known required
character" set. */
/* There may be a "last known required code unit" set. */
if ((re->flags & PCRE2_LASTSET) != 0)
{
@ -3394,8 +3559,8 @@ for (;;)
/* If firstline is TRUE, the start of the match is constrained to the first
line of a multiline string. That is, the match must be before or at the
first newline. Implement this by temporarily adjusting end_subject so that
we stop the optimization scans at a newline. If the match fails at the
newline, later code breaks this loop. */
we stop the optimization scans for a first code unit at a newline. If the
match fails at the newline, later code breaks this loop. */
if (firstline)
{
@ -3415,21 +3580,87 @@ for (;;)
end_subject = t;
}
/* Advance to a unique first code unit if there is one. */
/* Anchored: check the first code unit if one is recorded. This may seem
pointless but it can help in detecting a no match case without scanning for
the required code unit. */
if (has_first_cu)
if (anchored)
{
PCRE2_UCHAR smc;
if (first_cu != first_cu2)
while (start_match < end_subject &&
(smc = UCHAR21TEST(start_match)) != first_cu && smc != first_cu2)
start_match++;
else
while (start_match < end_subject && UCHAR21TEST(start_match) != first_cu)
start_match++;
if (has_first_cu || start_bits != NULL)
{
BOOL ok = start_match < end_subject;
if (ok)
{
PCRE2_UCHAR c = UCHAR21TEST(start_match);
ok = has_first_cu && (c == first_cu || c == first_cu2);
if (!ok && start_bits != NULL)
{
#if PCRE2_CODE_UNIT_WIDTH != 8
if (c > 255) c = 255;
#endif
ok = (start_bits[c/8] & (1 << (c&7))) != 0;
}
}
if (!ok) break;
}
}
/* Or to just after a linebreak for a multiline match */
/* Not anchored. Advance to a unique first code unit if there is one. In
8-bit mode, the use of memchr() gives a big speed up, even though we have
to call it twice in caseless mode, in order to find the earliest occurrence
of the character in either of its cases. */
else
{
if (has_first_cu)
{
if (first_cu != first_cu2) /* Caseless */
{
#if PCRE2_CODE_UNIT_WIDTH != 8
PCRE2_UCHAR smc;
while (start_match < end_subject &&
(smc = UCHAR21TEST(start_match)) != first_cu &&
smc != first_cu2)
start_match++;
#else /* 8-bit code units */
PCRE2_SPTR pp1 =
memchr(start_match, first_cu, end_subject-start_match);
PCRE2_SPTR pp2 =
memchr(start_match, first_cu2, end_subject-start_match);
if (pp1 == NULL)
start_match = (pp2 == NULL)? end_subject : pp2;
else
start_match = (pp2 == NULL || pp1 < pp2)? pp1 : pp2;
#endif
}
/* The caseful case */
else
{
#if PCRE2_CODE_UNIT_WIDTH != 8
while (start_match < end_subject && UCHAR21TEST(start_match) !=
first_cu)
start_match++;
#else
start_match = memchr(start_match, first_cu, end_subject - start_match);
if (start_match == NULL) start_match = end_subject;
#endif
}
/* If we can't find the required code unit, break the bumpalong loop,
to force a match failure, except when doing partial matching, when we
let the next cycle run at the end of the subject. To see why, consider
the pattern /(?<=abc)def/, which partially matches "abc", even though
the string does not contain the starting character "d". */
if ((mb->moptions & (PCRE2_PARTIAL_HARD|PCRE2_PARTIAL_SOFT)) == 0 &&
start_match >= end_subject)
break;
}
/* If there's no first code unit, advance to just after a linebreak for a
multiline match if required. */
else if (startline)
{
@ -3451,8 +3682,8 @@ for (;;)
start_match++;
/* If we have just passed a CR and the newline option is ANY or
ANYCRLF, and we are now at a LF, advance the match position by one more
code unit. */
ANYCRLF, and we are now at a LF, advance the match position by one
more code unit. */
if (start_match[-1] == CHAR_CR &&
(mb->nltype == NLTYPE_ANY || mb->nltype == NLTYPE_ANYCRLF) &&
@ -3462,15 +3693,16 @@ for (;;)
}
}
/* Or to a non-unique first code unit if any have been identified. The
bitmap contains only 256 bits. When code units are 16 or 32 bits wide, all
code units greater than 254 set the 255 bit. */
/* If there's no first code unit or a requirement for a multiline line
start, advance to a non-unique first code unit if any have been
identified. The bitmap contains only 256 bits. When code units are 16 or
32 bits wide, all code units greater than 254 set the 255 bit. */
else if (start_bits != NULL)
{
while (start_match < end_subject)
{
register uint32_t c = UCHAR21TEST(start_match);
uint32_t c = UCHAR21TEST(start_match);
#if PCRE2_CODE_UNIT_WIDTH != 8
if (c > 255) c = 255;
#endif
@ -3478,6 +3710,7 @@ for (;;)
start_match++;
}
}
} /* End of first code unit handling */
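The 8-bit fast path above relies on memchr() to find the first code unit; in the caseless case it runs memchr() once per case and keeps the earlier hit. The same idea in isolation, as a stand-alone sketch:

#include <string.h>
#include <stdint.h>

/* Return the earliest occurrence of either of two byte values in
   [start, end), or end if neither is present. */
static const uint8_t *find_first_cu_caseless(const uint8_t *start,
                                             const uint8_t *end,
                                             uint8_t cu, uint8_t cu_other)
{
  const uint8_t *p1 = memchr(start, cu, (size_t)(end - start));
  const uint8_t *p2 = memchr(start, cu_other, (size_t)(end - start));
  if (p1 == NULL) return (p2 == NULL)? end : p2;
  return (p2 == NULL || p1 < p2)? p1 : p2;
}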
/* Restore fudged end_subject */
@ -3510,7 +3743,7 @@ for (;;)
if (has_req_cu && end_subject - start_match < REQ_CU_MAX)
{
register PCRE2_SPTR p = start_match + (has_first_cu? 1:0);
PCRE2_SPTR p = start_match + (has_first_cu? 1:0);
/* We don't need to repeat the search if we haven't yet reached the
place we found it at last time. */
@ -3521,7 +3754,7 @@ for (;;)
{
while (p < end_subject)
{
register uint32_t pp = UCHAR21INCTEST(p);
uint32_t pp = UCHAR21INCTEST(p);
if (pp == req_cu || pp == req_cu2) { p--; break; }
}
}
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -91,13 +91,13 @@ static const unsigned char compile_error_texts[] =
"failed to allocate heap memory\0"
"unmatched closing parenthesis\0"
"internal error: code overflow\0"
"letter or underscore expected after (?< or (?'\0"
"missing closing parenthesis for condition\0"
/* 25 */
"lookbehind assertion is not fixed length\0"
"malformed number or name after (?(\0"
"a relative value of zero is not allowed\0"
"conditional group contains more than two branches\0"
"assertion expected after (?( or (?(?C)\0"
"(?R or (?[+-]digits must be followed by )\0"
"digit expected after (?+ or (?-\0"
/* 30 */
"unknown POSIX class name\0"
"internal error in pcre2_study(): should not occur\0"
@ -105,7 +105,7 @@ static const unsigned char compile_error_texts[] =
"parentheses are too deeply nested (stack check)\0"
"character code point value in \\x{} or \\o{} is too large\0"
/* 35 */
"invalid condition (?(0)\0"
"lookbehind is too complicated\0"
"\\C is not allowed in a lookbehind assertion in UTF-" XSTRING(PCRE2_CODE_UNIT_WIDTH) " mode\0"
"PCRE does not support \\L, \\l, \\N{name}, \\U, or \\u\0"
"number after (?C is greater than 255\0"
@ -132,13 +132,13 @@ static const unsigned char compile_error_texts[] =
"missing opening brace after \\o\0"
"internal error: unknown newline setting\0"
"\\g is not followed by a braced, angle-bracketed, or quoted name/number or by a plain number\0"
"a numbered reference must not be zero\0"
"(?R (recursive pattern call) must be followed by a closing parenthesis\0"
"an argument is not allowed for (*ACCEPT), (*FAIL), or (*COMMIT)\0"
/* 60 */
"(*VERB) not recognized or malformed\0"
"number is too big\0"
"group number is too big\0"
"subpattern name expected\0"
"digit expected after (?+\0"
"internal error: parsed pattern overflow\0"
"non-octal character in \\o{} (closing brace missing?)\0"
/* 65 */
"different names for subpatterns of the same number are not allowed\0"
@ -151,9 +151,9 @@ static const unsigned char compile_error_texts[] =
#endif
"\\k is not followed by a braced, angle-bracketed, or quoted name\0"
/* 70 */
"internal error: unknown opcode in find_fixedlength()\0"
"internal error: unknown meta code in check_lookbehinds()\0"
"\\N is not supported in a class\0"
"SPARE ERROR\0"
"callout string is too long\0"
"disallowed Unicode code point (>= 0xd800 && <= 0xdfff)\0"
"using UTF is disabled by the application\0"
/* 75 */
@ -161,7 +161,7 @@ static const unsigned char compile_error_texts[] =
"name is too long in (*MARK), (*PRUNE), (*SKIP), or (*THEN)\0"
"character code point value in \\u.... sequence is too large\0"
"digits missing in \\x{} or \\o{}\0"
"syntax error in (?(VERSION condition\0"
"syntax error or number too big in (?(VERSION condition\0"
/* 80 */
"internal error: unknown opcode in auto_possessify()\0"
"missing terminating delimiter for callout with string argument\0"
@ -173,6 +173,11 @@ static const unsigned char compile_error_texts[] =
"regular expression is too complicated\0"
"lookbehind assertion is too long\0"
"pattern string is longer than the limit set by the application\0"
"internal error: unknown code in parsed pattern\0"
/* 90 */
"internal error: bad code value in parsed_skip()\0"
"PCRE2_EXTRA_ALLOW_SURROGATE_ESCAPES is not allowed in UTF-16 mode\0"
"invalid option bits with PCRE2_LITERAL\0"
;
/* Match-time and UTF error texts are in the same format. */
@ -241,7 +246,7 @@ static const unsigned char match_error_texts[] =
"non-unique substring name\0"
"NULL argument passed\0"
"nested recursion at the same subject position\0"
"recursion limit exceeded\0"
"matching depth limit exceeded\0"
"requested value is not available\0"
/* 55 */
"requested value is not set\0"
@ -253,6 +258,8 @@ static const unsigned char match_error_texts[] =
"match with end before start is not supported\0"
"too many replacements (more than INT_MAX)\0"
"bad serialized data\0"
"heap limit exceeded\0"
"invalid syntax\0"
;
@ -268,17 +275,17 @@ distinct.
Arguments:
enumber error number
buffer where to put the message (zero terminated)
size size of the buffer
size size of the buffer in code units
Returns: length of message if all is well
negative on error
*/
PCRE2_EXP_DEFN int PCRE2_CALL_CONVENTION
pcre2_get_error_message(int enumber, PCRE2_UCHAR *buffer, size_t size)
pcre2_get_error_message(int enumber, PCRE2_UCHAR *buffer, PCRE2_SIZE size)
{
const unsigned char *message;
size_t i;
PCRE2_SIZE i;
int n;
if (size == 0) return PCRE2_ERROR_NOMEMORY;
@ -301,8 +308,8 @@ else /* Invalid error number */
for (; n > 0; n--)
{
while (*message++ != CHAR_NULL) {};
if (*message == CHAR_NULL) return PCRE2_ERROR_BADDATA;
while (*message++ != CHAR_NUL) {};
if (*message == CHAR_NUL) return PCRE2_ERROR_BADDATA;
}
for (i = 0; *message != 0; i++)
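The updated prototype takes the buffer size as PCRE2_SIZE and counts it in code units. Typical use against a deliberately broken pattern (the pattern and buffer size below are arbitrary):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

int main(void)
{
  int errcode;
  PCRE2_SIZE erroffset;
  pcre2_code *re = pcre2_compile((PCRE2_SPTR)"(abc", PCRE2_ZERO_TERMINATED, 0,
                                 &errcode, &erroffset, NULL);
  if (re == NULL)
    {
    PCRE2_UCHAR buf[120];                          /* size is in code units */
    pcre2_get_error_message(errcode, buf, sizeof(buf));
    printf("compile failed at offset %zu: %s\n", (size_t)erroffset,
           (char *)buf);
    }
  else pcre2_code_free(re);
  return 0;
}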
View File
@ -71,7 +71,7 @@ PRIV(find_bracket)(PCRE2_SPTR code, BOOL utf, int number)
{
for (;;)
{
register PCRE2_UCHAR c = *code;
PCRE2_UCHAR c = *code;
if (c == OP_END) return NULL;
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -142,20 +142,6 @@ pcre2_match() because of the way it backtracks. */
#define PCRE2_SPTR CUSTOM_SUBJECT_PTR
#endif
/* When compiling with the MSVC compiler, it is sometimes necessary to include
a "calling convention" before exported function names. (This is secondhand
information; I know nothing about MSVC myself). For example, something like
void __cdecl function(....)
might be needed. In order so make this easy, all the exported functions have
PCRE2_CALL_CONVENTION just before their names. It is rarely needed; if not
set, we ensure here that it has no effect. */
#ifndef PCRE2_CALL_CONVENTION
#define PCRE2_CALL_CONVENTION
#endif
/* When checking for integer overflow in pcre2_compile(), we need to handle
large integers. If a 64-bit integer type is available, we can use that.
Otherwise we have to cast to double, which of course requires floating point
@ -254,6 +240,16 @@ not rely on this. */
#define COMPILE_ERROR_BASE 100
/* The initial frames vector for remembering backtracking points in
pcre2_match() is allocated on the system stack, of this size (bytes). The size
must be a multiple of sizeof(PCRE2_SPTR) in all environments, so making it a
multiple of 8 is best. Typical frame sizes are a few hundred bytes (it depends
on the number of capturing parentheses) so 20K handles quite a few frames. A
larger vector on the heap is obtained for patterns that need more frames. The
maximum size of this can be limited. */
#define START_FRAMES_SIZE 20480
/* Define the default BSR convention. */
#ifdef BSR_ANYCRLF
@ -561,9 +557,14 @@ enum { PCRE2_MATCHEDBY_INTERPRETER, /* pcre2_match() */
#define MAGIC_NUMBER 0x50435245UL /* 'PCRE' */
/* The maximum remaining length of subject we are prepared to search for a
req_unit match. */
req_unit match. In 8-bit mode, memchr() is used and is much faster than the
search loop that has to be used in 16-bit and 32-bit modes. */
#if PCRE2_CODE_UNIT_WIDTH == 8
#define REQ_CU_MAX 2000
#else
#define REQ_CU_MAX 1000
#endif
/* Offsets for the bitmap tables in the cbits set of tables. Each table
contains a set of bits for a class map. Some classes are built by combining
@ -682,7 +683,7 @@ a positive value. */
/* The remaining definitions work in both environments. */
#define CHAR_NULL '\0'
#define CHAR_NUL '\0'
#define CHAR_HT '\t'
#define CHAR_VT '\v'
#define CHAR_FF '\f'
@ -923,6 +924,7 @@ a positive value. */
#define STRING_CRLF_RIGHTPAR "CRLF)"
#define STRING_ANY_RIGHTPAR "ANY)"
#define STRING_ANYCRLF_RIGHTPAR "ANYCRLF)"
#define STRING_NUL_RIGHTPAR "NUL)"
#define STRING_BSR_ANYCRLF_RIGHTPAR "BSR_ANYCRLF)"
#define STRING_BSR_UNICODE_RIGHTPAR "BSR_UNICODE)"
#define STRING_UTF8_RIGHTPAR "UTF8)"
@ -936,7 +938,9 @@ a positive value. */
#define STRING_NO_START_OPT_RIGHTPAR "NO_START_OPT)"
#define STRING_NOTEMPTY_RIGHTPAR "NOTEMPTY)"
#define STRING_NOTEMPTY_ATSTART_RIGHTPAR "NOTEMPTY_ATSTART)"
#define STRING_LIMIT_HEAP_EQ "LIMIT_HEAP="
#define STRING_LIMIT_MATCH_EQ "LIMIT_MATCH="
#define STRING_LIMIT_DEPTH_EQ "LIMIT_DEPTH="
#define STRING_LIMIT_RECURSION_EQ "LIMIT_RECURSION="
#define STRING_MARK "MARK"
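These strings implement the pattern-level limit settings; LIMIT_HEAP and LIMIT_DEPTH are new in 10.30, with LIMIT_DEPTH replacing LIMIT_RECURSION while the old spelling is retained as a synonym. A sketch of a pattern that uses them (the values are invented):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>

/* Each in-pattern value is an upper bound: it can lower the corresponding
   limit supplied via the match context, but never raise it. */
static pcre2_code *compile_self_limiting(int *errcode, PCRE2_SIZE *erroff)
{
  static const char pat[] =
    "(*LIMIT_HEAP=500)(*LIMIT_DEPTH=100)(*LIMIT_MATCH=10000)(a|b)+c";
  return pcre2_compile((PCRE2_SPTR)pat, PCRE2_ZERO_TERMINATED, 0,
                       errcode, erroff, NULL);
}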
@ -958,7 +962,7 @@ only. */
#define CHAR_ESC '\033'
#define CHAR_DEL '\177'
#define CHAR_NULL '\0'
#define CHAR_NUL '\0'
#define CHAR_SPACE '\040'
#define CHAR_EXCLAMATION_MARK '\041'
#define CHAR_QUOTATION_MARK '\042'
@ -1196,6 +1200,7 @@ only. */
#define STRING_CRLF_RIGHTPAR STR_C STR_R STR_L STR_F STR_RIGHT_PARENTHESIS
#define STRING_ANY_RIGHTPAR STR_A STR_N STR_Y STR_RIGHT_PARENTHESIS
#define STRING_ANYCRLF_RIGHTPAR STR_A STR_N STR_Y STR_C STR_R STR_L STR_F STR_RIGHT_PARENTHESIS
#define STRING_NUL_RIGHTPAR STR_N STR_U STR_L STR_RIGHT_PARENTHESIS
#define STRING_BSR_ANYCRLF_RIGHTPAR STR_B STR_S STR_R STR_UNDERSCORE STR_A STR_N STR_Y STR_C STR_R STR_L STR_F STR_RIGHT_PARENTHESIS
#define STRING_BSR_UNICODE_RIGHTPAR STR_B STR_S STR_R STR_UNDERSCORE STR_U STR_N STR_I STR_C STR_O STR_D STR_E STR_RIGHT_PARENTHESIS
#define STRING_UTF8_RIGHTPAR STR_U STR_T STR_F STR_8 STR_RIGHT_PARENTHESIS
@ -1209,7 +1214,9 @@ only. */
#define STRING_NO_START_OPT_RIGHTPAR STR_N STR_O STR_UNDERSCORE STR_S STR_T STR_A STR_R STR_T STR_UNDERSCORE STR_O STR_P STR_T STR_RIGHT_PARENTHESIS
#define STRING_NOTEMPTY_RIGHTPAR STR_N STR_O STR_T STR_E STR_M STR_P STR_T STR_Y STR_RIGHT_PARENTHESIS
#define STRING_NOTEMPTY_ATSTART_RIGHTPAR STR_N STR_O STR_T STR_E STR_M STR_P STR_T STR_Y STR_UNDERSCORE STR_A STR_T STR_S STR_T STR_A STR_R STR_T STR_RIGHT_PARENTHESIS
#define STRING_LIMIT_HEAP_EQ STR_L STR_I STR_M STR_I STR_T STR_UNDERSCORE STR_H STR_E STR_A STR_P STR_EQUALS_SIGN
#define STRING_LIMIT_MATCH_EQ STR_L STR_I STR_M STR_I STR_T STR_UNDERSCORE STR_M STR_A STR_T STR_C STR_H STR_EQUALS_SIGN
#define STRING_LIMIT_DEPTH_EQ STR_L STR_I STR_M STR_I STR_T STR_UNDERSCORE STR_D STR_E STR_P STR_T STR_H STR_EQUALS_SIGN
#define STRING_LIMIT_RECURSION_EQ STR_L STR_I STR_M STR_I STR_T STR_UNDERSCORE STR_R STR_E STR_C STR_U STR_R STR_S STR_I STR_O STR_N STR_EQUALS_SIGN
#define STRING_MARK STR_M STR_A STR_R STR_K
@ -1298,23 +1305,16 @@ mode rather than an escape sequence. It is also used for [^] in JavaScript
compatibility mode, and for \C in non-utf mode. In non-DOTALL mode, "." behaves
like \N.
The special values ESC_DU, ESC_du, etc. are used instead of ESC_D, ESC_d, etc.
when PCRE2_UCP is set and replacement of \d etc by \p sequences is required.
They must be contiguous, and remain in order so that the replacements can be
looked up from a table.
Negative numbers are used to encode a backreference (\1, \2, \3, etc.) in
check_escape(). There are two tests in the code for an escape
greater than ESC_b and less than ESC_Z to detect the types that may be
repeated. These are the types that consume characters. If any new escapes are
put in between that don't consume a character, that code will have to change.
*/
check_escape(). There are tests in the code for an escape greater than ESC_b
and less than ESC_Z to detect the types that may be repeated. These are the
types that consume characters. If any new escapes are put in between that don't
consume a character, that code will have to change. */
enum { ESC_A = 1, ESC_G, ESC_K, ESC_B, ESC_b, ESC_D, ESC_d, ESC_S, ESC_s,
ESC_W, ESC_w, ESC_N, ESC_dum, ESC_C, ESC_P, ESC_p, ESC_R, ESC_H,
ESC_h, ESC_V, ESC_v, ESC_X, ESC_Z, ESC_z,
ESC_E, ESC_Q, ESC_g, ESC_k,
ESC_DU, ESC_du, ESC_SU, ESC_su, ESC_WU, ESC_wu };
ESC_E, ESC_Q, ESC_g, ESC_k };
/********************** Opcode definitions ******************/
@ -1380,7 +1380,8 @@ enum {
OP_CIRC, /* 27 Start of line - not multiline */
OP_CIRCM, /* 28 Start of line - multiline */
/* Single characters; caseful must precede the caseless ones */
/* Single characters; caseful must precede the caseless ones, and these
must remain in this order, and adjacent. */
OP_CHAR, /* 29 Match one character, casefully */
OP_CHARI, /* 30 Match one character, caselessly */
@ -1530,68 +1531,67 @@ enum {
OP_ASSERTBACK, /* 128 Positive lookbehind */
OP_ASSERTBACK_NOT, /* 129 Negative lookbehind */
/* ONCE, ONCE_NC, BRA, BRAPOS, CBRA, CBRAPOS, and COND must come immediately
after the assertions, with ONCE first, as there's a test for >= ONCE for a
subpattern that isn't an assertion. The POS versions must immediately follow
the non-POS versions in each case. */
/* ONCE, BRA, BRAPOS, CBRA, CBRAPOS, and COND must come immediately after the
assertions, with ONCE first, as there's a test for >= ONCE for a subpattern
that isn't an assertion. The POS versions must immediately follow the non-POS
versions in each case. */
OP_ONCE, /* 130 Atomic group, contains captures */
OP_ONCE_NC, /* 131 Atomic group containing no captures */
OP_BRA, /* 132 Start of non-capturing bracket */
OP_BRAPOS, /* 133 Ditto, with unlimited, possessive repeat */
OP_CBRA, /* 134 Start of capturing bracket */
OP_CBRAPOS, /* 135 Ditto, with unlimited, possessive repeat */
OP_COND, /* 136 Conditional group */
OP_BRA, /* 131 Start of non-capturing bracket */
OP_BRAPOS, /* 132 Ditto, with unlimited, possessive repeat */
OP_CBRA, /* 133 Start of capturing bracket */
OP_CBRAPOS, /* 134 Ditto, with unlimited, possessive repeat */
OP_COND, /* 135 Conditional group */
/* These five must follow the previous five, in the same order. There's a
check for >= SBRA to distinguish the two sets. */
OP_SBRA, /* 137 Start of non-capturing bracket, check empty */
OP_SBRAPOS, /* 138 Ditto, with unlimited, possessive repeat */
OP_SCBRA, /* 139 Start of capturing bracket, check empty */
OP_SCBRAPOS, /* 140 Ditto, with unlimited, possessive repeat */
OP_SCOND, /* 141 Conditional group, check empty */
OP_SBRA, /* 136 Start of non-capturing bracket, check empty */
OP_SBRAPOS, /* 137 Ditto, with unlimited, possessive repeat */
OP_SCBRA, /* 138 Start of capturing bracket, check empty */
OP_SCBRAPOS, /* 139 Ditto, with unlimited, possessive repeat */
OP_SCOND, /* 140 Conditional group, check empty */
/* The next two pairs must (respectively) be kept together. */
OP_CREF, /* 142 Used to hold a capture number as condition */
OP_DNCREF, /* 143 Used to point to duplicate names as a condition */
OP_RREF, /* 144 Used to hold a recursion number as condition */
OP_DNRREF, /* 145 Used to point to duplicate names as a condition */
OP_FALSE, /* 146 Always false (used by DEFINE and VERSION) */
OP_TRUE, /* 147 Always true (used by VERSION) */
OP_CREF, /* 141 Used to hold a capture number as condition */
OP_DNCREF, /* 142 Used to point to duplicate names as a condition */
OP_RREF, /* 143 Used to hold a recursion number as condition */
OP_DNRREF, /* 144 Used to point to duplicate names as a condition */
OP_FALSE, /* 145 Always false (used by DEFINE and VERSION) */
OP_TRUE, /* 146 Always true (used by VERSION) */
OP_BRAZERO, /* 148 These two must remain together and in this */
OP_BRAMINZERO, /* 149 order. */
OP_BRAPOSZERO, /* 150 */
OP_BRAZERO, /* 147 These two must remain together and in this */
OP_BRAMINZERO, /* 148 order. */
OP_BRAPOSZERO, /* 149 */
/* These are backtracking control verbs */
OP_MARK, /* 151 always has an argument */
OP_PRUNE, /* 152 */
OP_PRUNE_ARG, /* 153 same, but with argument */
OP_SKIP, /* 154 */
OP_SKIP_ARG, /* 155 same, but with argument */
OP_THEN, /* 156 */
OP_THEN_ARG, /* 157 same, but with argument */
OP_COMMIT, /* 158 */
OP_MARK, /* 150 always has an argument */
OP_PRUNE, /* 151 */
OP_PRUNE_ARG, /* 152 same, but with argument */
OP_SKIP, /* 153 */
OP_SKIP_ARG, /* 154 same, but with argument */
OP_THEN, /* 155 */
OP_THEN_ARG, /* 156 same, but with argument */
OP_COMMIT, /* 157 */
/* These are forced failure and success verbs */
OP_FAIL, /* 159 */
OP_ACCEPT, /* 160 */
OP_ASSERT_ACCEPT, /* 161 Used inside assertions */
OP_CLOSE, /* 162 Used before OP_ACCEPT to close open captures */
OP_FAIL, /* 158 */
OP_ACCEPT, /* 159 */
OP_ASSERT_ACCEPT, /* 160 Used inside assertions */
OP_CLOSE, /* 161 Used before OP_ACCEPT to close open captures */
/* This is used to skip a subpattern with a {0} quantifier */
OP_SKIPZERO, /* 163 */
OP_SKIPZERO, /* 162 */
/* This is used to identify a DEFINE group during compilation so that it can
be checked for having only one branch. It is changed to OP_FALSE before
compilation finishes. */
OP_DEFINE, /* 164 */
OP_DEFINE, /* 163 */
/* This is not an opcode, but is used to check that tables indexed by opcode
are the correct length, in order to catch updating errors - there have been
@ -1638,7 +1638,7 @@ some cases doesn't actually use these names at all). */
"Recurse", "Callout", "CalloutStr", \
"Alt", "Ket", "KetRmax", "KetRmin", "KetRpos", \
"Reverse", "Assert", "Assert not", "AssertB", "AssertB not", \
"Once", "Once_NC", \
"Once", \
"Bra", "BraPos", "CBra", "CBraPos", \
"Cond", \
"SBra", "SBraPos", "SCBra", "SCBraPos", \
@ -1722,7 +1722,6 @@ in UTF-8 mode. The code that uses this table must know about such things. */
1+LINK_SIZE, /* Assert behind */ \
1+LINK_SIZE, /* Assert behind not */ \
1+LINK_SIZE, /* ONCE */ \
1+LINK_SIZE, /* ONCE_NC */ \
1+LINK_SIZE, /* BRA */ \
1+LINK_SIZE, /* BRAPOS */ \
1+LINK_SIZE+IMM2_SIZE, /* CBRA */ \
@ -1794,10 +1793,17 @@ typedef struct {
/* UCD access macros */
#define UCD_BLOCK_SIZE 128
#define GET_UCD(ch) (PRIV(ucd_records) + \
#define REAL_GET_UCD(ch) (PRIV(ucd_records) + \
PRIV(ucd_stage2)[PRIV(ucd_stage1)[(int)(ch) / UCD_BLOCK_SIZE] * \
UCD_BLOCK_SIZE + (int)(ch) % UCD_BLOCK_SIZE])
#if PCRE2_CODE_UNIT_WIDTH == 32
#define GET_UCD(ch) ((ch > MAX_UTF_CODE_POINT)? \
PRIV(dummy_ucd_record) : REAL_GET_UCD(ch))
#else
#define GET_UCD(ch) REAL_GET_UCD(ch)
#endif
#define UCD_CHARTYPE(ch) GET_UCD(ch)->chartype
#define UCD_SCRIPT(ch) GET_UCD(ch)->script
#define UCD_CATEGORY(ch) PRIV(ucp_gentype)[UCD_CHARTYPE(ch)]
@ -1852,8 +1858,12 @@ extern const uint8_t PRIV(utf8_table4)[];
#define _pcre2_callout_end_delims PCRE2_SUFFIX(_pcre2_callout_end_delims_)
#define _pcre2_callout_start_delims PCRE2_SUFFIX(_pcre2_callout_start_delims_)
#define _pcre2_default_compile_context PCRE2_SUFFIX(_pcre2_default_compile_context_)
#define _pcre2_default_convert_context PCRE2_SUFFIX(_pcre2_default_convert_context_)
#define _pcre2_default_match_context PCRE2_SUFFIX(_pcre2_default_match_context_)
#define _pcre2_default_tables PCRE2_SUFFIX(_pcre2_default_tables_)
#if PCRE2_CODE_UNIT_WIDTH == 32
#define _pcre2_dummy_ucd_record PCRE2_SUFFIX(_pcre2_dummy_ucd_record_)
#endif
#define _pcre2_hspace_list PCRE2_SUFFIX(_pcre2_hspace_list_)
#define _pcre2_vspace_list PCRE2_SUFFIX(_pcre2_vspace_list_)
#define _pcre2_ucd_caseless_sets PCRE2_SUFFIX(_pcre2_ucd_caseless_sets_)
@ -1872,12 +1882,16 @@ extern const uint8_t PRIV(OP_lengths)[];
extern const uint32_t PRIV(callout_end_delims)[];
extern const uint32_t PRIV(callout_start_delims)[];
extern const pcre2_compile_context PRIV(default_compile_context);
extern const pcre2_convert_context PRIV(default_convert_context);
extern const pcre2_match_context PRIV(default_match_context);
extern const uint8_t PRIV(default_tables)[];
extern const uint32_t PRIV(hspace_list)[];
extern const uint32_t PRIV(vspace_list)[];
extern const uint32_t PRIV(ucd_caseless_sets)[];
extern const ucd_record PRIV(ucd_records)[];
#if PCRE2_CODE_UNIT_WIDTH == 32
extern const ucd_record PRIV(dummy_ucd_record)[];
#endif
extern const uint8_t PRIV(ucd_stage1)[];
extern const uint16_t PRIV(ucd_stage2)[];
extern const uint32_t PRIV(ucp_gbtable)[];
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -54,6 +54,7 @@ just to undefine them all. */
#undef ACROSSCHAR
#undef BACKCHAR
#undef BYTES2CU
#undef CHMAX_255
#undef CU2BYTES
#undef FORWARDCHAR
#undef FORWARDCHARTEST
@ -140,7 +141,7 @@ values of 3 or 4 are also supported. */
#undef LINK_SIZE
#define LINK_SIZE 1
#define PUT(a,n,d) \
(a[n] = (d))
(a[n] = (PCRE2_UCHAR)(d))
#define GET(a,n) \
(a[n])
#define MAX_PATTERN_SIZE (1 << 16)
@ -200,21 +201,26 @@ arithmetic results in a signed value. Hence the cast. */
#endif
/* Other macros that are different for 8-bit mode. The MAX_255 macro checks
whether its argument is less than 256. The maximum length of a MARK name must
fit in one code unit; currently it is set to 255 or 65535. The TABLE_GET macro
is used to access elements of tables containing exactly 256 items. When code
points can be greater than 255, a check is needed before accessing these
tables. */
whether its argument, which is assumed to be one code unit, is less than 256.
The CHMAX_255 macro does not assume one code unit. The maximum length of a MARK
name must fit in one code unit; currently it is set to 255 or 65535. The
TABLE_GET macro is used to access elements of tables containing exactly 256
items. When code points can be greater than 255, a check is needed before
accessing these tables. */
#if PCRE2_CODE_UNIT_WIDTH == 8
#define MAX_255(c) TRUE
#define MAX_MARK ((1u << 8) - 1)
#ifdef SUPPORT_UNICODE
#define SUPPORT_WIDE_CHARS
#define CHMAX_255(c) ((c) <= 255u)
#else
#define CHMAX_255(c) TRUE
#endif /* SUPPORT_UNICODE */
#define TABLE_GET(c, table, default) ((table)[c])
#else /* Code units are 16 or 32 bits */
#define CHMAX_255(c) ((c) <= 255u)
#define MAX_255(c) ((c) <= 255u)
#define MAX_MARK ((1u << 16) - 1)
#define SUPPORT_WIDE_CHARS
@ -566,15 +572,13 @@ typedef struct pcre2_real_compile_context {
uint16_t bsr_convention;
uint16_t newline_convention;
uint32_t parens_nest_limit;
uint32_t extra_options;
} pcre2_real_compile_context;
/* The real match context structure. */
typedef struct pcre2_real_match_context {
pcre2_memctl memctl;
#ifdef HEAP_MATCH_RECURSE
pcre2_memctl stack_memctl;
#endif
#ifdef SUPPORT_JIT
pcre2_jit_callback jit_callback;
void *jit_callback_data;
@ -582,10 +586,19 @@ typedef struct pcre2_real_match_context {
int (*callout)(pcre2_callout_block *, void *);
void *callout_data;
PCRE2_SIZE offset_limit;
uint32_t heap_limit;
uint32_t match_limit;
uint32_t recursion_limit;
uint32_t depth_limit;
} pcre2_real_match_context;
/* The real convert context structure. */
typedef struct pcre2_real_convert_context {
pcre2_memctl memctl;
uint32_t glob_separator;
uint32_t glob_escape;
} pcre2_real_convert_context;
/* The real compiled code structure. The type for the blocksize field is
defined specially because it is required in pcre2_serialize_decode() when
copying the size from possibly unaligned memory into a variable of the same
@ -611,8 +624,9 @@ typedef struct pcre2_real_code {
uint32_t compile_options; /* Options passed to pcre2_compile() */
uint32_t overall_options; /* Options after processing the pattern */
uint32_t flags; /* Various state flags */
uint32_t limit_heap; /* Limit set in the pattern */
uint32_t limit_match; /* Limit set in the pattern */
uint32_t limit_recursion; /* Limit set in the pattern */
uint32_t limit_depth; /* Limit set in the pattern */
uint32_t first_codeunit; /* Starting code unit */
uint32_t last_codeunit; /* This codeunit must be seen */
uint16_t bsr_convention; /* What \R matches */
@ -625,7 +639,11 @@ typedef struct pcre2_real_code {
uint16_t name_count; /* Number of name entries in the table */
} pcre2_real_code;
/* The real match data structure. */
/* The real match data structure. Define ovector large so that array bound
checkers don't grumble. Memory for this structure is obtained by calling
pcre2_match_data_create(), which sets the size as the offset of ovector plus
pairs of elements for each capturing group. (See also the heapframe structure
below.) */
typedef struct pcre2_real_match_data {
pcre2_memctl memctl;
@ -638,7 +656,7 @@ typedef struct pcre2_real_match_data {
uint16_t matchedby; /* Type of match (normal, JIT, DFA) */
uint16_t oveccount; /* Number of pairs */
int rc; /* The return code from the match */
PCRE2_SIZE ovector[1]; /* The first field */
PCRE2_SIZE ovector[10000];/* The first field */
} pcre2_real_match_data;
@ -648,18 +666,24 @@ typedef struct pcre2_real_match_data {
#ifndef PCRE2_PCRE2TEST
/* Structure for checking for mutual recursion when scanning compiled code. */
/* Structures for checking for mutual recursion when scanning compiled or
parsed code. */
typedef struct recurse_check {
struct recurse_check *prev;
PCRE2_SPTR group;
} recurse_check;
typedef struct parsed_recurse_check {
struct parsed_recurse_check *prev;
uint32_t *groupptr;
} parsed_recurse_check;
/* Structure for building a cache when filling in recursion offsets. */
typedef struct recurse_cache {
PCRE2_SPTR group;
int recno;
int groupnumber;
} recurse_cache;
/* Structure for maintaining a chain of pointers to the currently incomplete
@ -693,9 +717,10 @@ typedef struct compile_block {
PCRE2_SPTR start_code; /* The start of the compiled code */
PCRE2_SPTR start_pattern; /* The start of the pattern */
PCRE2_SPTR end_pattern; /* The end of the pattern */
PCRE2_SPTR nestptr[2]; /* Pointer(s) saved for string substitution */
PCRE2_UCHAR *name_table; /* The name/number table */
size_t workspace_size; /* Size of workspace */
PCRE2_SIZE workspace_size; /* Size of workspace */
PCRE2_SIZE small_ref_offset[10]; /* Offsets for \1 to \9 */
PCRE2_SIZE erroroffset; /* Offset of error in pattern */
uint16_t names_found; /* Number of entries so far */
uint16_t name_entry_size; /* Size of each entry */
open_capitem *open_caps; /* Chain of open capture items */
@ -703,13 +728,17 @@ typedef struct compile_block {
uint32_t named_group_list_size; /* Number of entries in the list */
uint32_t external_options; /* External (initial) options */
uint32_t external_flags; /* External flag bits to be set */
uint32_t bracount; /* Count of capturing parens as we compile */
uint32_t final_bracount; /* Saved value after first pass */
uint32_t bracount; /* Count of capturing parentheses */
uint32_t lastcapture; /* Last capture encountered */
uint32_t *parsed_pattern; /* Parsed pattern buffer */
uint32_t *parsed_pattern_end; /* Parsed pattern should not get here */
uint32_t *groupinfo; /* Group info vector */
uint32_t top_backref; /* Maximum back reference */
uint32_t backref_map; /* Bitmap of low back refs */
uint32_t nltype; /* Newline type */
uint32_t nllen; /* Newline string length */
uint32_t class_range_start; /* Overall class range start */
uint32_t class_range_end; /* Overall class range end */
PCRE2_UCHAR nl[4]; /* Newline string when fixed length */
int max_lookbehind; /* Maximum lookbehind (characters) */
int parens_depth; /* Depth of nested parentheses */
@ -718,9 +747,7 @@ typedef struct compile_block {
BOOL had_accept; /* (*ACCEPT) encountered */
BOOL had_pruneorskip; /* (*PRUNE) or (*SKIP) encountered */
BOOL had_recurse; /* Had a recursion or subroutine call */
BOOL check_lookbehind; /* Lookbehinds need later checking */
BOOL dupnames; /* Duplicate names exist */
BOOL iscondassert; /* Next assert is a condition */
} compile_block;
/* Structure for keeping the properties of the in-memory stack used
@ -731,27 +758,8 @@ typedef struct pcre2_real_jit_stack {
void* stack;
} pcre2_real_jit_stack;
/* Structure for keeping a chain of heap blocks used for saving ovectors
during pattern recursion when the ovector is larger than can be saved on
the system stack. */
typedef struct ovecsave_frame {
struct ovecsave_frame *next; /* Next frame on free chain */
PCRE2_SIZE saved_ovec[1]; /* First vector element */
} ovecsave_frame;
/* Structure for items in a linked list that represents an explicit recursive
call within the pattern; used by pcre_match(). */
typedef struct recursion_info {
struct recursion_info *prevrec; /* Previous recursion record (or NULL) */
unsigned int group_num; /* Number of group that was called */
PCRE2_SIZE *ovec_save; /* Pointer to saved ovector frame */
uint32_t saved_capture_last; /* Last capture number */
PCRE2_SPTR subject_position; /* Position at start of recursion */
} recursion_info;
/* A similar structure for pcre_dfa_match(). */
call within the pattern when running pcre_dfa_match(). */
typedef struct dfa_recursion_info {
struct dfa_recursion_info *prevrec;
@ -759,35 +767,75 @@ typedef struct dfa_recursion_info {
uint32_t group_num;
} dfa_recursion_info;
/* Structure for building a chain of data for holding the values of the subject
pointer at the start of each subpattern, so as to detect when an empty string
has been matched by a subpattern - to break infinite loops; used by
pcre2_match(). */
/* Structure for "stack" frames that are used for remembering backtracking
positions during matching. As these are used in a vector, with the ovector item
being extended, the size of the structure must be a multiple of PCRE2_SIZE. The
only way to check this at compile time is to force an error by generating an
array with a negative size. By putting this in a typedef (which is never used),
we don't generate any code when all is well. */
typedef struct eptrblock {
struct eptrblock *epb_prev;
PCRE2_SPTR epb_saved_eptr;
} eptrblock;
typedef struct heapframe {
/* The first set of fields are variables that have to be preserved over calls
to RRMATCH(), but which do not need to be copied to new frames. */
PCRE2_SPTR ecode; /* The current position in the pattern */
PCRE2_SPTR temp_sptr[2]; /* Used for short-term PCRE_SPTR values */
PCRE2_SIZE length; /* Used for character, string, or code lengths */
PCRE2_SIZE back_frame; /* Amount to subtract on RRETURN */
PCRE2_SIZE temp_size; /* Used for short-term PCRE2_SIZE values */
uint32_t rdepth; /* "Recursion" depth */
uint32_t group_frame_type; /* Type information for group frames */
uint32_t temp_32[4]; /* Used for short-term 32-bit or BOOL values */
uint8_t return_id; /* Where to go on in internal "return" */
uint8_t op; /* Processing opcode */
#if PCRE2_CODE_UNIT_WIDTH == 8
PCRE2_UCHAR occu[6]; /* Used for other case code units */
#elif PCRE2_CODE_UNIT_WIDTH == 16
PCRE2_UCHAR occu[2]; /* Used for other case code units */
#else
PCRE2_UCHAR occu[1]; /* Used for other case code units */
#endif
/* The rest have to be copied from the previous frame whenever a new frame
becomes current. The final field is specified as a large vector so that
runtime array bound checks don't catch references to it. However, for any
specific call to pcre2_match() the memory allocated for each frame structure
allows for exactly the right size ovector for the number of capturing
parentheses. */
PCRE2_SPTR eptr; /* MUST BE FIRST */
PCRE2_SPTR start_match; /* Can be adjusted by \K */
PCRE2_SPTR mark; /* Most recent mark on the success path */
uint32_t current_recurse; /* Current (deepest) recursion number */
uint32_t capture_last; /* Most recent capture */
PCRE2_SIZE last_group_offset; /* Saved offset to most recent group frame */
PCRE2_SIZE offset_top; /* Offset after highest capture */
PCRE2_SIZE ovector[10000]; /* Must be last in the structure */
} heapframe;
typedef char check_heapframe_size[
((sizeof(heapframe) % sizeof(PCRE2_SIZE)) == 0)? (+1):(-1)];
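The typedef above is the classic portable compile-time assertion: a negative array size forces a diagnostic when the condition fails, and no code is generated when it holds. The same idiom applied to an unrelated, made-up condition:

#include <stddef.h>

/* Fails to compile unless ptrdiff_t is at least 32 bits wide; the typedef
   itself is never used. */
typedef char assert_ptrdiff_is_wide_enough[
  (sizeof(ptrdiff_t) >= 4)? (+1) : (-1)];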
/* Structure for passing "static" information around between the functions
doing traditional NFA matching (pcre2_match() and friends). */
typedef struct match_block {
pcre2_memctl memctl; /* For general use */
#ifdef HEAP_MATCH_RECURSE
pcre2_memctl stack_memctl; /* For "stack" frames */
#endif
uint32_t match_call_count; /* As it says */
PCRE2_SIZE frame_vector_size; /* Size of a backtracking frame */
heapframe *match_frames; /* Points to vector of frames */
heapframe *match_frames_top; /* Points after the end of the vector */
heapframe *stack_frames; /* The original vector on the stack */
PCRE2_SIZE heap_limit; /* As it says */
uint32_t match_limit; /* As it says */
uint32_t match_limit_recursion; /* As it says */
uint32_t match_limit_depth; /* As it says */
uint32_t match_call_count; /* Number of times a new frame is created */
BOOL hitend; /* Hit the end of the subject at some point */
BOOL hasthen; /* Pattern contains (*THEN) */
const uint8_t *lcc; /* Points to lower casing table */
const uint8_t *fcc; /* Points to case-flipping table */
const uint8_t *ctypes; /* Points to table of type maps */
PCRE2_SIZE *ovector; /* Pointer to the offset vector */
PCRE2_SIZE offset_end; /* One past the end */
PCRE2_SIZE offset_max; /* The maximum usable for return data */
PCRE2_SIZE start_offset; /* The start offset value */
PCRE2_SIZE end_offset_top; /* Highwater mark at end of match */
uint16_t partial; /* PARTIAL options */
@ -798,30 +846,23 @@ typedef struct match_block {
PCRE2_SPTR start_code; /* For use when recursing */
PCRE2_SPTR start_subject; /* Start of the subject string */
PCRE2_SPTR end_subject; /* End of the subject string */
PCRE2_SPTR start_match_ptr; /* Start of matched string */
PCRE2_SPTR end_match_ptr; /* Subject position at end match */
PCRE2_SPTR start_used_ptr; /* Earliest consulted character */
PCRE2_SPTR last_used_ptr; /* Latest consulted character */
PCRE2_SPTR mark; /* Mark pointer to pass back on success */
PCRE2_SPTR nomatch_mark; /* Mark pointer to pass back on failure */
PCRE2_SPTR once_target; /* Where to back up to for atomic groups */
PCRE2_SPTR verb_ecode_ptr; /* For passing back info */
PCRE2_SPTR verb_skip_ptr; /* For passing back a (*SKIP) name */
uint32_t verb_current_recurse; /* Current recurse when (*VERB) happens */
uint32_t moptions; /* Match options */
uint32_t poptions; /* Pattern options */
uint32_t capture_last; /* Most recent capture number + overflow flag */
uint32_t skip_arg_count; /* For counting SKIP_ARGs */
uint32_t ignore_skip_arg; /* For re-run when SKIP arg name not found */
uint32_t match_function_type; /* Set for certain special calls of match() */
uint32_t nltype; /* Newline type */
uint32_t nllen; /* Newline string length */
PCRE2_UCHAR nl[4]; /* Newline string when fixed */
eptrblock *eptrchain; /* Chain of eptrblocks for tail recursions */
recursion_info *recursive; /* Linked list of recursion data */
ovecsave_frame *ovecsave_chain; /* Linked list of free ovecsave blocks */
void *callout_data; /* To pass back to callouts */
int (*callout)(pcre2_callout_block *,void *); /* Callout function or NULL */
#ifdef HEAP_MATCH_RECURSE
void *match_frames_base; /* For remembering malloc'd frames */
#endif
} match_block;
/* A similar structure is used for the same purpose by the DFA matching
@ -836,6 +877,9 @@ typedef struct dfa_match_block {
PCRE2_SPTR last_used_ptr; /* Latest consulted character */
const uint8_t *tables; /* Character tables */
PCRE2_SIZE start_offset; /* The start offset value */
uint32_t match_limit; /* As it says */
uint32_t match_limit_depth; /* As it says */
uint32_t match_call_count; /* Number of calls of internal function */
uint32_t moptions; /* Match options */
uint32_t poptions; /* Pattern options */
uint32_t nltype; /* Newline type */
File diff suppressed because it is too large

View File
@ -49,10 +49,10 @@ static SLJIT_NOINLINE int jit_machine_stack_exec(jit_arguments *arguments, jit_f
sljit_u8 local_space[MACHINE_STACK_SIZE];
struct sljit_stack local_stack;
local_stack.top = (sljit_sw)&local_space;
local_stack.base = local_stack.top;
local_stack.limit = local_stack.base + MACHINE_STACK_SIZE;
local_stack.max_limit = local_stack.limit;
local_stack.max_limit = local_space;
local_stack.limit = local_space;
local_stack.base = local_space + MACHINE_STACK_SIZE;
local_stack.top = local_space + MACHINE_STACK_SIZE;
arguments->stack = &local_stack;
return executable_func(arguments);
}
File diff suppressed because it is too large

View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -51,7 +51,7 @@ POSSIBILITY OF SUCH DAMAGE.
* Create a match data block given ovector size *
*************************************************/
/* A minimum of 1 is imposed on the number of ovector triplets. */
/* A minimum of 1 is imposed on the number of ovector pairs. */
PCRE2_EXP_DEFN pcre2_match_data * PCRE2_CALL_CONVENTION
pcre2_match_data_create(uint32_t oveccount, pcre2_general_context *gcontext)
@ -59,7 +59,7 @@ pcre2_match_data_create(uint32_t oveccount, pcre2_general_context *gcontext)
pcre2_match_data *yield;
if (oveccount < 1) oveccount = 1;
yield = PRIV(memctl_malloc)(
sizeof(pcre2_match_data) + 3*oveccount*sizeof(PCRE2_SIZE),
offsetof(pcre2_match_data, ovector) + 2*oveccount*sizeof(PCRE2_SIZE),
(pcre2_memctl *)gcontext);
if (yield == NULL) return NULL;
yield->oveccount = oveccount;
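The block is now sized as the offset of the ovector plus two PCRE2_SIZE entries per requested pair, rather than three per "triplet" as before. Nothing changes for callers; a small sketch of reading the pairs back through the public accessors (the helper name is invented):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

static void show_ovector(pcre2_match_data *md)
{
  uint32_t n = pcre2_get_ovector_count(md);        /* == oveccount */
  PCRE2_SIZE *ov = pcre2_get_ovector_pointer(md);
  for (uint32_t i = 0; i < n; i++)
    printf("pair %u: %zu..%zu\n", (unsigned)i, (size_t)ov[2*i],
           (size_t)ov[2*i+1]);
}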
View File
@ -83,7 +83,7 @@ PRIV(ord2utf)(uint32_t cvalue, PCRE2_UCHAR *buffer)
/* Convert to UTF-8 */
#if PCRE2_CODE_UNIT_WIDTH == 8
register int i, j;
int i, j;
for (i = 0; i < PRIV(utf8_table1_size); i++)
if ((int)cvalue <= PRIV(utf8_table1)[i]) break;
buffer += i;
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -75,10 +75,12 @@ if (where == NULL) /* Requests field length */
case PCRE2_INFO_BACKREFMAX:
case PCRE2_INFO_BSR:
case PCRE2_INFO_CAPTURECOUNT:
case PCRE2_INFO_DEPTHLIMIT:
case PCRE2_INFO_FIRSTCODETYPE:
case PCRE2_INFO_FIRSTCODEUNIT:
case PCRE2_INFO_HASBACKSLASHC:
case PCRE2_INFO_HASCRORLF:
case PCRE2_INFO_HEAPLIMIT:
case PCRE2_INFO_JCHANGED:
case PCRE2_INFO_LASTCODETYPE:
case PCRE2_INFO_LASTCODEUNIT:
@ -89,7 +91,6 @@ if (where == NULL) /* Requests field length */
case PCRE2_INFO_NAMEENTRYSIZE:
case PCRE2_INFO_NAMECOUNT:
case PCRE2_INFO_NEWLINE:
case PCRE2_INFO_RECURSIONLIMIT:
return sizeof(uint32_t);
case PCRE2_INFO_FIRSTBITMAP:
@ -97,6 +98,7 @@ if (where == NULL) /* Requests field length */
case PCRE2_INFO_JITSIZE:
case PCRE2_INFO_SIZE:
case PCRE2_INFO_FRAMESIZE:
return sizeof(size_t);
case PCRE2_INFO_NAMETABLE:
@ -137,6 +139,11 @@ switch(what)
*((uint32_t *)where) = re->top_bracket;
break;
case PCRE2_INFO_DEPTHLIMIT:
*((uint32_t *)where) = re->limit_depth;
if (re->limit_depth == UINT32_MAX) return PCRE2_ERROR_UNSET;
break;
case PCRE2_INFO_FIRSTCODETYPE:
*((uint32_t *)where) = ((re->flags & PCRE2_FIRSTSET) != 0)? 1 :
((re->flags & PCRE2_STARTLINE) != 0)? 2 : 0;
@ -152,6 +159,11 @@ switch(what)
&(re->start_bitmap[0]) : NULL;
break;
case PCRE2_INFO_FRAMESIZE:
*((size_t *)where) = offsetof(heapframe, ovector) +
re->top_bracket * 2 * sizeof(PCRE2_SIZE);
break;
case PCRE2_INFO_HASBACKSLASHC:
*((uint32_t *)where) = (re->flags & PCRE2_HASBKC) != 0;
break;
@ -160,6 +172,11 @@ switch(what)
*((uint32_t *)where) = (re->flags & PCRE2_HASCRORLF) != 0;
break;
case PCRE2_INFO_HEAPLIMIT:
*((uint32_t *)where) = re->limit_heap;
if (re->limit_heap == UINT32_MAX) return PCRE2_ERROR_UNSET;
break;
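PCRE2_INFO_HEAPLIMIT, PCRE2_INFO_DEPTHLIMIT, and PCRE2_INFO_FRAMESIZE are the queries added here. The two limit queries report values set inside the pattern itself and return PCRE2_ERROR_UNSET otherwise; FRAMESIZE reports the per-frame cost of the new backtracking frame vector. A usage sketch (the helper name is invented; the heap limit is understood to be in kibibytes):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>

static void report_limits(const pcre2_code *re)
{
  uint32_t heap, depth;
  size_t framesize;
  if (pcre2_pattern_info(re, PCRE2_INFO_HEAPLIMIT, &heap) == 0)
    printf("pattern heap limit: %u\n", heap);
  if (pcre2_pattern_info(re, PCRE2_INFO_DEPTHLIMIT, &depth) == 0)
    printf("pattern depth limit: %u\n", depth);
  if (pcre2_pattern_info(re, PCRE2_INFO_FRAMESIZE, &framesize) == 0)
    printf("backtracking frame size: %zu bytes\n", framesize);
}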
case PCRE2_INFO_JCHANGED:
*((uint32_t *)where) = (re->flags & PCRE2_JCHANGED) != 0;
break;
@ -215,11 +232,6 @@ switch(what)
*((uint32_t *)where) = re->newline_convention;
break;
case PCRE2_INFO_RECURSIONLIMIT:
*((uint32_t *)where) = re->limit_recursion;
if (re->limit_recursion == UINT32_MAX) return PCRE2_ERROR_UNSET;
break;
case PCRE2_INFO_SIZE:
*((size_t *)where) = re->blocksize;
break;
@ -255,11 +267,15 @@ pcre2_real_code *re = (pcre2_real_code *)code;
pcre2_callout_enumerate_block cb;
PCRE2_SPTR cc;
#ifdef SUPPORT_UNICODE
BOOL utf = (re->overall_options & PCRE2_UTF) != 0;
BOOL utf;
#endif
if (re == NULL) return PCRE2_ERROR_NULL;
#ifdef SUPPORT_UNICODE
utf = (re->overall_options & PCRE2_UTF) != 0;
#endif
/* Check that the first field in the block is the magic number. If it is not,
return with PCRE2_ERROR_BADMAGIC. */
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -206,7 +206,7 @@ print_custring(FILE *f, PCRE2_SPTR ptr)
{
while (*ptr != '\0')
{
register uint32_t c = *ptr++;
uint32_t c = *ptr++;
if (PRINTABLE(c)) fprintf(f, "%c", c); else fprintf(f, "\\x{%x}", c);
}
}
@ -216,7 +216,7 @@ print_custring_bylen(FILE *f, PCRE2_SPTR ptr, PCRE2_UCHAR len)
{
for (; len > 0; len--)
{
register uint32_t c = *ptr++;
uint32_t c = *ptr++;
if (PRINTABLE(c)) fprintf(f, "%c", c); else fprintf(f, "\\x{%x}", c);
}
}
@ -340,7 +340,7 @@ for(;;)
case OP_TABLE_LENGTH +
((sizeof(OP_names)/sizeof(const char *) == OP_TABLE_LENGTH) &&
(sizeof(OP_lengths) == OP_TABLE_LENGTH)):
break;
return;
/* ========================================================================== */
case OP_END:
@ -393,7 +393,6 @@ for(;;)
case OP_ASSERTBACK:
case OP_ASSERTBACK_NOT:
case OP_ONCE:
case OP_ONCE_NC:
case OP_COND:
case OP_SCOND:
case OP_REVERSE:
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -214,7 +214,10 @@ for (i = 0; i < number_of_codes; i++)
if (dst_re->magic_number != MAGIC_NUMBER ||
dst_re->name_entry_size > MAX_NAME_SIZE + IMM2_SIZE + 1 ||
dst_re->name_count > MAX_NAME_COUNT)
{
memctl->free(dst_re, memctl->memory_data);
return PCRE2_ERROR_BADSERIALIZEDDATA;
}
/* At the moment only one table is supported. */
View File
@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -46,9 +46,11 @@ collecting data (e.g. minimum matching length). */
#include "config.h"
#endif
#include "pcre2_internal.h"
/* The maximum remembered capturing brackets minimum. */
#define MAX_CACHE_BACKREF 128
/* Set a bit in the starting code unit bit map. */
@ -71,6 +73,12 @@ length is 16-bits long (on the grounds that anything longer than that is
pathological), so we give up when we reach that amount. This also means that
integer overflow for really crazy patterns cannot happen.
Backreference minimum lengths are cached to speed up multiple references. This
function is called only when the highest back reference in the pattern is less
than or equal to MAX_CACHE_BACKREF, which is one less than the size of the
caching vector. The zeroth element contains the number of the highest set
value.
Arguments:
re compiled pattern block
code pointer to start of group (the bracket)
@ -78,6 +86,7 @@ Arguments:
utf UTF flag
recurses chain of recurse_check to catch mutual recursion
countptr pointer to call count (to catch over complexity)
backref_cache vector for caching back references.
Returns: the minimum length
-1 \C in UTF-8 mode
@ -90,7 +99,8 @@ Returns: the minimum length
static int
find_minlength(const pcre2_real_code *re, PCRE2_SPTR code,
PCRE2_SPTR startcode, BOOL utf, recurse_check *recurses, int *countptr)
PCRE2_SPTR startcode, BOOL utf, recurse_check *recurses, int *countptr,
int *backref_cache)
{
int length = -1;
int prev_cap_recno = -1;
@ -101,8 +111,8 @@ uint32_t once_fudge = 0;
BOOL had_recurse = FALSE;
BOOL dupcapused = (re->flags & PCRE2_DUPCAPUSED) != 0;
recurse_check this_recurse;
register int branchlength = 0;
register PCRE2_UCHAR *cc = (PCRE2_UCHAR *)code + 1 + LINK_SIZE;
int branchlength = 0;
PCRE2_UCHAR *cc = (PCRE2_UCHAR *)code + 1 + LINK_SIZE;
/* If this is a "could be empty" group, its minimum length is 0. */
@ -124,7 +134,7 @@ for (;;)
{
int d, min, recno;
PCRE2_UCHAR *cs, *ce;
register PCRE2_UCHAR op = *cc;
PCRE2_UCHAR op = *cc;
if (branchlength >= UINT16_MAX) return UINT16_MAX;
@ -146,12 +156,12 @@ for (;;)
}
goto PROCESS_NON_CAPTURE;
/* There's a special case of OP_ONCE, when it is wrapped round an
case OP_BRA:
/* There's a special case of OP_BRA, when it is wrapped round a repeated
OP_RECURSE. We'd like to process the latter at this level so that
remembering the value works for repeated cases. So we do nothing, but
set a fudge value to skip over the OP_KET after the recurse. */
case OP_ONCE:
if (cc[1+LINK_SIZE] == OP_RECURSE && cc[2*(1+LINK_SIZE)] == OP_KET)
{
once_fudge = 1 + LINK_SIZE;
@ -160,13 +170,13 @@ for (;;)
}
/* Fall through */
case OP_ONCE_NC:
case OP_BRA:
case OP_ONCE:
case OP_SBRA:
case OP_BRAPOS:
case OP_SBRAPOS:
PROCESS_NON_CAPTURE:
d = find_minlength(re, cc, startcode, utf, recurses, countptr);
d = find_minlength(re, cc, startcode, utf, recurses, countptr,
backref_cache);
if (d < 0) return d;
branchlength += d;
do cc += GET(cc, 1); while (*cc == OP_ALT);
@ -182,11 +192,12 @@ for (;;)
case OP_SCBRA:
case OP_CBRAPOS:
case OP_SCBRAPOS:
recno = dupcapused? prev_cap_recno - 1 : (int)GET2(cc, 1+LINK_SIZE);
if (recno != prev_cap_recno)
recno = (int)GET2(cc, 1+LINK_SIZE);
if (dupcapused || recno != prev_cap_recno)
{
prev_cap_recno = recno;
prev_cap_d = find_minlength(re, cc, startcode, utf, recurses, countptr);
prev_cap_d = find_minlength(re, cc, startcode, utf, recurses, countptr,
backref_cache);
if (prev_cap_d < 0) return prev_cap_d;
}
branchlength += prev_cap_d;
@ -456,38 +467,52 @@ for (;;)
d = INT_MAX;
/* Scan all groups with the same name */
/* Scan all groups with the same name; find the shortest. */
while (count-- > 0)
{
ce = cs = (PCRE2_UCHAR *)PRIV(find_bracket)(startcode, utf, GET2(slot, 0));
int dd, i;
recno = GET2(slot, 0);
if (recno <= backref_cache[0] && backref_cache[recno] >= 0)
dd = backref_cache[recno];
else
{
ce = cs = (PCRE2_UCHAR *)PRIV(find_bracket)(startcode, utf, recno);
if (cs == NULL) return -2;
do ce += GET(ce, 1); while (*ce == OP_ALT);
if (cc > cs && cc < ce) /* Simple recursion */
{
d = 0;
dd = 0;
had_recurse = TRUE;
break;
}
else
{
recurse_check *r = recurses;
for (r = recurses; r != NULL; r = r->prev) if (r->group == cs) break;
for (r = recurses; r != NULL; r = r->prev)
if (r->group == cs) break;
if (r != NULL) /* Mutual recursion */
{
d = 0;
dd = 0;
had_recurse = TRUE;
break;
}
else
{
int dd;
this_recurse.prev = recurses;
this_recurse.group = cs;
dd = find_minlength(re, cs, startcode, utf, &this_recurse, countptr);
dd = find_minlength(re, cs, startcode, utf, &this_recurse,
countptr, backref_cache);
if (dd < 0) return dd;
}
}
backref_cache[recno] = dd;
for (i = backref_cache[0] + 1; i < recno; i++) backref_cache[i] = -1;
backref_cache[0] = recno;
}
if (dd < d) d = dd;
}
}
if (d <= 0) break; /* No point looking at any more */
slot += re->name_entry_size;
}
}
@ -501,9 +526,15 @@ for (;;)
case OP_REF:
case OP_REFI:
if (dupcapused) return -1;
recno = GET2(cc, 1);
if (recno <= backref_cache[0] && backref_cache[recno] >= 0)
d = backref_cache[recno];
else
{
int i;
if ((re->overall_options & PCRE2_MATCH_UNSET_BACKREF) == 0)
{
ce = cs = (PCRE2_UCHAR *)PRIV(find_bracket)(startcode, utf, GET2(cc, 1));
ce = cs = (PCRE2_UCHAR *)PRIV(find_bracket)(startcode, utf, recno);
if (cs == NULL) return -2;
do ce += GET(ce, 1); while (*ce == OP_ALT);
if (cc > cs && cc < ce) /* Simple recursion */
@ -524,11 +555,19 @@ for (;;)
{
this_recurse.prev = recurses;
this_recurse.group = cs;
d = find_minlength(re, cs, startcode, utf, &this_recurse, countptr);
d = find_minlength(re, cs, startcode, utf, &this_recurse, countptr,
backref_cache);
if (d < 0) return d;
}
}
}
else d = 0;
backref_cache[recno] = d;
for (i = backref_cache[0] + 1; i < recno; i++) backref_cache[i] = -1;
backref_cache[0] = recno;
}
cc += 1 + IMM2_SIZE;
/* Handle repeated back references */
@ -601,7 +640,7 @@ for (;;)
this_recurse.prev = recurses;
this_recurse.group = cs;
prev_recurse_d = find_minlength(re, cs, startcode, utf, &this_recurse,
countptr);
countptr, backref_cache);
if (prev_recurse_d < 0) return prev_recurse_d;
prev_recurse_recno = recno;
branchlength += prev_recurse_d;
@ -747,6 +786,7 @@ if (utf)
if (caseless)
{
#ifdef SUPPORT_UNICODE
if (utf)
{
#if PCRE2_CODE_UNIT_WIDTH == 8
@ -759,10 +799,12 @@ if (caseless)
if (c > 0xff) SET_BIT(0xff); else SET_BIT(c);
#endif
}
else
#endif /* SUPPORT_UNICODE */
/* Not UTF */
else if (MAX_255(c)) SET_BIT(re->tables[fcc_offset + c]);
if (MAX_255(c)) SET_BIT(re->tables[fcc_offset + c]);
}
return p;
@ -792,7 +834,7 @@ Returns: nothing
static void
set_type_bits(pcre2_real_code *re, int cbit_type, unsigned int table_limit)
{
register uint32_t c;
uint32_t c;
for (c = 0; c < table_limit; c++)
re->start_bitmap[c] |= re->tables[c+cbits_offset+cbit_type];
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH == 8
@ -833,7 +875,7 @@ Returns: nothing
static void
set_nottype_bits(pcre2_real_code *re, int cbit_type, unsigned int table_limit)
{
register uint32_t c;
uint32_t c;
for (c = 0; c < table_limit; c++)
re->start_bitmap[c] |= ~(re->tables[c+cbits_offset+cbit_type]);
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH == 8
@ -873,7 +915,7 @@ Returns: SSB_FAIL => Failed to find any starting code units
static int
set_start_bits(pcre2_real_code *re, PCRE2_SPTR code, BOOL utf)
{
register uint32_t c;
uint32_t c;
int yield = SSB_DONE;
#if defined SUPPORT_UNICODE && PCRE2_CODE_UNIT_WIDTH == 8
@ -911,7 +953,6 @@ do
case OP_ALLANY:
case OP_ANY:
case OP_ANYBYTE:
case OP_CIRC:
case OP_CIRCM:
case OP_CLOSE:
case OP_COMMIT:
@ -979,6 +1020,13 @@ do
case OP_THEN_ARG:
return SSB_FAIL;
/* OP_CIRC happens only at the start of an anchored branch (multiline ^
uses OP_CIRCM). Skip over it. */
case OP_CIRC:
tcode += PRIV(OP_lengths)[OP_CIRC];
break;
/* A "real" property test implies no starting bits, but the fake property
PT_CLIST identifies a list of characters. These lists are short, as they
are used for characters with more than one "other case", so there is no
@ -1025,7 +1073,6 @@ do
case OP_CBRAPOS:
case OP_SCBRAPOS:
case OP_ONCE:
case OP_ONCE_NC:
case OP_ASSERT:
rc = set_start_bits(re, tcode, utf);
if (rc == SSB_FAIL || rc == SSB_UNKNOWN) return rc;
@ -1407,6 +1454,10 @@ do
classmap = ((tcode[1 + LINK_SIZE] & XCL_MAP) == 0)? NULL :
(uint8_t *)(tcode + 1 + LINK_SIZE + 1);
#endif
/* It seems that the fall through comment must be outside the #ifdef if
it is to avoid the gcc compiler warning. */
/* Fall through */
/* Enter here for a negative non-XCLASS. In the 8-bit library, if we are
in UTF mode, any byte with a value >= 0xc4 is a potentially valid starter
@ -1534,24 +1585,31 @@ BOOL utf = (re->overall_options & PCRE2_UTF) != 0;
code = (PCRE2_UCHAR *)((uint8_t *)re + sizeof(pcre2_real_code)) +
re->name_entry_size * re->name_count;
/* For an anchored pattern, or an unanchored pattern that has a first code
unit, or a multiline pattern that matches only at "line start", there is no
point in seeking a list of starting code units. */
/* For a pattern that has a first code unit, or a multiline pattern that
matches only at "line start", there is no point in seeking a list of starting
code units. */
if ((re->overall_options & PCRE2_ANCHORED) == 0 &&
(re->flags & (PCRE2_FIRSTSET|PCRE2_STARTLINE)) == 0)
if ((re->flags & (PCRE2_FIRSTSET|PCRE2_STARTLINE)) == 0)
{
int rc = set_start_bits(re, code, utf);
if (rc == SSB_UNKNOWN) return 1;
if (rc == SSB_DONE) re->flags |= PCRE2_FIRSTMAPSET;
}
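The result of this analysis is visible through the public API: when set_start_bits() succeeds, PCRE2_FIRSTMAPSET is set and pcre2_pattern_info() exposes the 256-bit table of possible starting code units. A minimal, hedged usage sketch (the pattern and program are illustrative, not part of the patch):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int errcode;
    PCRE2_SIZE erroffset;
    /* No single first code unit and not anchored, so a starting bitmap is built. */
    pcre2_code *re = pcre2_compile((PCRE2_SPTR)"(cat|dog)", PCRE2_ZERO_TERMINATED,
                                   0, &errcode, &erroffset, NULL);
    if (re == NULL) return 1;

    const uint8_t *map = NULL;
    pcre2_pattern_info(re, PCRE2_INFO_FIRSTBITMAP, &map);   /* NULL if no map */
    printf("starting code-unit bitmap %s\n", map != NULL ? "present" : "absent");

    pcre2_code_free(re);
    return 0;
}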
/* Find the minimum length of subject string. If it can match an empty string,
the minimum length is already known. */
/* Find the minimum length of subject string. If the pattern can match an empty
string, the minimum length is already known. If there are more back references
than the size of the vector we are going to cache them in, do nothing. A
pattern that complicated will probably take a long time to analyze and may in
any case turn out to be too complicated. Note that back reference minima are
held as 16-bit numbers. */
if ((re->flags & PCRE2_MATCH_EMPTY) == 0)
if ((re->flags & PCRE2_MATCH_EMPTY) == 0 &&
re->top_backref <= MAX_CACHE_BACKREF)
{
switch(min = find_minlength(re, code, code, utf, NULL, &count))
int backref_cache[MAX_CACHE_BACKREF+1];
backref_cache[0] = 0; /* Highest one that is set */
min = find_minlength(re, code, code, utf, NULL, &count, backref_cache);
switch(min)
{
case -1: /* \C in UTF mode or (*ACCEPT) or over-complex regex */
break; /* Leave minlength unchanged (will be zero) */
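The cache added above memoizes the minimum length contributed by each back reference, so repeated references do not trigger repeated recursive scans of the referenced group. A minimal sketch of the idea, assuming an illustrative MAX_CACHE_BACKREF value and a caller-supplied analysis function (this is not the PCRE2 implementation):

#define MAX_CACHE_BACKREF 128   /* illustrative; the real limit is defined elsewhere */

/* backref_cache[0] holds the highest reference number cached so far; slots
   skipped over are set to -1 so a later lookup recomputes them. */
static int cached_backref_min(int recno, int *backref_cache,
                              int (*compute_min)(int recno))
{
    int i, d;
    if (recno <= backref_cache[0] && backref_cache[recno] >= 0)
        return backref_cache[recno];              /* cache hit */
    d = compute_min(recno);                       /* expensive group scan */
    backref_cache[recno] = d;
    for (i = backref_cache[0] + 1; i < recno; i++) backref_cache[i] = -1;
    backref_cache[0] = recno;
    return d;
}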


@ -114,7 +114,7 @@ for (; ptr < ptrend; ptr++)
else if (*ptr == CHAR_BACKSLASH)
{
int erc;
int errorcode = 0;
int errorcode;
uint32_t ch;
if (ptr < ptrend - 1) switch (ptr[1])
@ -127,8 +127,10 @@ for (; ptr < ptrend; ptr++)
continue;
}
ptr += 1; /* Must point after \ */
erc = PRIV(check_escape)(&ptr, ptrend, &ch, &errorcode,
code->overall_options, FALSE, NULL);
ptr -= 1; /* Back to last code unit of escape */
if (errorcode != 0)
{
rc = errorcode;
@ -287,6 +289,12 @@ options &= ~SUBSTITUTE_OPTIONS;
/* Copy up to the start offset */
if (start_offset > length)
{
match_data->leftchar = 0;
rc = PCRE2_ERROR_BADOFFSET;
goto EXIT;
}
CHECKMEMCPY(subject, start_offset);
/* Loop for global substituting. */
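With the new bounds check above, a start offset beyond the end of the subject is reported cleanly instead of being copied out of range. A hedged usage sketch (pattern, subject and buffer size are illustrative, not part of the patch):

#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    int errcode;
    PCRE2_SIZE erroffset;
    pcre2_code *re = pcre2_compile((PCRE2_SPTR)"cat", PCRE2_ZERO_TERMINATED, 0,
                                   &errcode, &erroffset, NULL);
    if (re == NULL) return 1;

    PCRE2_UCHAR out[64];
    PCRE2_SIZE outlen = sizeof(out);      /* output size in code units */
    const char *subject = "one cat";

    /* Start offset 100 is past the 7-unit subject: expect PCRE2_ERROR_BADOFFSET. */
    int rc = pcre2_substitute(re, (PCRE2_SPTR)subject, strlen(subject), 100, 0,
                              NULL, NULL, (PCRE2_SPTR)"dog", 3, out, &outlen);
    printf("rc = %d\n", rc);              /* negative error code */

    pcre2_code_free(re);
    return 0;
}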
@ -698,7 +706,7 @@ do
else if ((suboptions & PCRE2_SUBSTITUTE_EXTENDED) != 0 &&
*ptr == CHAR_BACKSLASH)
{
int errorcode = 0;
int errorcode;
if (ptr < repend - 1) switch (ptr[1])
{
@ -728,10 +736,10 @@ do
break;
}
ptr++; /* Point after \ */
rc = PRIV(check_escape)(&ptr, repend, &ch, &errorcode,
code->overall_options, FALSE, NULL);
if (errorcode != 0) goto BADESCAPE;
ptr++;
switch(rc)
{


@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -39,7 +39,7 @@ POSSIBILITY OF SUCH DAMAGE.
*/
/* This module contains some fixed tables that are used by more than one of the
PCRE code modules. The tables are also #included by the pcre2test program,
PCRE2 code modules. The tables are also #included by the pcre2test program,
which uses macros to change their names from _pcre2_xxx to xxxx, thereby
avoiding name clashes with the library. In this case, PCRE2_PCRE2TEST is
defined. */
@ -157,44 +157,62 @@ two code points. The breaking rules are as follows:
LV or V may be followed by V or T
LVT or T may be followed by T
4. Do not break before extending characters.
4. Do not break before extending characters or zero-width-joiner (ZWJ).
The next two rules are only for extended grapheme clusters (but that's what we
The following rules are only for extended grapheme clusters (but that's what we
are implementing).
5. Do not break before SpacingMarks.
6. Do not break after Prepend characters.
7. Otherwise, break everywhere.
7. Do not break within emoji modifier sequences (E_Base or E_Base_GAZ followed
by E_Modifier). Extend characters are allowed before the modifier; this
cannot be represented in this table, the code has to deal with it.
8. Do not break within emoji zwj sequences (ZWJ followed by Glue_After_Zwj or
E_Base_GAZ).
9. Do not break within emoji flag sequences. That is, do not break between
regional indicator (RI) symbols if there are an odd number of RI characters
before the break point. This table encodes "join RI characters"; the code
has to deal with checking for previous adjoining RIs.
10. Otherwise, break everywhere.
*/
#define ESZ (1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbZWJ)
const uint32_t PRIV(ucp_gbtable)[] = {
(1<<ucp_gbLF), /* 0 CR */
0, /* 1 LF */
0, /* 2 Control */
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark), /* 3 Extend */
(1<<ucp_gbExtend)|(1<<ucp_gbPrepend)| /* 4 Prepend */
(1<<ucp_gbSpacingMark)|(1<<ucp_gbL)|
(1<<ucp_gbV)|(1<<ucp_gbT)|(1<<ucp_gbLV)|
(1<<ucp_gbLVT)|(1<<ucp_gbOther),
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark), /* 5 SpacingMark */
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbL)| /* 6 L */
(1<<ucp_gbL)|(1<<ucp_gbV)|(1<<ucp_gbLV)|(1<<ucp_gbLVT),
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbV)| /* 7 V */
(1<<ucp_gbT),
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbT), /* 8 T */
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbV)| /* 9 LV */
(1<<ucp_gbT),
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark)|(1<<ucp_gbT), /* 10 LVT */
ESZ, /* 3 Extend */
ESZ|(1<<ucp_gbPrepend)| /* 4 Prepend */
(1<<ucp_gbL)|(1<<ucp_gbV)|(1<<ucp_gbT)|
(1<<ucp_gbLV)|(1<<ucp_gbLVT)|(1<<ucp_gbOther)|
(1<<ucp_gbRegionalIndicator)|
(1<<ucp_gbE_Base)|(1<<ucp_gbE_Modifier)|
(1<<ucp_gbE_Base_GAZ)|
(1<<ucp_gbZWJ)|(1<<ucp_gbGlue_After_Zwj),
ESZ, /* 5 SpacingMark */
ESZ|(1<<ucp_gbL)|(1<<ucp_gbV)|(1<<ucp_gbLV)| /* 6 L */
(1<<ucp_gbLVT),
ESZ|(1<<ucp_gbV)|(1<<ucp_gbT), /* 7 V */
ESZ|(1<<ucp_gbT), /* 8 T */
ESZ|(1<<ucp_gbV)|(1<<ucp_gbT), /* 9 LV */
ESZ|(1<<ucp_gbT), /* 10 LVT */
(1<<ucp_gbRegionalIndicator), /* 11 RegionalIndicator */
(1<<ucp_gbExtend)|(1<<ucp_gbSpacingMark) /* 12 Other */
ESZ, /* 12 Other */
ESZ|(1<<ucp_gbE_Modifier), /* 13 E_Base */
ESZ, /* 14 E_Modifier */
ESZ|(1<<ucp_gbE_Modifier), /* 15 E_Base_GAZ */
ESZ|(1<<ucp_gbGlue_After_Zwj)|(1<<ucp_gbE_Base_GAZ), /* 16 ZWJ */
ESZ /* 17 Glue_After_Zwj */
};
#undef ESZ
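The matcher consults this table by using the grapheme break property of the left-hand character as a row index and the property of the right-hand character as a bit number; a set bit means the two characters stay in the same cluster. A hedged sketch of that test (names simplified, not the exact PCRE2 code):

#include <stdint.h>

/* Returns nonzero when no grapheme break is allowed between the two
   characters; gbtable is laid out like PRIV(ucp_gbtable) above. */
static int gb_no_break(const uint32_t *gbtable, unsigned int left_gb,
                       unsigned int right_gb)
{
    return (gbtable[left_gb] & (1u << right_gb)) != 0;
}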
#ifdef SUPPORT_JIT
/* This table reverses PRIV(ucp_gentype). We can save the cost
of a memory load. */
@ -227,6 +245,7 @@ version. Like all other character and string literals that are compared against
the regular expression pattern, we must use STR_ macros instead of literal
strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Adlam0 STR_A STR_d STR_l STR_a STR_m "\0"
#define STRING_Ahom0 STR_A STR_h STR_o STR_m "\0"
#define STRING_Anatolian_Hieroglyphs0 STR_A STR_n STR_a STR_t STR_o STR_l STR_i STR_a STR_n STR_UNDERSCORE STR_H STR_i STR_e STR_r STR_o STR_g STR_l STR_y STR_p STR_h STR_s "\0"
#define STRING_Any0 STR_A STR_n STR_y "\0"
@ -238,6 +257,7 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Bassa_Vah0 STR_B STR_a STR_s STR_s STR_a STR_UNDERSCORE STR_V STR_a STR_h "\0"
#define STRING_Batak0 STR_B STR_a STR_t STR_a STR_k "\0"
#define STRING_Bengali0 STR_B STR_e STR_n STR_g STR_a STR_l STR_i "\0"
#define STRING_Bhaiksuki0 STR_B STR_h STR_a STR_i STR_k STR_s STR_u STR_k STR_i "\0"
#define STRING_Bopomofo0 STR_B STR_o STR_p STR_o STR_m STR_o STR_f STR_o "\0"
#define STRING_Brahmi0 STR_B STR_r STR_a STR_h STR_m STR_i "\0"
#define STRING_Braille0 STR_B STR_r STR_a STR_i STR_l STR_l STR_e "\0"
@ -313,6 +333,8 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Malayalam0 STR_M STR_a STR_l STR_a STR_y STR_a STR_l STR_a STR_m "\0"
#define STRING_Mandaic0 STR_M STR_a STR_n STR_d STR_a STR_i STR_c "\0"
#define STRING_Manichaean0 STR_M STR_a STR_n STR_i STR_c STR_h STR_a STR_e STR_a STR_n "\0"
#define STRING_Marchen0 STR_M STR_a STR_r STR_c STR_h STR_e STR_n "\0"
#define STRING_Masaram_Gondi0 STR_M STR_a STR_s STR_a STR_r STR_a STR_m STR_UNDERSCORE STR_G STR_o STR_n STR_d STR_i "\0"
#define STRING_Mc0 STR_M STR_c "\0"
#define STRING_Me0 STR_M STR_e "\0"
#define STRING_Meetei_Mayek0 STR_M STR_e STR_e STR_t STR_e STR_i STR_UNDERSCORE STR_M STR_a STR_y STR_e STR_k "\0"
@ -330,9 +352,11 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Nabataean0 STR_N STR_a STR_b STR_a STR_t STR_a STR_e STR_a STR_n "\0"
#define STRING_Nd0 STR_N STR_d "\0"
#define STRING_New_Tai_Lue0 STR_N STR_e STR_w STR_UNDERSCORE STR_T STR_a STR_i STR_UNDERSCORE STR_L STR_u STR_e "\0"
#define STRING_Newa0 STR_N STR_e STR_w STR_a "\0"
#define STRING_Nko0 STR_N STR_k STR_o "\0"
#define STRING_Nl0 STR_N STR_l "\0"
#define STRING_No0 STR_N STR_o "\0"
#define STRING_Nushu0 STR_N STR_u STR_s STR_h STR_u "\0"
#define STRING_Ogham0 STR_O STR_g STR_h STR_a STR_m "\0"
#define STRING_Ol_Chiki0 STR_O STR_l STR_UNDERSCORE STR_C STR_h STR_i STR_k STR_i "\0"
#define STRING_Old_Hungarian0 STR_O STR_l STR_d STR_UNDERSCORE STR_H STR_u STR_n STR_g STR_a STR_r STR_i STR_a STR_n "\0"
@ -343,6 +367,7 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Old_South_Arabian0 STR_O STR_l STR_d STR_UNDERSCORE STR_S STR_o STR_u STR_t STR_h STR_UNDERSCORE STR_A STR_r STR_a STR_b STR_i STR_a STR_n "\0"
#define STRING_Old_Turkic0 STR_O STR_l STR_d STR_UNDERSCORE STR_T STR_u STR_r STR_k STR_i STR_c "\0"
#define STRING_Oriya0 STR_O STR_r STR_i STR_y STR_a "\0"
#define STRING_Osage0 STR_O STR_s STR_a STR_g STR_e "\0"
#define STRING_Osmanya0 STR_O STR_s STR_m STR_a STR_n STR_y STR_a "\0"
#define STRING_P0 STR_P "\0"
#define STRING_Pahawh_Hmong0 STR_P STR_a STR_h STR_a STR_w STR_h STR_UNDERSCORE STR_H STR_m STR_o STR_n STR_g "\0"
@ -373,6 +398,7 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Sm0 STR_S STR_m "\0"
#define STRING_So0 STR_S STR_o "\0"
#define STRING_Sora_Sompeng0 STR_S STR_o STR_r STR_a STR_UNDERSCORE STR_S STR_o STR_m STR_p STR_e STR_n STR_g "\0"
#define STRING_Soyombo0 STR_S STR_o STR_y STR_o STR_m STR_b STR_o "\0"
#define STRING_Sundanese0 STR_S STR_u STR_n STR_d STR_a STR_n STR_e STR_s STR_e "\0"
#define STRING_Syloti_Nagri0 STR_S STR_y STR_l STR_o STR_t STR_i STR_UNDERSCORE STR_N STR_a STR_g STR_r STR_i "\0"
#define STRING_Syriac0 STR_S STR_y STR_r STR_i STR_a STR_c "\0"
@ -383,6 +409,7 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Tai_Viet0 STR_T STR_a STR_i STR_UNDERSCORE STR_V STR_i STR_e STR_t "\0"
#define STRING_Takri0 STR_T STR_a STR_k STR_r STR_i "\0"
#define STRING_Tamil0 STR_T STR_a STR_m STR_i STR_l "\0"
#define STRING_Tangut0 STR_T STR_a STR_n STR_g STR_u STR_t "\0"
#define STRING_Telugu0 STR_T STR_e STR_l STR_u STR_g STR_u "\0"
#define STRING_Thaana0 STR_T STR_h STR_a STR_a STR_n STR_a "\0"
#define STRING_Thai0 STR_T STR_h STR_a STR_i "\0"
@ -399,11 +426,13 @@ strings to make sure that UTF-8 support works on EBCDIC platforms. */
#define STRING_Xwd0 STR_X STR_w STR_d "\0"
#define STRING_Yi0 STR_Y STR_i "\0"
#define STRING_Z0 STR_Z "\0"
#define STRING_Zanabazar_Square0 STR_Z STR_a STR_n STR_a STR_b STR_a STR_z STR_a STR_r STR_UNDERSCORE STR_S STR_q STR_u STR_a STR_r STR_e "\0"
#define STRING_Zl0 STR_Z STR_l "\0"
#define STRING_Zp0 STR_Z STR_p "\0"
#define STRING_Zs0 STR_Z STR_s "\0"
const char PRIV(utt_names)[] =
STRING_Adlam0
STRING_Ahom0
STRING_Anatolian_Hieroglyphs0
STRING_Any0
@ -415,6 +444,7 @@ const char PRIV(utt_names)[] =
STRING_Bassa_Vah0
STRING_Batak0
STRING_Bengali0
STRING_Bhaiksuki0
STRING_Bopomofo0
STRING_Brahmi0
STRING_Braille0
@ -490,6 +520,8 @@ const char PRIV(utt_names)[] =
STRING_Malayalam0
STRING_Mandaic0
STRING_Manichaean0
STRING_Marchen0
STRING_Masaram_Gondi0
STRING_Mc0
STRING_Me0
STRING_Meetei_Mayek0
@ -507,9 +539,11 @@ const char PRIV(utt_names)[] =
STRING_Nabataean0
STRING_Nd0
STRING_New_Tai_Lue0
STRING_Newa0
STRING_Nko0
STRING_Nl0
STRING_No0
STRING_Nushu0
STRING_Ogham0
STRING_Ol_Chiki0
STRING_Old_Hungarian0
@ -520,6 +554,7 @@ const char PRIV(utt_names)[] =
STRING_Old_South_Arabian0
STRING_Old_Turkic0
STRING_Oriya0
STRING_Osage0
STRING_Osmanya0
STRING_P0
STRING_Pahawh_Hmong0
@ -550,6 +585,7 @@ const char PRIV(utt_names)[] =
STRING_Sm0
STRING_So0
STRING_Sora_Sompeng0
STRING_Soyombo0
STRING_Sundanese0
STRING_Syloti_Nagri0
STRING_Syriac0
@ -560,6 +596,7 @@ const char PRIV(utt_names)[] =
STRING_Tai_Viet0
STRING_Takri0
STRING_Tamil0
STRING_Tangut0
STRING_Telugu0
STRING_Thaana0
STRING_Thai0
@ -576,186 +613,197 @@ const char PRIV(utt_names)[] =
STRING_Xwd0
STRING_Yi0
STRING_Z0
STRING_Zanabazar_Square0
STRING_Zl0
STRING_Zp0
STRING_Zs0;
const ucp_type_table PRIV(utt)[] = {
{ 0, PT_SC, ucp_Ahom },
{ 5, PT_SC, ucp_Anatolian_Hieroglyphs },
{ 27, PT_ANY, 0 },
{ 31, PT_SC, ucp_Arabic },
{ 38, PT_SC, ucp_Armenian },
{ 47, PT_SC, ucp_Avestan },
{ 55, PT_SC, ucp_Balinese },
{ 64, PT_SC, ucp_Bamum },
{ 70, PT_SC, ucp_Bassa_Vah },
{ 80, PT_SC, ucp_Batak },
{ 86, PT_SC, ucp_Bengali },
{ 94, PT_SC, ucp_Bopomofo },
{ 103, PT_SC, ucp_Brahmi },
{ 110, PT_SC, ucp_Braille },
{ 118, PT_SC, ucp_Buginese },
{ 127, PT_SC, ucp_Buhid },
{ 133, PT_GC, ucp_C },
{ 135, PT_SC, ucp_Canadian_Aboriginal },
{ 155, PT_SC, ucp_Carian },
{ 162, PT_SC, ucp_Caucasian_Albanian },
{ 181, PT_PC, ucp_Cc },
{ 184, PT_PC, ucp_Cf },
{ 187, PT_SC, ucp_Chakma },
{ 194, PT_SC, ucp_Cham },
{ 199, PT_SC, ucp_Cherokee },
{ 208, PT_PC, ucp_Cn },
{ 211, PT_PC, ucp_Co },
{ 214, PT_SC, ucp_Common },
{ 221, PT_SC, ucp_Coptic },
{ 228, PT_PC, ucp_Cs },
{ 231, PT_SC, ucp_Cuneiform },
{ 241, PT_SC, ucp_Cypriot },
{ 249, PT_SC, ucp_Cyrillic },
{ 258, PT_SC, ucp_Deseret },
{ 266, PT_SC, ucp_Devanagari },
{ 277, PT_SC, ucp_Duployan },
{ 286, PT_SC, ucp_Egyptian_Hieroglyphs },
{ 307, PT_SC, ucp_Elbasan },
{ 315, PT_SC, ucp_Ethiopic },
{ 324, PT_SC, ucp_Georgian },
{ 333, PT_SC, ucp_Glagolitic },
{ 344, PT_SC, ucp_Gothic },
{ 351, PT_SC, ucp_Grantha },
{ 359, PT_SC, ucp_Greek },
{ 365, PT_SC, ucp_Gujarati },
{ 374, PT_SC, ucp_Gurmukhi },
{ 383, PT_SC, ucp_Han },
{ 387, PT_SC, ucp_Hangul },
{ 394, PT_SC, ucp_Hanunoo },
{ 402, PT_SC, ucp_Hatran },
{ 409, PT_SC, ucp_Hebrew },
{ 416, PT_SC, ucp_Hiragana },
{ 425, PT_SC, ucp_Imperial_Aramaic },
{ 442, PT_SC, ucp_Inherited },
{ 452, PT_SC, ucp_Inscriptional_Pahlavi },
{ 474, PT_SC, ucp_Inscriptional_Parthian },
{ 497, PT_SC, ucp_Javanese },
{ 506, PT_SC, ucp_Kaithi },
{ 513, PT_SC, ucp_Kannada },
{ 521, PT_SC, ucp_Katakana },
{ 530, PT_SC, ucp_Kayah_Li },
{ 539, PT_SC, ucp_Kharoshthi },
{ 550, PT_SC, ucp_Khmer },
{ 556, PT_SC, ucp_Khojki },
{ 563, PT_SC, ucp_Khudawadi },
{ 573, PT_GC, ucp_L },
{ 575, PT_LAMP, 0 },
{ 578, PT_SC, ucp_Lao },
{ 582, PT_SC, ucp_Latin },
{ 588, PT_SC, ucp_Lepcha },
{ 595, PT_SC, ucp_Limbu },
{ 601, PT_SC, ucp_Linear_A },
{ 610, PT_SC, ucp_Linear_B },
{ 619, PT_SC, ucp_Lisu },
{ 624, PT_PC, ucp_Ll },
{ 627, PT_PC, ucp_Lm },
{ 630, PT_PC, ucp_Lo },
{ 633, PT_PC, ucp_Lt },
{ 636, PT_PC, ucp_Lu },
{ 639, PT_SC, ucp_Lycian },
{ 646, PT_SC, ucp_Lydian },
{ 653, PT_GC, ucp_M },
{ 655, PT_SC, ucp_Mahajani },
{ 664, PT_SC, ucp_Malayalam },
{ 674, PT_SC, ucp_Mandaic },
{ 682, PT_SC, ucp_Manichaean },
{ 693, PT_PC, ucp_Mc },
{ 696, PT_PC, ucp_Me },
{ 699, PT_SC, ucp_Meetei_Mayek },
{ 712, PT_SC, ucp_Mende_Kikakui },
{ 726, PT_SC, ucp_Meroitic_Cursive },
{ 743, PT_SC, ucp_Meroitic_Hieroglyphs },
{ 764, PT_SC, ucp_Miao },
{ 769, PT_PC, ucp_Mn },
{ 772, PT_SC, ucp_Modi },
{ 777, PT_SC, ucp_Mongolian },
{ 787, PT_SC, ucp_Mro },
{ 791, PT_SC, ucp_Multani },
{ 799, PT_SC, ucp_Myanmar },
{ 807, PT_GC, ucp_N },
{ 809, PT_SC, ucp_Nabataean },
{ 819, PT_PC, ucp_Nd },
{ 822, PT_SC, ucp_New_Tai_Lue },
{ 834, PT_SC, ucp_Nko },
{ 838, PT_PC, ucp_Nl },
{ 841, PT_PC, ucp_No },
{ 844, PT_SC, ucp_Ogham },
{ 850, PT_SC, ucp_Ol_Chiki },
{ 859, PT_SC, ucp_Old_Hungarian },
{ 873, PT_SC, ucp_Old_Italic },
{ 884, PT_SC, ucp_Old_North_Arabian },
{ 902, PT_SC, ucp_Old_Permic },
{ 913, PT_SC, ucp_Old_Persian },
{ 925, PT_SC, ucp_Old_South_Arabian },
{ 943, PT_SC, ucp_Old_Turkic },
{ 954, PT_SC, ucp_Oriya },
{ 960, PT_SC, ucp_Osmanya },
{ 968, PT_GC, ucp_P },
{ 970, PT_SC, ucp_Pahawh_Hmong },
{ 983, PT_SC, ucp_Palmyrene },
{ 993, PT_SC, ucp_Pau_Cin_Hau },
{ 1005, PT_PC, ucp_Pc },
{ 1008, PT_PC, ucp_Pd },
{ 1011, PT_PC, ucp_Pe },
{ 1014, PT_PC, ucp_Pf },
{ 1017, PT_SC, ucp_Phags_Pa },
{ 1026, PT_SC, ucp_Phoenician },
{ 1037, PT_PC, ucp_Pi },
{ 1040, PT_PC, ucp_Po },
{ 1043, PT_PC, ucp_Ps },
{ 1046, PT_SC, ucp_Psalter_Pahlavi },
{ 1062, PT_SC, ucp_Rejang },
{ 1069, PT_SC, ucp_Runic },
{ 1075, PT_GC, ucp_S },
{ 1077, PT_SC, ucp_Samaritan },
{ 1087, PT_SC, ucp_Saurashtra },
{ 1098, PT_PC, ucp_Sc },
{ 1101, PT_SC, ucp_Sharada },
{ 1109, PT_SC, ucp_Shavian },
{ 1117, PT_SC, ucp_Siddham },
{ 1125, PT_SC, ucp_SignWriting },
{ 1137, PT_SC, ucp_Sinhala },
{ 1145, PT_PC, ucp_Sk },
{ 1148, PT_PC, ucp_Sm },
{ 1151, PT_PC, ucp_So },
{ 1154, PT_SC, ucp_Sora_Sompeng },
{ 1167, PT_SC, ucp_Sundanese },
{ 1177, PT_SC, ucp_Syloti_Nagri },
{ 1190, PT_SC, ucp_Syriac },
{ 1197, PT_SC, ucp_Tagalog },
{ 1205, PT_SC, ucp_Tagbanwa },
{ 1214, PT_SC, ucp_Tai_Le },
{ 1221, PT_SC, ucp_Tai_Tham },
{ 1230, PT_SC, ucp_Tai_Viet },
{ 1239, PT_SC, ucp_Takri },
{ 1245, PT_SC, ucp_Tamil },
{ 1251, PT_SC, ucp_Telugu },
{ 1258, PT_SC, ucp_Thaana },
{ 1265, PT_SC, ucp_Thai },
{ 1270, PT_SC, ucp_Tibetan },
{ 1278, PT_SC, ucp_Tifinagh },
{ 1287, PT_SC, ucp_Tirhuta },
{ 1295, PT_SC, ucp_Ugaritic },
{ 1304, PT_SC, ucp_Vai },
{ 1308, PT_SC, ucp_Warang_Citi },
{ 1320, PT_ALNUM, 0 },
{ 1324, PT_PXSPACE, 0 },
{ 1328, PT_SPACE, 0 },
{ 1332, PT_UCNC, 0 },
{ 1336, PT_WORD, 0 },
{ 1340, PT_SC, ucp_Yi },
{ 1343, PT_GC, ucp_Z },
{ 1345, PT_PC, ucp_Zl },
{ 1348, PT_PC, ucp_Zp },
{ 1351, PT_PC, ucp_Zs }
{ 0, PT_SC, ucp_Adlam },
{ 6, PT_SC, ucp_Ahom },
{ 11, PT_SC, ucp_Anatolian_Hieroglyphs },
{ 33, PT_ANY, 0 },
{ 37, PT_SC, ucp_Arabic },
{ 44, PT_SC, ucp_Armenian },
{ 53, PT_SC, ucp_Avestan },
{ 61, PT_SC, ucp_Balinese },
{ 70, PT_SC, ucp_Bamum },
{ 76, PT_SC, ucp_Bassa_Vah },
{ 86, PT_SC, ucp_Batak },
{ 92, PT_SC, ucp_Bengali },
{ 100, PT_SC, ucp_Bhaiksuki },
{ 110, PT_SC, ucp_Bopomofo },
{ 119, PT_SC, ucp_Brahmi },
{ 126, PT_SC, ucp_Braille },
{ 134, PT_SC, ucp_Buginese },
{ 143, PT_SC, ucp_Buhid },
{ 149, PT_GC, ucp_C },
{ 151, PT_SC, ucp_Canadian_Aboriginal },
{ 171, PT_SC, ucp_Carian },
{ 178, PT_SC, ucp_Caucasian_Albanian },
{ 197, PT_PC, ucp_Cc },
{ 200, PT_PC, ucp_Cf },
{ 203, PT_SC, ucp_Chakma },
{ 210, PT_SC, ucp_Cham },
{ 215, PT_SC, ucp_Cherokee },
{ 224, PT_PC, ucp_Cn },
{ 227, PT_PC, ucp_Co },
{ 230, PT_SC, ucp_Common },
{ 237, PT_SC, ucp_Coptic },
{ 244, PT_PC, ucp_Cs },
{ 247, PT_SC, ucp_Cuneiform },
{ 257, PT_SC, ucp_Cypriot },
{ 265, PT_SC, ucp_Cyrillic },
{ 274, PT_SC, ucp_Deseret },
{ 282, PT_SC, ucp_Devanagari },
{ 293, PT_SC, ucp_Duployan },
{ 302, PT_SC, ucp_Egyptian_Hieroglyphs },
{ 323, PT_SC, ucp_Elbasan },
{ 331, PT_SC, ucp_Ethiopic },
{ 340, PT_SC, ucp_Georgian },
{ 349, PT_SC, ucp_Glagolitic },
{ 360, PT_SC, ucp_Gothic },
{ 367, PT_SC, ucp_Grantha },
{ 375, PT_SC, ucp_Greek },
{ 381, PT_SC, ucp_Gujarati },
{ 390, PT_SC, ucp_Gurmukhi },
{ 399, PT_SC, ucp_Han },
{ 403, PT_SC, ucp_Hangul },
{ 410, PT_SC, ucp_Hanunoo },
{ 418, PT_SC, ucp_Hatran },
{ 425, PT_SC, ucp_Hebrew },
{ 432, PT_SC, ucp_Hiragana },
{ 441, PT_SC, ucp_Imperial_Aramaic },
{ 458, PT_SC, ucp_Inherited },
{ 468, PT_SC, ucp_Inscriptional_Pahlavi },
{ 490, PT_SC, ucp_Inscriptional_Parthian },
{ 513, PT_SC, ucp_Javanese },
{ 522, PT_SC, ucp_Kaithi },
{ 529, PT_SC, ucp_Kannada },
{ 537, PT_SC, ucp_Katakana },
{ 546, PT_SC, ucp_Kayah_Li },
{ 555, PT_SC, ucp_Kharoshthi },
{ 566, PT_SC, ucp_Khmer },
{ 572, PT_SC, ucp_Khojki },
{ 579, PT_SC, ucp_Khudawadi },
{ 589, PT_GC, ucp_L },
{ 591, PT_LAMP, 0 },
{ 594, PT_SC, ucp_Lao },
{ 598, PT_SC, ucp_Latin },
{ 604, PT_SC, ucp_Lepcha },
{ 611, PT_SC, ucp_Limbu },
{ 617, PT_SC, ucp_Linear_A },
{ 626, PT_SC, ucp_Linear_B },
{ 635, PT_SC, ucp_Lisu },
{ 640, PT_PC, ucp_Ll },
{ 643, PT_PC, ucp_Lm },
{ 646, PT_PC, ucp_Lo },
{ 649, PT_PC, ucp_Lt },
{ 652, PT_PC, ucp_Lu },
{ 655, PT_SC, ucp_Lycian },
{ 662, PT_SC, ucp_Lydian },
{ 669, PT_GC, ucp_M },
{ 671, PT_SC, ucp_Mahajani },
{ 680, PT_SC, ucp_Malayalam },
{ 690, PT_SC, ucp_Mandaic },
{ 698, PT_SC, ucp_Manichaean },
{ 709, PT_SC, ucp_Marchen },
{ 717, PT_SC, ucp_Masaram_Gondi },
{ 731, PT_PC, ucp_Mc },
{ 734, PT_PC, ucp_Me },
{ 737, PT_SC, ucp_Meetei_Mayek },
{ 750, PT_SC, ucp_Mende_Kikakui },
{ 764, PT_SC, ucp_Meroitic_Cursive },
{ 781, PT_SC, ucp_Meroitic_Hieroglyphs },
{ 802, PT_SC, ucp_Miao },
{ 807, PT_PC, ucp_Mn },
{ 810, PT_SC, ucp_Modi },
{ 815, PT_SC, ucp_Mongolian },
{ 825, PT_SC, ucp_Mro },
{ 829, PT_SC, ucp_Multani },
{ 837, PT_SC, ucp_Myanmar },
{ 845, PT_GC, ucp_N },
{ 847, PT_SC, ucp_Nabataean },
{ 857, PT_PC, ucp_Nd },
{ 860, PT_SC, ucp_New_Tai_Lue },
{ 872, PT_SC, ucp_Newa },
{ 877, PT_SC, ucp_Nko },
{ 881, PT_PC, ucp_Nl },
{ 884, PT_PC, ucp_No },
{ 887, PT_SC, ucp_Nushu },
{ 893, PT_SC, ucp_Ogham },
{ 899, PT_SC, ucp_Ol_Chiki },
{ 908, PT_SC, ucp_Old_Hungarian },
{ 922, PT_SC, ucp_Old_Italic },
{ 933, PT_SC, ucp_Old_North_Arabian },
{ 951, PT_SC, ucp_Old_Permic },
{ 962, PT_SC, ucp_Old_Persian },
{ 974, PT_SC, ucp_Old_South_Arabian },
{ 992, PT_SC, ucp_Old_Turkic },
{ 1003, PT_SC, ucp_Oriya },
{ 1009, PT_SC, ucp_Osage },
{ 1015, PT_SC, ucp_Osmanya },
{ 1023, PT_GC, ucp_P },
{ 1025, PT_SC, ucp_Pahawh_Hmong },
{ 1038, PT_SC, ucp_Palmyrene },
{ 1048, PT_SC, ucp_Pau_Cin_Hau },
{ 1060, PT_PC, ucp_Pc },
{ 1063, PT_PC, ucp_Pd },
{ 1066, PT_PC, ucp_Pe },
{ 1069, PT_PC, ucp_Pf },
{ 1072, PT_SC, ucp_Phags_Pa },
{ 1081, PT_SC, ucp_Phoenician },
{ 1092, PT_PC, ucp_Pi },
{ 1095, PT_PC, ucp_Po },
{ 1098, PT_PC, ucp_Ps },
{ 1101, PT_SC, ucp_Psalter_Pahlavi },
{ 1117, PT_SC, ucp_Rejang },
{ 1124, PT_SC, ucp_Runic },
{ 1130, PT_GC, ucp_S },
{ 1132, PT_SC, ucp_Samaritan },
{ 1142, PT_SC, ucp_Saurashtra },
{ 1153, PT_PC, ucp_Sc },
{ 1156, PT_SC, ucp_Sharada },
{ 1164, PT_SC, ucp_Shavian },
{ 1172, PT_SC, ucp_Siddham },
{ 1180, PT_SC, ucp_SignWriting },
{ 1192, PT_SC, ucp_Sinhala },
{ 1200, PT_PC, ucp_Sk },
{ 1203, PT_PC, ucp_Sm },
{ 1206, PT_PC, ucp_So },
{ 1209, PT_SC, ucp_Sora_Sompeng },
{ 1222, PT_SC, ucp_Soyombo },
{ 1230, PT_SC, ucp_Sundanese },
{ 1240, PT_SC, ucp_Syloti_Nagri },
{ 1253, PT_SC, ucp_Syriac },
{ 1260, PT_SC, ucp_Tagalog },
{ 1268, PT_SC, ucp_Tagbanwa },
{ 1277, PT_SC, ucp_Tai_Le },
{ 1284, PT_SC, ucp_Tai_Tham },
{ 1293, PT_SC, ucp_Tai_Viet },
{ 1302, PT_SC, ucp_Takri },
{ 1308, PT_SC, ucp_Tamil },
{ 1314, PT_SC, ucp_Tangut },
{ 1321, PT_SC, ucp_Telugu },
{ 1328, PT_SC, ucp_Thaana },
{ 1335, PT_SC, ucp_Thai },
{ 1340, PT_SC, ucp_Tibetan },
{ 1348, PT_SC, ucp_Tifinagh },
{ 1357, PT_SC, ucp_Tirhuta },
{ 1365, PT_SC, ucp_Ugaritic },
{ 1374, PT_SC, ucp_Vai },
{ 1378, PT_SC, ucp_Warang_Citi },
{ 1390, PT_ALNUM, 0 },
{ 1394, PT_PXSPACE, 0 },
{ 1398, PT_SPACE, 0 },
{ 1402, PT_UCNC, 0 },
{ 1406, PT_WORD, 0 },
{ 1410, PT_SC, ucp_Yi },
{ 1413, PT_GC, ucp_Z },
{ 1415, PT_SC, ucp_Zanabazar_Square },
{ 1432, PT_PC, ucp_Zl },
{ 1435, PT_PC, ucp_Zp },
{ 1438, PT_PC, ucp_Zs }
};
const size_t PRIV(utt_size) = sizeof(PRIV(utt)) / sizeof(ucp_type_table);

File diff suppressed because it is too large


@ -39,8 +39,8 @@ POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _PCRE2_UCP_H
#define _PCRE2_UCP_H
#ifndef PCRE2_UCP_H_IDEMPOTENT_GUARD
#define PCRE2_UCP_H_IDEMPOTENT_GUARD
/* This file contains definitions of the property values that are returned by
the UCD access macros. New values that are added for new releases of Unicode
@ -100,9 +100,7 @@ enum {
ucp_Zs /* Space separator */
};
/* These are grapheme break properties. Note that the code for processing them
assumes that the values are less than 16. If more values are added that take
the number to 16 or more, the code will have to be rewritten. */
/* These are grapheme break properties. */
enum {
ucp_gbCR, /* 0 */
@ -117,7 +115,12 @@ enum {
ucp_gbLV, /* 9 Hangul syllable type LV */
ucp_gbLVT, /* 10 Hangul syllable type LVT */
ucp_gbRegionalIndicator, /* 11 */
ucp_gbOther /* 12 */
ucp_gbOther, /* 12 */
ucp_gbE_Base, /* 13 */
ucp_gbE_Modifier, /* 14 */
ucp_gbE_Base_GAZ, /* 15 */
ucp_gbZWJ, /* 16 */
ucp_gbGlue_After_Zwj /* 17 */
};
/* These are the script identifications. */
@ -184,13 +187,13 @@ enum {
ucp_Tifinagh,
ucp_Ugaritic,
ucp_Yi,
/* New for Unicode 5.0: */
/* New for Unicode 5.0 */
ucp_Balinese,
ucp_Cuneiform,
ucp_Nko,
ucp_Phags_Pa,
ucp_Phoenician,
/* New for Unicode 5.1: */
/* New for Unicode 5.1 */
ucp_Carian,
ucp_Cham,
ucp_Kayah_Li,
@ -202,7 +205,7 @@ enum {
ucp_Saurashtra,
ucp_Sundanese,
ucp_Vai,
/* New for Unicode 5.2: */
/* New for Unicode 5.2 */
ucp_Avestan,
ucp_Bamum,
ucp_Egyptian_Hieroglyphs,
@ -218,11 +221,11 @@ enum {
ucp_Samaritan,
ucp_Tai_Tham,
ucp_Tai_Viet,
/* New for Unicode 6.0.0: */
/* New for Unicode 6.0.0 */
ucp_Batak,
ucp_Brahmi,
ucp_Mandaic,
/* New for Unicode 6.1.0: */
/* New for Unicode 6.1.0 */
ucp_Chakma,
ucp_Meroitic_Cursive,
ucp_Meroitic_Hieroglyphs,
@ -230,7 +233,7 @@ enum {
ucp_Sharada,
ucp_Sora_Sompeng,
ucp_Takri,
/* New for Unicode 7.0.0: */
/* New for Unicode 7.0.0 */
ucp_Bassa_Vah,
ucp_Caucasian_Albanian,
ucp_Duployan,
@ -254,15 +257,26 @@ enum {
ucp_Siddham,
ucp_Tirhuta,
ucp_Warang_Citi,
/* New for Unicode 8.0.0: */
/* New for Unicode 8.0.0 */
ucp_Ahom,
ucp_Anatolian_Hieroglyphs,
ucp_Hatran,
ucp_Multani,
ucp_Old_Hungarian,
ucp_SignWriting
ucp_SignWriting,
/* New for Unicode 10.0.0 (no update since 8.0.0) */
ucp_Adlam,
ucp_Bhaiksuki,
ucp_Marchen,
ucp_Newa,
ucp_Osage,
ucp_Tangut,
ucp_Masaram_Gondi,
ucp_Nushu,
ucp_Soyombo,
ucp_Zanabazar_Square
};
#endif
#endif /* PCRE2_UCP_H_IDEMPOTENT_GUARD */
/* End of pcre2_ucp.h */


@ -7,7 +7,7 @@ and semantics are as close as possible to those of the Perl 5 language.
Written by Philip Hazel
Original API code Copyright (c) 1997-2012 University of Cambridge
New API code Copyright (c) 2016 University of Cambridge
New API code Copyright (c) 2016-2017 University of Cambridge
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
@ -93,8 +93,8 @@ Returns: == 0 if the string is a valid UTF string
int
PRIV(valid_utf)(PCRE2_SPTR string, PCRE2_SIZE length, PCRE2_SIZE *erroroffset)
{
register PCRE2_SPTR p;
register uint32_t c;
PCRE2_SPTR p;
uint32_t c;
/* ----------------- Check a UTF-8 string ----------------- */
@ -133,7 +133,7 @@ PCRE2_ERROR_UTF8_ERR21 Byte with the illegal value 0xfe or 0xff
for (p = string; length > 0; p++)
{
register uint32_t ab, d;
uint32_t ab, d;
c = *p;
length--;
@ -142,20 +142,20 @@ for (p = string; length > 0; p++)
if (c < 0xc0) /* Isolated 10xx xxxx byte */
{
*erroroffset = (int)(p - string);
*erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF8_ERR20;
}
if (c >= 0xfe) /* Invalid 0xfe or 0xff bytes */
{
*erroroffset = (int)(p - string);
*erroroffset = (PCRE2_SIZE)(p - string);
return PCRE2_ERROR_UTF8_ERR21;
}
ab = PRIV(utf8_table4)[c & 0x3f]; /* Number of additional bytes (1-5) */
if (length < ab) /* Missing bytes */
{
*erroroffset = (int)(p - string);
*erroroffset = (PCRE2_SIZE)(p - string);
switch(ab - length)
{
case 1: return PCRE2_ERROR_UTF8_ERR1;


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -90,10 +90,20 @@
/* Executable code allocation:
If SLJIT_EXECUTABLE_ALLOCATOR is not defined, the application should
define both SLJIT_MALLOC_EXEC and SLJIT_FREE_EXEC. */
define SLJIT_MALLOC_EXEC, SLJIT_FREE_EXEC, and SLJIT_EXEC_OFFSET. */
#ifndef SLJIT_EXECUTABLE_ALLOCATOR
/* Enabled by default. */
#define SLJIT_EXECUTABLE_ALLOCATOR 1
/* When SLJIT_PROT_EXECUTABLE_ALLOCATOR is enabled SLJIT uses
an allocator which does not set writable and executable
permission flags at the same time. The trade-off is increased
memory consumption and disabled dynamic code modifications. */
#ifndef SLJIT_PROT_EXECUTABLE_ALLOCATOR
/* Disabled by default. */
#define SLJIT_PROT_EXECUTABLE_ALLOCATOR 0
#endif
#endif
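SLJIT_PROT_EXECUTABLE_ALLOCATOR selects an allocator that never holds a page writable and executable at the same time (a W^X policy). A hedged illustration of the general idea only, not the sljit implementation: map the memory writable, copy the generated code in, then flip the protection to read+execute.

#include <string.h>
#include <sys/mman.h>   /* POSIX; MAP_ANONYMOUS is spelled MAP_ANON on some systems */

static void *emit_code(const void *buf, size_t size)
{
    void *mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (mem == MAP_FAILED) return NULL;
    memcpy(mem, buf, size);                                 /* write phase */
    if (mprotect(mem, size, PROT_READ | PROT_EXEC) != 0) {  /* execute phase */
        munmap(mem, size);
        return NULL;
    }
    return mem;   /* the code can no longer be patched in place */
}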
/* Force cdecl calling convention even if a better calling


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -187,14 +187,6 @@
/* External function definitions. */
/**********************************/
#if !(defined SLJIT_STD_MACROS_DEFINED && SLJIT_STD_MACROS_DEFINED)
/* These libraries are needed for the macros below. */
#include <stdlib.h>
#include <string.h>
#endif /* SLJIT_STD_MACROS_DEFINED */
/* General macros:
Note: SLJIT is designed to be independent from them as possible.
@ -304,6 +296,13 @@
#define SLJIT_CACHE_FLUSH(from, to) \
sys_icache_invalidate((char*)(from), (char*)(to) - (char*)(from))
#elif (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
/* The __clear_cache() implementation of GCC is a dummy function on PowerPC. */
#define SLJIT_CACHE_FLUSH(from, to) \
ppc_cache_flush((from), (to))
#define SLJIT_CACHE_FLUSH_OWN_IMPL 1
#elif (defined(__GNUC__) && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)))
#define SLJIT_CACHE_FLUSH(from, to) \
@ -316,13 +315,6 @@
#define SLJIT_CACHE_FLUSH(from, to) \
cacheflush((long)(from), (long)(to), 0)
#elif (defined SLJIT_CONFIG_PPC && SLJIT_CONFIG_PPC)
/* The __clear_cache() implementation of GCC is a dummy function on PowerPC. */
#define SLJIT_CACHE_FLUSH(from, to) \
ppc_cache_flush((from), (to))
#define SLJIT_CACHE_FLUSH_OWN_IMPL 1
#elif (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
/* The __clear_cache() implementation of GCC is a dummy function on Sparc. */
@ -401,7 +393,9 @@ typedef double sljit_f64;
#ifndef SLJIT_W
/* Defining long constants. */
#if (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
#if (defined SLJIT_CONFIG_UNSUPPORTED && SLJIT_CONFIG_UNSUPPORTED)
#define SLJIT_W(w) (w##l)
#elif (defined SLJIT_64BIT_ARCHITECTURE && SLJIT_64BIT_ARCHITECTURE)
#define SLJIT_W(w) (w##ll)
#else
#define SLJIT_W(w) (w)
@ -545,6 +539,14 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_exec(void* ptr);
SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#define SLJIT_MALLOC_EXEC(size) sljit_malloc_exec(size)
#define SLJIT_FREE_EXEC(ptr) sljit_free_exec(ptr)
#if (defined SLJIT_PROT_EXECUTABLE_ALLOCATOR && SLJIT_PROT_EXECUTABLE_ALLOCATOR)
SLJIT_API_FUNC_ATTRIBUTE sljit_sw sljit_exec_offset(void* ptr);
#define SLJIT_EXEC_OFFSET(ptr) sljit_exec_offset(ptr)
#else
#define SLJIT_EXEC_OFFSET(ptr) 0
#endif
#endif
/**********************************************/
@ -553,37 +555,37 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
#define SLJIT_NUMBER_OF_REGISTERS 10
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 7
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 9
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
#define SLJIT_LOCALS_OFFSET_BASE ((2 + 4) * sizeof(sljit_sw))
#define SLJIT_LOCALS_OFFSET_BASE (compiler->locals_offset)
#else
/* Maximum 3 arguments are passed on the stack, +1 for double alignment. */
#define SLJIT_LOCALS_OFFSET_BASE ((3 + 1 + 4) * sizeof(sljit_sw))
#define SLJIT_LOCALS_OFFSET_BASE (compiler->locals_offset)
#endif /* SLJIT_X86_32_FASTCALL */
#elif (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifndef _WIN64
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_REGISTERS 13
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 6
#define SLJIT_LOCALS_OFFSET_BASE (sizeof(sljit_sw))
#define SLJIT_LOCALS_OFFSET_BASE 0
#else
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_REGISTERS 13
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
#define SLJIT_LOCALS_OFFSET_BASE ((4 + 2) * sizeof(sljit_sw))
#define SLJIT_LOCALS_OFFSET_BASE (compiler->locals_offset)
#endif /* _WIN64 */
#elif (defined SLJIT_CONFIG_ARM_V5 && SLJIT_CONFIG_ARM_V5) || (defined SLJIT_CONFIG_ARM_V7 && SLJIT_CONFIG_ARM_V7)
#define SLJIT_NUMBER_OF_REGISTERS 11
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
#define SLJIT_LOCALS_OFFSET_BASE 0
#elif (defined SLJIT_CONFIG_ARM_THUMB2 && SLJIT_CONFIG_ARM_THUMB2)
#define SLJIT_NUMBER_OF_REGISTERS 11
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 7
#define SLJIT_NUMBER_OF_REGISTERS 12
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
#define SLJIT_LOCALS_OFFSET_BASE 0
#elif (defined SLJIT_CONFIG_ARM_64 && SLJIT_CONFIG_ARM_64)
@ -607,7 +609,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#elif (defined SLJIT_CONFIG_MIPS && SLJIT_CONFIG_MIPS)
#define SLJIT_NUMBER_OF_REGISTERS 17
#define SLJIT_NUMBER_OF_REGISTERS 21
#define SLJIT_NUMBER_OF_SAVED_REGISTERS 8
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define SLJIT_LOCALS_OFFSET_BASE (4 * sizeof(sljit_sw))
@ -663,7 +665,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
#if !defined(SLJIT_ASSERT) || !defined(SLJIT_ASSERT_STOP)
#if !defined(SLJIT_ASSERT) || !defined(SLJIT_UNREACHABLE)
/* SLJIT_HALT_PROCESS must halt the process. */
#ifndef SLJIT_HALT_PROCESS
@ -675,7 +677,7 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#include <stdio.h>
#endif /* !SLJIT_ASSERT || !SLJIT_ASSERT_STOP */
#endif /* !SLJIT_ASSERT || !SLJIT_UNREACHABLE */
/* Feel free to redefine these two macros. */
#ifndef SLJIT_ASSERT
@ -690,34 +692,33 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_free_unused_memory_exec(void);
#endif /* !SLJIT_ASSERT */
#ifndef SLJIT_ASSERT_STOP
#ifndef SLJIT_UNREACHABLE
#define SLJIT_ASSERT_STOP() \
#define SLJIT_UNREACHABLE() \
do { \
printf("Should never been reached " __FILE__ ":%d\n", __LINE__); \
SLJIT_HALT_PROCESS(); \
} while (0)
#endif /* !SLJIT_ASSERT_STOP */
#endif /* !SLJIT_UNREACHABLE */
#else /* (defined SLJIT_DEBUG && SLJIT_DEBUG) */
/* Forcing empty, but valid statements. */
#undef SLJIT_ASSERT
#undef SLJIT_ASSERT_STOP
#undef SLJIT_UNREACHABLE
#define SLJIT_ASSERT(x) \
do { } while (0)
#define SLJIT_ASSERT_STOP() \
#define SLJIT_UNREACHABLE() \
do { } while (0)
#endif /* (defined SLJIT_DEBUG && SLJIT_DEBUG) */
#ifndef SLJIT_COMPILE_ASSERT
/* Should be improved eventually. */
#define SLJIT_COMPILE_ASSERT(x, description) \
SLJIT_ASSERT(x)
switch(0) { case 0: case ((x) ? 1 : 0): break; }
#endif /* !SLJIT_COMPILE_ASSERT */
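The replacement macro needs no runtime support: when x is false, the two case labels both evaluate to 0 and the duplicate label is a compile-time error. A hedged illustration (MY_COMPILE_ASSERT is a stand-in name, not from the patch):

#define MY_COMPILE_ASSERT(x) \
    switch (0) { case 0: case ((x) ? 1 : 0): break; }

static void sanity_checks(void)
{
    MY_COMPILE_ASSERT(sizeof(int) >= 2);      /* compiles */
    /* MY_COMPILE_ASSERT(sizeof(int) == 1);      would fail: duplicate case 0 */
}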


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -86,7 +86,7 @@ static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
return VirtualAlloc(NULL, size, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
}
static SLJIT_INLINE void free_chunk(void* chunk, sljit_uw size)
static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
{
SLJIT_UNUSED_ARG(size);
VirtualFree(chunk, 0, MEM_RELEASE);
@ -96,7 +96,7 @@ static SLJIT_INLINE void free_chunk(void* chunk, sljit_uw size)
static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
{
void* retval;
void *retval;
#ifdef MAP_ANON
retval = mmap(NULL, size, PROT_READ | PROT_WRITE | PROT_EXEC, MAP_PRIVATE | MAP_ANON, -1, 0);
@ -111,7 +111,7 @@ static SLJIT_INLINE void* alloc_chunk(sljit_uw size)
return (retval != MAP_FAILED) ? retval : NULL;
}
static SLJIT_INLINE void free_chunk(void* chunk, sljit_uw size)
static SLJIT_INLINE void free_chunk(void *chunk, sljit_uw size)
{
munmap(chunk, size);
}
@ -180,8 +180,8 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_malloc_exec(sljit_uw size)
sljit_uw chunk_size;
allocator_grab_lock();
if (size < sizeof(struct free_block))
size = sizeof(struct free_block);
if (size < (64 - sizeof(struct block_header)))
size = (64 - sizeof(struct block_header));
size = ALIGN_SIZE(size);
free_block = free_blocks;

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -76,6 +76,7 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
#define BRK 0xd4200000
#define CBZ 0xb4000000
#define CLZ 0xdac01000
#define CSEL 0x9a800000
#define CSINC 0x9a800400
#define EOR 0xca000000
#define EORI 0xd2000000
@ -151,7 +152,7 @@ static SLJIT_INLINE void modify_imm64_const(sljit_ins* inst, sljit_uw new_imm)
inst[3] = MOVK | dst | ((new_imm >> 48) << 5) | (3 << 21);
}
static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
@ -165,9 +166,10 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_in
target_addr = jump->u.target;
else {
SLJIT_ASSERT(jump->flags & JUMP_LABEL);
target_addr = (sljit_uw)(code + jump->u.label->size);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4);
diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset;
if (jump->flags & IS_COND) {
diff += sizeof(sljit_ins);
@ -211,6 +213,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
sljit_sw executable_offset;
sljit_uw addr;
sljit_s32 dst;
@ -228,6 +231,8 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
code_ptr = code;
word_count = 0;
executable_offset = SLJIT_EXEC_OFFSET(code);
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
@ -242,13 +247,13 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!jump || jump->addr >= word_count);
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
if (label && label->size == word_count) {
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
if (jump && jump->addr == word_count) {
jump->addr = (sljit_uw)(code_ptr - 4);
code_ptr -= detect_jump_type(jump, code_ptr, code);
code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);
jump = jump->next;
}
if (const_ && const_->addr == word_count) {
@ -263,7 +268,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -277,9 +282,10 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
while (jump) {
do {
addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
buf_ptr = (sljit_ins*)jump->addr;
buf_ptr = (sljit_ins *)jump->addr;
if (jump->flags & PATCH_B) {
addr = (sljit_sw)(addr - jump->addr) >> 2;
addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
SLJIT_ASSERT((sljit_sw)addr <= 0x1ffffff && (sljit_sw)addr >= -0x2000000);
buf_ptr[0] = ((jump->flags & IS_BL) ? BL : B) | (addr & 0x3ffffff);
if (jump->flags & IS_COND)
@ -287,7 +293,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
break;
}
if (jump->flags & PATCH_COND) {
addr = (sljit_sw)(addr - jump->addr) >> 2;
addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
SLJIT_ASSERT((sljit_sw)addr <= 0x3ffff && (sljit_sw)addr >= -0x40000);
buf_ptr[0] = (buf_ptr[0] & ~0xffffe0) | ((addr & 0x7ffff) << 5);
break;
@ -308,11 +314,37 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
SLJIT_CACHE_FLUSH(code, code_ptr);
return code;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
case SLJIT_HAS_PRE_UPDATE:
case SLJIT_HAS_CLZ:
case SLJIT_HAS_CMOV:
return 1;
default:
return 0;
}
}
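Feature queries now go through this single entry point. A hedged sketch of how a caller replaces the removed sljit_is_fpu_available() (the wrapper name is illustrative):

#include "sljitLir.h"

static int can_use_doubles(void)
{
    return sljit_has_cpu_feature(SLJIT_HAS_FPU);
}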
/* --------------------------------------------------------------------- */
/* Core code generator functions. */
/* --------------------------------------------------------------------- */
@ -365,7 +397,7 @@ static sljit_ins logical_imm(sljit_sw imm, sljit_s32 len)
uimm = (sljit_uw)imm;
while (1) {
if (len <= 0) {
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return 0;
}
mask = ((sljit_uw)1 << len) - 1;
@ -635,7 +667,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
}
goto set_flags;
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
break;
}
@ -702,7 +734,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
case SLJIT_NOT:
SLJIT_ASSERT(arg1 == TMP_REG1);
FAIL_IF(push_inst(compiler, (ORN ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2)));
goto set_flags;
break; /* Set flags. */
case SLJIT_NEG:
SLJIT_ASSERT(arg1 == TMP_REG1);
if (flags & SET_FLAGS)
@ -710,8 +742,7 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
return push_inst(compiler, (SUB ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
case SLJIT_CLZ:
SLJIT_ASSERT(arg1 == TMP_REG1);
FAIL_IF(push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(arg2)));
goto set_flags;
return push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(arg2));
case SLJIT_ADD:
CHECK_FLAGS(1 << 29);
return push_inst(compiler, (ADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
@ -740,23 +771,23 @@ static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, s
return push_inst(compiler, (AND ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
case SLJIT_OR:
FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
goto set_flags;
break; /* Set flags. */
case SLJIT_XOR:
FAIL_IF(push_inst(compiler, (EOR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
goto set_flags;
break; /* Set flags. */
case SLJIT_SHL:
FAIL_IF(push_inst(compiler, (LSLV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
goto set_flags;
break; /* Set flags. */
case SLJIT_LSHR:
FAIL_IF(push_inst(compiler, (LSRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
goto set_flags;
break; /* Set flags. */
case SLJIT_ASHR:
FAIL_IF(push_inst(compiler, (ASRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
goto set_flags;
}
SLJIT_ASSERT_STOP();
break; /* Set flags. */
default:
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
set_flags:
if (flags & SET_FLAGS)
@ -859,6 +890,10 @@ static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flag
}
arg &= REG_MASK;
if (arg == SLJIT_UNUSED)
return 0;
if (argw >= 0 && (argw >> shift) <= 0xfff && (argw & ((1 << shift) - 1)) == 0) {
if (SLJIT_UNLIKELY(flags & ARG_TEST))
return 1;
@ -919,21 +954,23 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
next_argw = 0;
}
tmp_r = (flags & STORE) ? TMP_REG3 : reg;
tmp_r = ((flags & STORE) || (flags == (WORD_SIZE | SIGNED))) ? TMP_REG3 : reg;
if (SLJIT_UNLIKELY((flags & UPDATE) && (arg & REG_MASK))) {
/* Update only applies if a base register exists. */
other_r = OFFS_REG(arg);
if (!other_r) {
other_r = arg & REG_MASK;
if (other_r != reg && argw >= 0 && argw <= 0xffffff) {
SLJIT_ASSERT(other_r != reg);
if (argw >= 0 && argw <= 0xffffff) {
if ((argw & 0xfff) != 0)
FAIL_IF(push_inst(compiler, ADDI | RD(other_r) | RN(other_r) | ((argw & 0xfff) << 10)));
if (argw >> 12)
FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(other_r) | RN(other_r) | ((argw >> 12) << 10)));
return push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30) | RT(reg) | RN(other_r));
}
else if (other_r != reg && argw < 0 && argw >= -0xffffff) {
else if (argw < 0 && argw >= -0xffffff) {
argw = -argw;
if ((argw & 0xfff) != 0)
FAIL_IF(push_inst(compiler, SUBI | RD(other_r) | RN(other_r) | ((argw & 0xfff) << 10)));
@ -966,18 +1003,8 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
/* No caching here. */
arg &= REG_MASK;
argw &= 0x3;
if (!argw || argw == shift) {
FAIL_IF(push_inst(compiler, sljit_mem_reg[flags & 0x3] | (shift << 30) | RT(reg) | RN(arg) | RM(other_r) | (argw ? (1 << 12) : 0)));
return push_inst(compiler, ADD | RD(arg) | RN(arg) | RM(other_r) | (argw << 10));
}
if (arg != reg) {
FAIL_IF(push_inst(compiler, ADD | RD(arg) | RN(arg) | RM(other_r) | (argw << 10)));
return push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30) | RT(reg) | RN(arg));
}
FAIL_IF(push_inst(compiler, ADD | RD(TMP_LR) | RN(arg) | RM(other_r) | (argw << 10)));
FAIL_IF(push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30) | RT(reg) | RN(TMP_LR)));
return push_inst(compiler, ORR | RD(arg) | RN(TMP_ZERO) | RM(TMP_LR));
FAIL_IF(push_inst(compiler, sljit_mem_reg[flags & 0x3] | (shift << 30) | RT(reg) | RN(arg) | RM(other_r)));
return push_inst(compiler, ADD | RD(arg) | RN(arg) | RM(other_r));
}
if (arg & OFFS_REG_MASK) {
@ -998,16 +1025,16 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
}
}
if (argw >= 0 && argw <= 0xffffff && (argw & ((1 << shift) - 1)) == 0) {
FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(tmp_r) | RN(arg & REG_MASK) | ((argw >> 12) << 10)));
return push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30)
| RT(reg) | RN(tmp_r) | ((argw & 0xfff) << (10 - shift)));
}
diff = argw - next_argw;
next_arg = (arg & REG_MASK) && (arg == next_arg) && diff <= 0xfff && diff >= -0xfff && diff != 0;
arg &= REG_MASK;
if (arg != SLJIT_UNUSED && argw >= 0 && argw <= 0xffffff && (argw & ((1 << shift) - 1)) == 0) {
FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(tmp_r) | RN(arg) | ((argw >> 12) << 10)));
return push_inst(compiler, sljit_mem_imm[flags & 0x3] | (shift << 30)
| RT(reg) | RN(tmp_r) | ((argw & 0xfff) << (10 - shift)));
}
if (arg && compiler->cache_arg == SLJIT_MEM) {
if (compiler->cache_argw == argw)
return push_inst(compiler, sljit_mem_reg[flags & 0x3] | (shift << 30) | RT(reg) | RN(arg) | RM(TMP_REG3));
@ -1290,6 +1317,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) {
if (op <= SLJIT_MOV_P && (src & SLJIT_MEM)) {
SLJIT_ASSERT(reg_map[1] == 0 && reg_map[3] == 2 && reg_map[5] == 4);
if (op >= SLJIT_MOV_U8 && op <= SLJIT_MOV_S8)
dst = 5;
else if (op >= SLJIT_MOV_U16 && op <= SLJIT_MOV_S16)
dst = 3;
else
dst = 1;
/* Signed word sized load is the prefetch instruction. */
return emit_op_mem(compiler, WORD_SIZE | SIGNED, dst, src, srcw);
}
return SLJIT_SUCCESS;
}
dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
op = GET_OPCODE(op);
@ -1364,7 +1408,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
srcw = (sljit_s32)srcw;
break;
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
flags = 0;
break;
}
@ -1391,7 +1435,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
flags = GET_FLAGS(op_flags) ? SET_FLAGS : 0;
flags = HAS_FLAGS(op_flags) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
if (op_flags & SLJIT_I32_OP) {
flags |= INT_OP;
@ -1443,8 +1487,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
return SLJIT_SUCCESS;
dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
flags = GET_FLAGS(op) ? SET_FLAGS : 0;
flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
if (op & SLJIT_I32_OP) {
flags |= INT_OP;
@ -1537,16 +1584,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* Floating point operators */
/* --------------------------------------------------------------------- */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
}
static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
{
sljit_u32 shift = MEM_SIZE_SHIFT(flags);
@ -1604,7 +1641,7 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
if (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64)
@ -1617,7 +1654,7 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
FAIL_IF(push_inst(compiler, (FCVTZS ^ inv_bits) | RD(dst_r) | VN(src)));
if (dst_r == TMP_REG1 && dst != SLJIT_UNUSED)
if (dst & SLJIT_MEM)
return emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? INT_SIZE : WORD_SIZE) | STORE, TMP_REG1, dst, dstw);
return SLJIT_SUCCESS;
}
@ -1775,10 +1812,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
/* For UNUSED dst. Uncommon, but possible. */
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst))
return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(TMP_LR));
@ -1856,7 +1889,7 @@ static sljit_uw get_cc(sljit_s32 type)
return 0x6;
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return 0xe;
}
}
@ -1966,19 +1999,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw,
sljit_s32 type)
{
sljit_s32 dst_r, flags, mem_flags;
sljit_s32 dst_r, src_r, flags, mem_flags;
sljit_ins cc;
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src, srcw);
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
cc = get_cc(type & 0xff);
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
@ -1992,26 +2020,50 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
compiler->cache_arg = 0;
compiler->cache_argw = 0;
flags = GET_FLAGS(op) ? SET_FLAGS : 0;
flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
mem_flags = WORD_SIZE;
if (op & SLJIT_I32_OP) {
flags |= INT_OP;
mem_flags = INT_SIZE;
}
if (src & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, mem_flags, TMP_REG1, src, srcw, dst, dstw));
src = TMP_REG1;
srcw = 0;
} else if (src & SLJIT_IMM)
flags |= ARG1_IMM;
src_r = dst;
if (dst & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, mem_flags, TMP_REG1, dst, dstw, dst, dstw));
src_r = TMP_REG1;
}
FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(TMP_REG2) | RN(TMP_ZERO) | RM(TMP_ZERO)));
emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src, TMP_REG2);
emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src_r, TMP_REG2);
if (dst_r != TMP_REG1)
return SLJIT_SUCCESS;
if (dst & SLJIT_MEM)
return emit_op_mem2(compiler, mem_flags | STORE, TMP_REG1, dst, dstw, 0, 0);
return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
sljit_s32 src, sljit_sw srcw)
{
sljit_ins inv_bits = (dst_reg & SLJIT_I32_OP) ? (1 << 31) : 0;
sljit_ins cc;
CHECK_ERROR();
CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
if (dst_reg & SLJIT_I32_OP)
srcw = (sljit_s32)srcw;
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
src = TMP_REG1;
srcw = 0;
}
cc = get_cc(type & 0xff);
dst_reg &= ~SLJIT_I32_OP;
return push_inst(compiler, (CSEL ^ inv_bits) | (cc << 12) | RD(dst_reg) | RN(dst_reg) | RM(src));
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
@ -2027,7 +2079,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
PTR_FAIL_IF(!const_);
set_const(const_, compiler);
dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
PTR_FAIL_IF(emit_imm64_const(compiler, dst_r, init_value));
if (dst & SLJIT_MEM)
@ -2035,16 +2087,18 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
return const_;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins* inst = (sljit_ins*)addr;
modify_imm64_const(inst, new_addr);
modify_imm64_const(inst, new_target);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 4);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins* inst = (sljit_ins*)addr;
modify_imm64_const(inst, new_constant);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 4);
}

File diff suppressed because it is too large

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -40,35 +40,37 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_a
#define EMIT_LOGICAL(op_imm, op_norm) \
if (flags & SRC2_IMM) { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_imm | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_imm | S(src1) | T(dst) | IMM(src2), DR(dst))); \
} \
else { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_norm | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_norm | S(src1) | T(src2) | D(dst), DR(dst))); \
}
#define EMIT_SHIFT(op_imm, op_v) \
if (flags & SRC2_IMM) { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_imm | T(src1) | DA(EQUAL_FLAG) | SH_IMM(src2), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_imm | T(src1) | D(dst) | SH_IMM(src2), DR(dst))); \
} \
else { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_v | S(src2) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_v | S(src2) | T(src1) | D(dst), DR(dst))); \
}
static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
sljit_s32 is_overflow, is_carry, is_handled;
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
case SLJIT_MOV_U32:
@ -93,8 +95,9 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xff), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
case SLJIT_MOV_U16:
@ -111,24 +114,25 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xffff), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
case SLJIT_NOT:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if (op & SLJIT_SET_E)
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST))
FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | D(dst), DR(dst)));
return SLJIT_SUCCESS;
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
if (op & SLJIT_SET_E)
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, CLZ | S(src2) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST))
FAIL_IF(push_inst(compiler, CLZ | S(src2) | T(dst) | D(dst), DR(dst)));
#else
if (SLJIT_UNLIKELY(flags & UNUSED_DEST)) {
@ -145,130 +149,192 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
FAIL_IF(push_inst(compiler, ADDIU | S(dst) | T(dst) | IMM(1), DR(dst)));
FAIL_IF(push_inst(compiler, BGEZ | S(TMP_REG1) | IMM(-2), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, SLL | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(1), UNMOVABLE_INS));
if (op & SLJIT_SET_E)
return push_inst(compiler, ADDU | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG);
#endif
return SLJIT_SUCCESS;
case SLJIT_ADD:
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_O) {
if (is_overflow) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
if (op & SLJIT_SET_E)
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O)) {
if (is_overflow || is_carry) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
else {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
}
}
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | T(dst) | IMM(src2), DR(dst)));
}
else {
if (op & SLJIT_SET_O)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (op & SLJIT_SET_E)
if (is_overflow)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(ULESS_FLAG), ULESS_FLAG));
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, ADDU | S(src1) | T(src2) | D(dst), DR(dst)));
}
/* a + b >= a | b (otherwise, the carry should be set to 1). */
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
if (!(op & SLJIT_SET_O))
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (!is_overflow)
return SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SLL | TA(ULESS_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
return push_inst(compiler, SLL | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG) | SH_IMM(31), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SLL | TA(OTHER_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDU | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
return push_inst(compiler, SRL | TA(OTHER_FLAG) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG);
case SLJIT_ADDC:
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_C) {
if (is_carry) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
else {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
}
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | T(dst) | IMM(src2), DR(dst)));
} else {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, ADDU | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, ADDU | S(dst) | TA(ULESS_FLAG) | D(dst), DR(dst)));
if (!(op & SLJIT_SET_C))
FAIL_IF(push_inst(compiler, ADDU | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
if (!is_carry)
return SLJIT_SUCCESS;
/* Set ULESS_FLAG (dst == 0) && (ULESS_FLAG == 1). */
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
/* Set ULESS_FLAG (dst == 0) && (OTHER_FLAG == 1). */
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
/* Set carry flag. */
return push_inst(compiler, OR | SA(ULESS_FLAG) | TA(OVERFLOW_FLAG) | DA(ULESS_FLAG), ULESS_FLAG);
return push_inst(compiler, OR | SA(OTHER_FLAG) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);
case SLJIT_SUB:
if ((flags & SRC2_IMM) && ((op & (SLJIT_SET_U | SLJIT_SET_S)) || src2 == SIMM_MIN)) {
if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));
src2 = TMP_REG2;
flags &= ~SRC2_IMM;
}
is_handled = 0;
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_O) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
if (op & SLJIT_SET_E)
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
}
if (!is_handled && GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
is_handled = 1;
if (flags & SRC2_IMM) {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));
src2 = TMP_REG2;
flags &= ~SRC2_IMM;
}
if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_GREATER || GET_FLAG_TYPE(op) == SLJIT_LESS_EQUAL)
{
FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER || GET_FLAG_TYPE(op) == SLJIT_SIG_LESS_EQUAL)
{
FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
}
}
if (is_handled) {
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
if (!(flags & UNUSED_DEST))
return push_inst(compiler, ADDIU | S(src1) | T(dst) | IMM(-src2), DR(dst));
}
else {
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SUBU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (!(flags & UNUSED_DEST))
return push_inst(compiler, SUBU | S(src1) | T(src2) | D(dst), DR(dst));
}
return SLJIT_SUCCESS;
}
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (is_overflow) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | T(dst) | IMM(-src2), DR(dst)));
}
else {
if (op & SLJIT_SET_O)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (op & SLJIT_SET_E)
if (is_overflow)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SUBU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (op & (SLJIT_SET_U | SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(ULESS_FLAG), ULESS_FLAG));
if (op & SLJIT_SET_U)
FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(UGREATER_FLAG), UGREATER_FLAG));
if (op & SLJIT_SET_S) {
FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(LESS_FLAG), LESS_FLAG));
FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(GREATER_FLAG), GREATER_FLAG));
}
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_C))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, SUBU | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (!(op & SLJIT_SET_O))
if (!is_overflow)
return SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SLL | TA(ULESS_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
return push_inst(compiler, SRL | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG) | SH_IMM(31), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SLL | TA(OTHER_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, ADDU | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
return push_inst(compiler, SRL | TA(OTHER_FLAG) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG);
case SLJIT_SUBC:
if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
@ -277,28 +343,31 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
flags &= ~SRC2_IMM;
}
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, ADDIU | S(src1) | T(dst) | IMM(-src2), DR(dst)));
}
else {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, SUBU | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(LESS_FLAG), LESS_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, SUBU | S(dst) | TA(ULESS_FLAG) | D(dst), DR(dst)));
return (op & SLJIT_SET_C) ? push_inst(compiler, OR | SA(OVERFLOW_FLAG) | TA(LESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG) : SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SUBU | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
return (is_carry) ? push_inst(compiler, OR | SA(EQUAL_FLAG) | T(TMP_REG1) | DA(OTHER_FLAG), OTHER_FLAG) : SLJIT_SUCCESS;
case SLJIT_MUL:
SLJIT_ASSERT(!(flags & SRC2_IMM));
if (!(op & SLJIT_SET_O)) {
if (GET_FLAG_TYPE(op) != SLJIT_MUL_OVERFLOW) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
return push_inst(compiler, MUL | S(src1) | T(src2) | D(dst), DR(dst));
#else
@ -307,10 +376,10 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
#endif
}
FAIL_IF(push_inst(compiler, MULT | S(src1) | T(src2), MOVABLE_INS));
FAIL_IF(push_inst(compiler, MFHI | DA(ULESS_FLAG), ULESS_FLAG));
FAIL_IF(push_inst(compiler, MFHI | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, MFLO | D(dst), DR(dst)));
FAIL_IF(push_inst(compiler, SRA | T(dst) | DA(UGREATER_FLAG) | SH_IMM(31), UGREATER_FLAG));
return push_inst(compiler, SUBU | SA(ULESS_FLAG) | TA(UGREATER_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SRA | T(dst) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG));
return push_inst(compiler, SUBU | SA(EQUAL_FLAG) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);
case SLJIT_AND:
EMIT_LOGICAL(ANDI, AND);
@ -337,7 +406,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -347,20 +416,22 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value), DR(dst));
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_addr >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_addr & 0xffff);
inst[0] = (inst[0] & 0xffff0000) | ((new_target >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_target & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_constant >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_constant & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}

View File

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -123,15 +123,15 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_a
#define EMIT_LOGICAL(op_imm, op_norm) \
if (flags & SRC2_IMM) { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_imm | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_imm | S(src1) | T(dst) | IMM(src2), DR(dst))); \
} \
else { \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, op_norm | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, op_norm | S(src1) | T(src2) | D(dst), DR(dst))); \
}
@ -144,16 +144,16 @@ static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst_a
} \
else \
ins = (op & SLJIT_I32_OP) ? op_imm : op_dimm; \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, ins | T(src1) | DA(EQUAL_FLAG) | SH_IMM(src2), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, ins | T(src1) | D(dst) | SH_IMM(src2), DR(dst))); \
} \
else { \
ins = (op & SLJIT_I32_OP) ? op_v : op_dv; \
if (op & SLJIT_SET_E) \
if (op & SLJIT_SET_Z) \
FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG)); \
if (CHECK_FLAGS(SLJIT_SET_E)) \
if (!(flags & UNUSED_DEST)) \
FAIL_IF(push_inst(compiler, ins | S(src2) | T(src1) | D(dst), DR(dst))); \
}
@ -161,6 +161,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
{
sljit_ins ins;
sljit_s32 is_overflow, is_carry, is_handled;
switch (GET_OPCODE(op)) {
case SLJIT_MOV:
@ -180,8 +181,9 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xff), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
case SLJIT_MOV_U16:
@ -194,8 +196,9 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
return push_inst(compiler, ANDI | S(src2) | T(dst) | IMM(0xffff), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
else {
SLJIT_ASSERT(dst == src2);
}
return SLJIT_SUCCESS;
case SLJIT_MOV_U32:
@ -209,18 +212,18 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_NOT:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
if (op & SLJIT_SET_E)
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST))
FAIL_IF(push_inst(compiler, NOR | S(src2) | T(src2) | D(dst), DR(dst)));
return SLJIT_SUCCESS;
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
if (op & SLJIT_SET_E)
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DCLZ, CLZ) | S(src2) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST))
FAIL_IF(push_inst(compiler, SELECT_OP(DCLZ, CLZ) | S(src2) | T(dst) | D(dst), DR(dst)));
#else
if (SLJIT_UNLIKELY(flags & UNUSED_DEST)) {
@ -237,130 +240,192 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(dst) | T(dst) | IMM(1), DR(dst)));
FAIL_IF(push_inst(compiler, BGEZ | S(TMP_REG1) | IMM(-2), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, SELECT_OP(DSLL, SLL) | T(TMP_REG1) | D(TMP_REG1) | SH_IMM(1), UNMOVABLE_INS));
if (op & SLJIT_SET_E)
return push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG);
#endif
return SLJIT_SUCCESS;
case SLJIT_ADD:
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_O) {
if (is_overflow) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
if (op & SLJIT_SET_E)
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O)) {
if (is_overflow || is_carry) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
else {
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
}
}
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(src2), DR(dst)));
}
else {
if (op & SLJIT_SET_O)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (op & SLJIT_SET_E)
if (is_overflow)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(ULESS_FLAG), ULESS_FLAG));
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | D(dst), DR(dst)));
}
/* a + b >= a | b (otherwise, the carry should be set to 1). */
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
if (!(op & SLJIT_SET_O))
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (!is_overflow)
return SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | TA(ULESS_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
return push_inst(compiler, SELECT_OP(DSRL32, SLL) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG) | SH_IMM(31), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | TA(OTHER_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
return push_inst(compiler, SELECT_OP(DSRL32, SRL) | TA(OTHER_FLAG) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG);
case SLJIT_ADDC:
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_C) {
if (is_carry) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, ORI | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
else {
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | SA(0) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, OR | S(src1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
}
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(src2), DR(dst)));
} else {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(ULESS_FLAG) | D(dst), DR(dst)));
if (!(op & SLJIT_SET_C))
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
if (!is_carry)
return SLJIT_SUCCESS;
/* Set ULESS_FLAG (dst == 0) && (ULESS_FLAG == 1). */
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG));
/* Set ULESS_FLAG (dst == 0) && (OTHER_FLAG == 1). */
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
/* Set carry flag. */
return push_inst(compiler, OR | SA(ULESS_FLAG) | TA(OVERFLOW_FLAG) | DA(ULESS_FLAG), ULESS_FLAG);
return push_inst(compiler, OR | SA(OTHER_FLAG) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);
case SLJIT_SUB:
if ((flags & SRC2_IMM) && ((op & (SLJIT_SET_U | SLJIT_SET_S)) || src2 == SIMM_MIN)) {
if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));
src2 = TMP_REG2;
flags &= ~SRC2_IMM;
}
is_handled = 0;
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_O) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
if (op & SLJIT_SET_E)
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTI | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
is_handled = 1;
}
}
if (!is_handled && GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
is_handled = 1;
if (flags & SRC2_IMM) {
FAIL_IF(push_inst(compiler, ADDIU | SA(0) | T(TMP_REG2) | IMM(src2), DR(TMP_REG2)));
src2 = TMP_REG2;
flags &= ~SRC2_IMM;
}
if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_GREATER || GET_FLAG_TYPE(op) == SLJIT_LESS_EQUAL)
{
FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
}
else if (GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER || GET_FLAG_TYPE(op) == SLJIT_SIG_LESS_EQUAL)
{
FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(OTHER_FLAG), OTHER_FLAG));
}
}
if (is_handled) {
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));
if (op & (SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(ULESS_FLAG) | IMM(src2), ULESS_FLAG));
if (!(flags & UNUSED_DEST))
return push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst));
}
else {
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (!(flags & UNUSED_DEST))
return push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst));
}
return SLJIT_SUCCESS;
}
is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (is_overflow) {
if (src2 >= 0)
FAIL_IF(push_inst(compiler, OR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
else
FAIL_IF(push_inst(compiler, NOR | S(src1) | T(src1) | DA(EQUAL_FLAG), EQUAL_FLAG));
}
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | TA(EQUAL_FLAG) | IMM(-src2), EQUAL_FLAG));
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OTHER_FLAG) | IMM(src2), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst)));
}
else {
if (op & SLJIT_SET_O)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (op & SLJIT_SET_E)
if (is_overflow)
FAIL_IF(push_inst(compiler, XOR | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
else if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
if (op & (SLJIT_SET_U | SLJIT_SET_C | SLJIT_SET_O))
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(ULESS_FLAG), ULESS_FLAG));
if (op & SLJIT_SET_U)
FAIL_IF(push_inst(compiler, SLTU | S(src2) | T(src1) | DA(UGREATER_FLAG), UGREATER_FLAG));
if (op & SLJIT_SET_S) {
FAIL_IF(push_inst(compiler, SLT | S(src1) | T(src2) | DA(LESS_FLAG), LESS_FLAG));
FAIL_IF(push_inst(compiler, SLT | S(src2) | T(src1) | DA(GREATER_FLAG), GREATER_FLAG));
}
if (is_overflow || is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OTHER_FLAG), OTHER_FLAG));
/* dst may be the same as src1 or src2. */
if (CHECK_FLAGS(SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_C))
if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (!(op & SLJIT_SET_O))
if (!is_overflow)
return SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | TA(ULESS_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
return push_inst(compiler, SELECT_OP(DSRL32, SRL) | TA(OVERFLOW_FLAG) | DA(OVERFLOW_FLAG) | SH_IMM(31), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SELECT_OP(DSLL32, SLL) | TA(OTHER_FLAG) | D(TMP_REG1) | SH_IMM(31), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, XOR | S(TMP_REG1) | TA(EQUAL_FLAG) | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, XOR | S(dst) | TA(EQUAL_FLAG) | DA(OTHER_FLAG), OTHER_FLAG));
if (op & SLJIT_SET_Z)
FAIL_IF(push_inst(compiler, SELECT_OP(DADDU, ADDU) | S(dst) | TA(0) | DA(EQUAL_FLAG), EQUAL_FLAG));
return push_inst(compiler, SELECT_OP(DSRL32, SRL) | TA(OTHER_FLAG) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG);
case SLJIT_SUBC:
if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
@ -369,28 +434,31 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
flags &= ~SRC2_IMM;
}
is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
if (flags & SRC2_IMM) {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(OVERFLOW_FLAG) | IMM(src2), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTIU | S(src1) | TA(EQUAL_FLAG) | IMM(src2), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, SELECT_OP(DADDIU, ADDIU) | S(src1) | T(dst) | IMM(-src2), DR(dst)));
}
else {
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(src1) | T(src2) | DA(EQUAL_FLAG), EQUAL_FLAG));
/* dst may be the same as src1 or src2. */
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(src1) | T(src2) | D(dst), DR(dst)));
}
if (op & SLJIT_SET_C)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(ULESS_FLAG) | DA(LESS_FLAG), LESS_FLAG));
if (is_carry)
FAIL_IF(push_inst(compiler, SLTU | S(dst) | TA(OTHER_FLAG) | D(TMP_REG1), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(dst) | TA(ULESS_FLAG) | D(dst), DR(dst)));
return (op & SLJIT_SET_C) ? push_inst(compiler, OR | SA(OVERFLOW_FLAG) | TA(LESS_FLAG) | DA(ULESS_FLAG), ULESS_FLAG) : SLJIT_SUCCESS;
FAIL_IF(push_inst(compiler, SELECT_OP(DSUBU, SUBU) | S(dst) | TA(OTHER_FLAG) | D(dst), DR(dst)));
return (is_carry) ? push_inst(compiler, OR | SA(EQUAL_FLAG) | T(TMP_REG1) | DA(OTHER_FLAG), OTHER_FLAG) : SLJIT_SUCCESS;
case SLJIT_MUL:
SLJIT_ASSERT(!(flags & SRC2_IMM));
if (!(op & SLJIT_SET_O)) {
if (GET_FLAG_TYPE(op) != SLJIT_MUL_OVERFLOW) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
if (op & SLJIT_I32_OP)
return push_inst(compiler, MUL | S(src1) | T(src2) | D(dst), DR(dst));
@ -402,10 +470,10 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
#endif
}
FAIL_IF(push_inst(compiler, SELECT_OP(DMULT, MULT) | S(src1) | T(src2), MOVABLE_INS));
FAIL_IF(push_inst(compiler, MFHI | DA(ULESS_FLAG), ULESS_FLAG));
FAIL_IF(push_inst(compiler, MFHI | DA(EQUAL_FLAG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, MFLO | D(dst), DR(dst)));
FAIL_IF(push_inst(compiler, SELECT_OP(DSRA32, SRA) | T(dst) | DA(UGREATER_FLAG) | SH_IMM(31), UGREATER_FLAG));
return push_inst(compiler, SELECT_OP(DSUBU, SUBU) | SA(ULESS_FLAG) | TA(UGREATER_FLAG) | DA(OVERFLOW_FLAG), OVERFLOW_FLAG);
FAIL_IF(push_inst(compiler, SELECT_OP(DSRA32, SRA) | T(dst) | DA(OTHER_FLAG) | SH_IMM(31), OTHER_FLAG));
return push_inst(compiler, SELECT_OP(DSUBU, SUBU) | SA(EQUAL_FLAG) | TA(OTHER_FLAG) | DA(OTHER_FLAG), OTHER_FLAG);
case SLJIT_AND:
EMIT_LOGICAL(ANDI, AND);
@ -432,7 +500,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -446,24 +514,26 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(dst) | T(dst) | IMM(init_value), DR(dst));
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_addr >> 48) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | ((new_addr >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_addr >> 16) & 0xffff);
inst[5] = (inst[5] & 0xffff0000) | (new_addr & 0xffff);
inst[0] = (inst[0] & 0xffff0000) | ((new_target >> 48) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | ((new_target >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_target >> 16) & 0xffff);
inst[5] = (inst[5] & 0xffff0000) | (new_target & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 6);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_constant >> 48) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | ((new_constant >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_constant >> 16) & 0xffff);
inst[5] = (inst[5] & 0xffff0000) | (new_constant & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 6);
}

View File

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -57,19 +57,14 @@ typedef sljit_u32 sljit_ins;
#define RETURN_ADDR_REG 31
/* Flags are kept in volatile registers. */
#define EQUAL_FLAG 12
/* And carry flag as well. */
#define ULESS_FLAG 13
#define UGREATER_FLAG 14
#define LESS_FLAG 15
#define GREATER_FLAG 31
#define OVERFLOW_FLAG 1
#define EQUAL_FLAG 31
#define OTHER_FLAG 1
#define TMP_FREG1 (0)
#define TMP_FREG2 ((SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1) << 1)
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
0, 2, 5, 6, 7, 8, 9, 10, 11, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 3, 25, 4
0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 23, 22, 21, 20, 19, 18, 17, 16, 29, 3, 25, 4
};
/* --------------------------------------------------------------------- */
@ -178,7 +173,13 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 5] = {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
#define CLZ (HI(28) | LO(32))
#define DCLZ (HI(28) | LO(36))
#define MOVF (HI(0) | (0 << 16) | LO(1))
#define MOVN (HI(0) | LO(11))
#define MOVT (HI(0) | (1 << 16) | LO(1))
#define MOVZ (HI(0) | LO(10))
#define MUL (HI(28) | LO(2))
#define PREF (HI(51))
#define PREFX (HI(19) | LO(15))
#define SEB (HI(31) | (16 << 6) | LO(32))
#define SEH (HI(31) | (24 << 6) | LO(32))
#endif
@ -218,7 +219,7 @@ static SLJIT_INLINE sljit_ins invert_branch(sljit_s32 flags)
return (flags & IS_BIT26_COND) ? (1 << 26) : (1 << 16);
}
static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
@ -237,9 +238,10 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
target_addr = jump->u.target;
else {
SLJIT_ASSERT(jump->flags & JUMP_LABEL);
target_addr = (sljit_uw)(code + jump->u.label->size);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
inst = (sljit_ins*)jump->addr;
inst = (sljit_ins *)jump->addr;
if (jump->flags & IS_COND)
inst--;
@ -250,7 +252,7 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
/* B instructions. */
if (jump->flags & IS_MOVABLE) {
diff = ((sljit_sw)target_addr - (sljit_sw)(inst)) >> 2;
diff = ((sljit_sw)target_addr - (sljit_sw)inst - executable_offset) >> 2;
if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
jump->flags |= PATCH_B;
@ -268,7 +270,7 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
}
}
else {
diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1)) >> 2;
diff = ((sljit_sw)target_addr - (sljit_sw)(inst + 1) - executable_offset) >> 2;
if (diff <= SIMM_MAX && diff >= SIMM_MIN) {
jump->flags |= PATCH_B;
@ -364,6 +366,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
sljit_sw executable_offset;
sljit_uw addr;
struct sljit_label *label;
@ -380,9 +383,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
code_ptr = code;
word_count = 0;
executable_offset = SLJIT_EXEC_OFFSET(code);
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
@ -393,8 +399,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
SLJIT_ASSERT(!const_ || const_->addr >= word_count);
/* These structures are ordered by their address. */
if (label && label->size == word_count) {
/* Just recording the address. */
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -404,7 +409,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#else
jump->addr = (sljit_uw)(code_ptr - 7);
#endif
code_ptr = detect_jump_type(jump, code_ptr, code);
code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
jump = jump->next;
}
if (const_ && const_->addr == word_count) {
@ -434,16 +439,16 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
while (jump) {
do {
addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
buf_ptr = (sljit_ins*)jump->addr;
buf_ptr = (sljit_ins *)jump->addr;
if (jump->flags & PATCH_B) {
addr = (sljit_sw)(addr - (jump->addr + sizeof(sljit_ins))) >> 2;
addr = (sljit_sw)(addr - ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins))) >> 2;
SLJIT_ASSERT((sljit_sw)addr <= SIMM_MAX && (sljit_sw)addr >= SIMM_MIN);
buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | (addr & 0xffff);
break;
}
if (jump->flags & PATCH_J) {
SLJIT_ASSERT((addr & ~0xfffffff) == ((jump->addr + sizeof(sljit_ins)) & ~0xfffffff));
SLJIT_ASSERT((addr & ~0xfffffff) == (((sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset) + sizeof(sljit_ins)) & ~0xfffffff));
buf_ptr[0] |= (addr >> 2) & 0x03ffffff;
break;
}
@ -476,7 +481,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
#ifndef __GNUC__
SLJIT_CACHE_FLUSH(code, code_ptr);
#else
@ -486,6 +496,32 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
return code;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
sljit_sw fir = 0;
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#elif defined(__GNUC__)
asm ("cfc1 %0, $0" : "=r"(fir));
return (fir >> 22) & 0x1;
#else
#error "FIR check is not implemented for this architecture"
#endif
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
case SLJIT_HAS_CLZ:
case SLJIT_HAS_CMOV:
return 1;
#endif
default:
return fir;
}
}
/* --------------------------------------------------------------------- */
/* Entry, exit */
/* --------------------------------------------------------------------- */
@ -520,10 +556,6 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#define SLOW_SRC2 0x20000
#define SLOW_DEST 0x40000
/* Only these flags are set. UNUSED_DEST is not set when no flags should be set. */
#define CHECK_FLAGS(list) \
(!(flags & UNUSED_DEST) || (op & GET_FLAGS(~(list))))
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
#define STACK_STORE SW
#define STACK_LOAD LW
@ -759,18 +791,19 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
base = arg & REG_MASK;
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
argw &= 0x3;
if ((flags & WRITE_BACK) && reg_ar == DR(base)) {
SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar);
FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
reg_ar = DR(TMP_REG1);
if (SLJIT_UNLIKELY(flags & WRITE_BACK)) {
SLJIT_ASSERT(argw == 0);
FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(OFFS_REG(arg)) | D(base), DR(base)));
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot);
}
argw &= 0x3;
/* Using the cache. */
if (argw == compiler->cache_argw) {
if (!(flags & WRITE_BACK)) {
if (arg == compiler->cache_arg)
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(TMP_REG3) | TA(reg_ar), delay_slot);
if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) {
if (arg == next_arg && argw == (next_argw & 0x3)) {
compiler->cache_arg = arg;
@ -782,13 +815,6 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);
}
}
else {
if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) {
FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(TMP_REG3) | D(base), DR(base)));
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot);
}
}
}
if (SLJIT_UNLIKELY(argw)) {
compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
@ -796,7 +822,6 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(arg)) | D(TMP_REG3) | SH_IMM(argw), DR(TMP_REG3)));
}
if (!(flags & WRITE_BACK)) {
if (arg == next_arg && argw == (next_argw & 0x3)) {
compiler->cache_arg = arg;
compiler->cache_argw = argw;
@ -807,24 +832,8 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | DA(tmp_ar), tmp_ar));
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | SA(tmp_ar) | TA(reg_ar), delay_slot);
}
FAIL_IF(push_inst(compiler, ADDU_W | S(base) | T(!argw ? OFFS_REG(arg) : TMP_REG3) | D(base), DR(base)));
return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar), delay_slot);
}
if (SLJIT_UNLIKELY(flags & WRITE_BACK) && base) {
/* Update only applies if a base register exists. */
if (reg_ar == DR(base)) {
SLJIT_ASSERT(!(flags & LOAD_DATA) && DR(TMP_REG1) != reg_ar);
if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | S(base) | TA(reg_ar) | IMM(argw), MOVABLE_INS));
if (argw)
return push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base));
return SLJIT_SUCCESS;
}
FAIL_IF(push_inst(compiler, ADDU_W | SA(reg_ar) | TA(0) | D(TMP_REG1), DR(TMP_REG1)));
reg_ar = DR(TMP_REG1);
}
if (argw <= SIMM_MAX && argw >= SIMM_MIN) {
if (argw)
FAIL_IF(push_inst(compiler, ADDIU_W | S(base) | T(base) | IMM(argw), DR(base)));
@ -914,9 +923,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
if (GET_FLAGS(op))
SLJIT_ASSERT(HAS_FLAGS(op));
flags |= UNUSED_DEST;
}
else if (FAST_IS_REG(dst)) {
@ -1079,6 +1086,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
sljit_s32 src, sljit_sw srcw)
{
if (!(src & OFFS_REG_MASK)) {
if (srcw <= SIMM_MAX && srcw >= SIMM_MIN)
return push_inst(compiler, PREF | S(src & REG_MASK) | IMM(srcw), MOVABLE_INS);
FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);
}
srcw &= 0x3;
if (SLJIT_UNLIKELY(srcw != 0)) {
FAIL_IF(push_inst(compiler, SLL_W | T(OFFS_REG(src)) | D(TMP_REG1) | SH_IMM(srcw), DR(TMP_REG1)));
return push_inst(compiler, PREFX | S(src & REG_MASK) | T(TMP_REG1), MOVABLE_INS);
}
return push_inst(compiler, PREFX | S(src & REG_MASK) | T(OFFS_REG(src)), MOVABLE_INS);
}
#endif
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
@ -1094,6 +1124,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src, srcw);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) {
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
if (op <= SLJIT_MOV_P && (src & SLJIT_MEM))
return emit_prefetch(compiler, src, srcw);
#endif
return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if ((op & SLJIT_I32_OP) && GET_OPCODE(op) >= SLJIT_NOT) {
flags |= INT_DATA | SIGNED_DATA;
@ -1197,6 +1235,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
return SLJIT_SUCCESS;
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if (op & SLJIT_I32_OP) {
flags |= INT_DATA | SIGNED_DATA;
@ -1273,19 +1314,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* Floating point operators */
/* --------------------------------------------------------------------- */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#elif defined(__GNUC__)
sljit_sw fir;
asm ("cfc1 %0, $0" : "=r"(fir));
return (fir >> 22) & 0x1;
#else
#error "FIR check is not implemented for this architecture"
#endif
}
#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7))
#define FMT(op) (((op & SLJIT_F32_OP) ^ SLJIT_F32_OP) << (21 - 8))
@ -1308,9 +1336,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
FAIL_IF(push_inst(compiler, (TRUNC_W_S ^ (flags >> 19)) | FMT(op) | FS(src) | FD(TMP_FREG1), MOVABLE_INS));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst))
return push_inst(compiler, MFC1 | flags | T(dst) | FS(TMP_FREG1), MOVABLE_INS);
@ -1364,6 +1389,8 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
sljit_ins inst;
if (src1 & SLJIT_MEM) {
FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
src1 = TMP_FREG1;
@ -1378,25 +1405,26 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compile
else
src2 <<= 1;
/* src2 and src1 are swapped. */
if (op & SLJIT_SET_E) {
FAIL_IF(push_inst(compiler, C_UEQ_S | FMT(op) | FT(src2) | FS(src1), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, CFC1 | TA(EQUAL_FLAG) | DA(FCSR_REG), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, SRL | TA(EQUAL_FLAG) | DA(EQUAL_FLAG) | SH_IMM(23), EQUAL_FLAG));
FAIL_IF(push_inst(compiler, ANDI | SA(EQUAL_FLAG) | TA(EQUAL_FLAG) | IMM(1), EQUAL_FLAG));
switch (GET_FLAG_TYPE(op)) {
case SLJIT_EQUAL_F64:
case SLJIT_NOT_EQUAL_F64:
inst = C_UEQ_S;
break;
case SLJIT_LESS_F64:
case SLJIT_GREATER_EQUAL_F64:
inst = C_ULT_S;
break;
case SLJIT_GREATER_F64:
case SLJIT_LESS_EQUAL_F64:
inst = C_ULE_S;
break;
default:
SLJIT_ASSERT(GET_FLAG_TYPE(op) == SLJIT_UNORDERED_F64 || GET_FLAG_TYPE(op) == SLJIT_ORDERED_F64);
inst = C_UN_S;
break;
}
if (op & SLJIT_SET_S) {
/* Mixing the instructions for the two checks. */
FAIL_IF(push_inst(compiler, C_ULT_S | FMT(op) | FT(src2) | FS(src1), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, CFC1 | TA(ULESS_FLAG) | DA(FCSR_REG), ULESS_FLAG));
FAIL_IF(push_inst(compiler, C_ULT_S | FMT(op) | FT(src1) | FS(src2), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, SRL | TA(ULESS_FLAG) | DA(ULESS_FLAG) | SH_IMM(23), ULESS_FLAG));
FAIL_IF(push_inst(compiler, ANDI | SA(ULESS_FLAG) | TA(ULESS_FLAG) | IMM(1), ULESS_FLAG));
FAIL_IF(push_inst(compiler, CFC1 | TA(UGREATER_FLAG) | DA(FCSR_REG), UGREATER_FLAG));
FAIL_IF(push_inst(compiler, SRL | TA(UGREATER_FLAG) | DA(UGREATER_FLAG) | SH_IMM(23), UGREATER_FLAG));
FAIL_IF(push_inst(compiler, ANDI | SA(UGREATER_FLAG) | TA(UGREATER_FLAG) | IMM(1), UGREATER_FLAG));
}
return push_inst(compiler, C_UN_S | FMT(op) | FT(src2) | FS(src1), FCSR_FCC);
return push_inst(compiler, inst | FMT(op) | FT(src2) | FS(src1), UNMOVABLE_INS);
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
@ -1542,10 +1570,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
/* For UNUSED dst. Uncommon, but possible. */
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst))
return push_inst(compiler, ADDU_W | SA(RETURN_ADDR_REG) | TA(0) | D(dst), DR(dst));
@ -1634,55 +1658,39 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compile
switch (type) {
case SLJIT_EQUAL:
case SLJIT_NOT_EQUAL_F64:
BR_NZ(EQUAL_FLAG);
break;
case SLJIT_NOT_EQUAL:
case SLJIT_EQUAL_F64:
BR_Z(EQUAL_FLAG);
break;
case SLJIT_LESS:
case SLJIT_LESS_F64:
BR_Z(ULESS_FLAG);
break;
case SLJIT_GREATER_EQUAL:
case SLJIT_GREATER_EQUAL_F64:
BR_NZ(ULESS_FLAG);
break;
case SLJIT_GREATER:
case SLJIT_GREATER_F64:
BR_Z(UGREATER_FLAG);
break;
case SLJIT_LESS_EQUAL:
case SLJIT_LESS_EQUAL_F64:
BR_NZ(UGREATER_FLAG);
break;
case SLJIT_SIG_LESS:
BR_Z(LESS_FLAG);
break;
case SLJIT_SIG_GREATER_EQUAL:
BR_NZ(LESS_FLAG);
break;
case SLJIT_SIG_GREATER:
BR_Z(GREATER_FLAG);
break;
case SLJIT_SIG_LESS_EQUAL:
BR_NZ(GREATER_FLAG);
break;
case SLJIT_OVERFLOW:
case SLJIT_MUL_OVERFLOW:
BR_Z(OVERFLOW_FLAG);
BR_Z(OTHER_FLAG);
break;
case SLJIT_GREATER_EQUAL:
case SLJIT_LESS_EQUAL:
case SLJIT_SIG_GREATER_EQUAL:
case SLJIT_SIG_LESS_EQUAL:
case SLJIT_NOT_OVERFLOW:
case SLJIT_MUL_NOT_OVERFLOW:
BR_NZ(OVERFLOW_FLAG);
break;
case SLJIT_UNORDERED_F64:
BR_F();
BR_NZ(OTHER_FLAG);
break;
case SLJIT_NOT_EQUAL_F64:
case SLJIT_GREATER_EQUAL_F64:
case SLJIT_GREATER_F64:
case SLJIT_ORDERED_F64:
BR_T();
break;
case SLJIT_EQUAL_F64:
case SLJIT_LESS_F64:
case SLJIT_LESS_EQUAL_F64:
case SLJIT_UNORDERED_F64:
BR_F();
break;
default:
/* Not conditional branch. */
inst = 0;
@ -1854,86 +1862,6 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler
#undef RESOLVE_IMM1
#undef RESOLVE_IMM2
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_fcmp(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
struct sljit_jump *jump;
sljit_ins inst;
sljit_s32 if_true;
CHECK_ERROR_PTR();
CHECK_PTR(check_sljit_emit_fcmp(compiler, type, src1, src1w, src2, src2w));
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (src1 & SLJIT_MEM) {
PTR_FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(type) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
src1 = TMP_FREG1;
}
else
src1 <<= 1;
if (src2 & SLJIT_MEM) {
PTR_FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(type) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
src2 = TMP_FREG2;
}
else
src2 <<= 1;
jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
PTR_FAIL_IF(!jump);
set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
jump->flags |= IS_BIT16_COND;
switch (type & 0xff) {
case SLJIT_EQUAL_F64:
inst = C_UEQ_S;
if_true = 1;
break;
case SLJIT_NOT_EQUAL_F64:
inst = C_UEQ_S;
if_true = 0;
break;
case SLJIT_LESS_F64:
inst = C_ULT_S;
if_true = 1;
break;
case SLJIT_GREATER_EQUAL_F64:
inst = C_ULT_S;
if_true = 0;
break;
case SLJIT_GREATER_F64:
inst = C_ULE_S;
if_true = 0;
break;
case SLJIT_LESS_EQUAL_F64:
inst = C_ULE_S;
if_true = 1;
break;
case SLJIT_UNORDERED_F64:
inst = C_UN_S;
if_true = 1;
break;
default: /* Make compilers happy. */
SLJIT_ASSERT_STOP();
case SLJIT_ORDERED_F64:
inst = C_UN_S;
if_true = 0;
break;
}
PTR_FAIL_IF(push_inst(compiler, inst | FMT(type) | FT(src2) | FS(src1), UNMOVABLE_INS));
/* Intentionally the other opcode. */
PTR_FAIL_IF(push_inst(compiler, (if_true ? BC1F : BC1T) | JUMP_LENGTH, UNMOVABLE_INS));
PTR_FAIL_IF(emit_const(compiler, TMP_REG2, 0));
PTR_FAIL_IF(push_inst(compiler, JR | S(TMP_REG2), UNMOVABLE_INS));
jump->addr = compiler->size;
PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));
return jump;
}
#undef JUMP_LENGTH
#undef BR_Z
#undef BR_NZ
@ -2003,115 +1931,160 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw,
sljit_s32 type)
{
sljit_s32 sugg_dst_ar, dst_ar;
sljit_s32 flags = GET_ALL_FLAGS(op);
sljit_s32 src_ar, dst_ar;
sljit_s32 saved_op = op;
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# define mem_type WORD_DATA
sljit_s32 mem_type = WORD_DATA;
#else
sljit_s32 mem_type = (op & SLJIT_I32_OP) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
#endif
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
ADJUST_LOCAL_OFFSET(dst, dstw);
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
op = GET_OPCODE(op);
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32)
if (op == SLJIT_MOV_S32)
mem_type = INT_DATA | SIGNED_DATA;
#endif
sugg_dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);
dst_ar = DR((op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2);
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
ADJUST_LOCAL_OFFSET(src, srcw);
FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), src, srcw, dst, dstw));
src = TMP_REG1;
srcw = 0;
}
if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, DR(TMP_REG1), dst, dstw, dst, dstw));
switch (type & 0xff) {
case SLJIT_EQUAL:
case SLJIT_NOT_EQUAL:
FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
dst_ar = sugg_dst_ar;
break;
case SLJIT_LESS:
case SLJIT_GREATER_EQUAL:
case SLJIT_LESS_F64:
case SLJIT_GREATER_EQUAL_F64:
dst_ar = ULESS_FLAG;
break;
case SLJIT_GREATER:
case SLJIT_LESS_EQUAL:
case SLJIT_GREATER_F64:
case SLJIT_LESS_EQUAL_F64:
dst_ar = UGREATER_FLAG;
break;
case SLJIT_SIG_LESS:
case SLJIT_SIG_GREATER_EQUAL:
dst_ar = LESS_FLAG;
break;
case SLJIT_SIG_GREATER:
case SLJIT_SIG_LESS_EQUAL:
dst_ar = GREATER_FLAG;
break;
case SLJIT_OVERFLOW:
case SLJIT_NOT_OVERFLOW:
dst_ar = OVERFLOW_FLAG;
FAIL_IF(push_inst(compiler, SLTIU | SA(EQUAL_FLAG) | TA(dst_ar) | IMM(1), dst_ar));
src_ar = dst_ar;
break;
case SLJIT_MUL_OVERFLOW:
case SLJIT_MUL_NOT_OVERFLOW:
FAIL_IF(push_inst(compiler, SLTIU | SA(OVERFLOW_FLAG) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
dst_ar = sugg_dst_ar;
FAIL_IF(push_inst(compiler, SLTIU | SA(OTHER_FLAG) | TA(dst_ar) | IMM(1), dst_ar));
src_ar = dst_ar;
type ^= 0x1; /* Flip type bit for the XORI below. */
break;
case SLJIT_GREATER_F64:
case SLJIT_LESS_EQUAL_F64:
type ^= 0x1; /* Flip type bit for the XORI below. */
case SLJIT_EQUAL_F64:
case SLJIT_NOT_EQUAL_F64:
dst_ar = EQUAL_FLAG;
break;
case SLJIT_LESS_F64:
case SLJIT_GREATER_EQUAL_F64:
case SLJIT_UNORDERED_F64:
case SLJIT_ORDERED_F64:
FAIL_IF(push_inst(compiler, CFC1 | TA(sugg_dst_ar) | DA(FCSR_REG), sugg_dst_ar));
FAIL_IF(push_inst(compiler, SRL | TA(sugg_dst_ar) | DA(sugg_dst_ar) | SH_IMM(23), sugg_dst_ar));
FAIL_IF(push_inst(compiler, ANDI | SA(sugg_dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
dst_ar = sugg_dst_ar;
FAIL_IF(push_inst(compiler, CFC1 | TA(dst_ar) | DA(FCSR_REG), dst_ar));
FAIL_IF(push_inst(compiler, SRL | TA(dst_ar) | DA(dst_ar) | SH_IMM(23), dst_ar));
FAIL_IF(push_inst(compiler, ANDI | SA(dst_ar) | TA(dst_ar) | IMM(1), dst_ar));
src_ar = dst_ar;
break;
default:
SLJIT_ASSERT_STOP();
dst_ar = sugg_dst_ar;
src_ar = OTHER_FLAG;
break;
}
if (type & 0x1) {
FAIL_IF(push_inst(compiler, XORI | SA(dst_ar) | TA(sugg_dst_ar) | IMM(1), sugg_dst_ar));
dst_ar = sugg_dst_ar;
FAIL_IF(push_inst(compiler, XORI | SA(src_ar) | TA(dst_ar) | IMM(1), dst_ar));
src_ar = dst_ar;
}
if (op >= SLJIT_ADD) {
if (DR(TMP_REG2) != dst_ar)
FAIL_IF(push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
return emit_op(compiler, op | flags, mem_type | CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0);
if (op < SLJIT_ADD) {
if (dst & SLJIT_MEM)
return emit_op_mem(compiler, mem_type, src_ar, dst, dstw);
if (src_ar != dst_ar)
return push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | DA(dst_ar), dst_ar);
return SLJIT_SUCCESS;
}
/* OTHER_FLAG cannot be specified as src2 argument at the moment. */
if (DR(TMP_REG2) != src_ar)
FAIL_IF(push_inst(compiler, ADDU_W | SA(src_ar) | TA(0) | D(TMP_REG2), DR(TMP_REG2)));
mem_type |= CUMULATIVE_OP | LOGICAL_OP | IMM_OP | ALT_KEEP_CACHE;
if (dst & SLJIT_MEM)
return emit_op_mem(compiler, mem_type, dst_ar, dst, dstw);
return emit_op(compiler, saved_op, mem_type, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, TMP_REG2, 0);
}
if (sugg_dst_ar != dst_ar)
return push_inst(compiler, ADDU_W | SA(dst_ar) | TA(0) | DA(sugg_dst_ar), sugg_dst_ar);
return SLJIT_SUCCESS;
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
sljit_s32 src, sljit_sw srcw)
{
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
sljit_ins ins;
#endif
#if (defined SLJIT_CONFIG_MIPS_32 && SLJIT_CONFIG_MIPS_32)
# undef mem_type
CHECK_ERROR();
CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
#if (defined SLJIT_MIPS_R1 && SLJIT_MIPS_R1)
if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
#if (defined SLJIT_CONFIG_MIPS_64 && SLJIT_CONFIG_MIPS_64)
if (dst_reg & SLJIT_I32_OP)
srcw = (sljit_s32)srcw;
#endif
FAIL_IF(load_immediate(compiler, DR(TMP_REG1), srcw));
src = TMP_REG1;
srcw = 0;
}
dst_reg &= ~SLJIT_I32_OP;
switch (type & 0xff) {
case SLJIT_EQUAL:
ins = MOVZ | TA(EQUAL_FLAG);
break;
case SLJIT_NOT_EQUAL:
ins = MOVN | TA(EQUAL_FLAG);
break;
case SLJIT_LESS:
case SLJIT_GREATER:
case SLJIT_SIG_LESS:
case SLJIT_SIG_GREATER:
case SLJIT_OVERFLOW:
case SLJIT_MUL_OVERFLOW:
ins = MOVN | TA(OTHER_FLAG);
break;
case SLJIT_GREATER_EQUAL:
case SLJIT_LESS_EQUAL:
case SLJIT_SIG_GREATER_EQUAL:
case SLJIT_SIG_LESS_EQUAL:
case SLJIT_NOT_OVERFLOW:
case SLJIT_MUL_NOT_OVERFLOW:
ins = MOVZ | TA(OTHER_FLAG);
break;
case SLJIT_EQUAL_F64:
case SLJIT_LESS_F64:
case SLJIT_LESS_EQUAL_F64:
case SLJIT_UNORDERED_F64:
ins = MOVT;
break;
case SLJIT_NOT_EQUAL_F64:
case SLJIT_GREATER_EQUAL_F64:
case SLJIT_GREATER_F64:
case SLJIT_ORDERED_F64:
ins = MOVF;
break;
default:
ins = MOVZ | TA(OTHER_FLAG);
SLJIT_UNREACHABLE();
break;
}
return push_inst(compiler, ins | S(src) | D(dst_reg), DR(dst_reg));
#else
return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#endif
}
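The switch above picks the MIPS conditional-move opcode from the SLJIT condition type: MOVZ/MOVN test an integer flag register, MOVT/MOVF test the FPU condition bit, and pre-R1 cores fall back to sljit_emit_cmov_generic. As a hedged illustration only (these helpers are not part of the upstream diff), the integer forms encode the following C-level semantics:

/* Illustrative helpers, not in sljit: what the selected instruction does
   with the flag register chosen via TA(...). */
static long cmov_movn(long dst, long src, long flag)
{
	return (flag != 0) ? src : dst;	/* MOVN: move when flag is non-zero */
}
static long cmov_movz(long dst, long src, long flag)
{
	return (flag == 0) ? src : dst;	/* MOVZ: move when flag is zero */
}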
@ -2128,7 +2101,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
PTR_FAIL_IF(!const_);
set_const(const_, compiler);
reg = SLOW_IS_REG(dst) ? dst : TMP_REG2;
reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(emit_const(compiler, reg, init_value));

View File

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -88,77 +88,86 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_NEG:
SLJIT_ASSERT(src1 == TMP_REG1);
return push_inst(compiler, NEG | OERC(flags) | D(dst) | A(src2));
/* Setting XER SO is not enough, CR SO is also needed. */
return push_inst(compiler, NEG | OE((flags & ALT_FORM1) ? ALT_SET_FLAGS : 0) | RC(flags) | D(dst) | A(src2));
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1);
return push_inst(compiler, CNTLZW | RC(flags) | S(src2) | A(dst));
return push_inst(compiler, CNTLZW | S(src2) | A(dst));
case SLJIT_ADD:
if (flags & ALT_FORM1) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, ADDI | D(dst) | A(src1) | compiler->imm);
/* Setting XER SO is not enough, CR SO is also needed. */
return push_inst(compiler, ADD | OE(ALT_SET_FLAGS) | RC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2));
}
if (flags & ALT_FORM2) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
if (flags & ALT_FORM3)
return push_inst(compiler, ADDIS | D(dst) | A(src1) | compiler->imm);
if (flags & ALT_FORM4) {
FAIL_IF(push_inst(compiler, ADDIS | D(dst) | A(src1) | (((compiler->imm >> 16) & 0xffff) + ((compiler->imm >> 15) & 0x1))));
src1 = dst;
}
return push_inst(compiler, ADDI | D(dst) | A(src1) | (compiler->imm & 0xffff));
}
if (flags & ALT_FORM3) {
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, ADDIC | D(dst) | A(src1) | compiler->imm);
}
if (flags & ALT_FORM4) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
FAIL_IF(push_inst(compiler, ADDI | D(dst) | A(src1) | (compiler->imm & 0xffff)));
return push_inst(compiler, ADDIS | D(dst) | A(dst) | (((compiler->imm >> 16) & 0xffff) + ((compiler->imm >> 15) & 0x1)));
}
if (!(flags & ALT_SET_FLAGS))
return push_inst(compiler, ADD | D(dst) | A(src1) | B(src2));
return push_inst(compiler, ADDC | OERC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2));
if (flags & ALT_FORM4)
return push_inst(compiler, ADDC | RC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2));
return push_inst(compiler, ADD | RC(flags) | D(dst) | A(src1) | B(src2));
case SLJIT_ADDC:
if (flags & ALT_FORM1) {
FAIL_IF(push_inst(compiler, MFXER | D(0)));
FAIL_IF(push_inst(compiler, ADDE | D(dst) | A(src1) | B(src2)));
return push_inst(compiler, MTXER | S(0));
}
return push_inst(compiler, ADDE | D(dst) | A(src1) | B(src2));
case SLJIT_SUB:
if (flags & ALT_FORM1) {
if (flags & ALT_FORM2) {
FAIL_IF(push_inst(compiler, CMPLI | CRD(0) | A(src1) | compiler->imm));
if (!(flags & ALT_FORM3))
return SLJIT_SUCCESS;
return push_inst(compiler, ADDI | D(dst) | A(src1) | (-compiler->imm & 0xffff));
}
FAIL_IF(push_inst(compiler, CMPL | CRD(0) | A(src1) | B(src2)));
if (!(flags & ALT_FORM3))
return SLJIT_SUCCESS;
return push_inst(compiler, SUBF | D(dst) | A(src2) | B(src1));
}
if (flags & ALT_FORM2) {
/* Setting XER SO is not enough, CR SO is also needed. */
return push_inst(compiler, SUBF | OE(ALT_SET_FLAGS) | RC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1));
}
if (flags & ALT_FORM3) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, SUBFIC | D(dst) | A(src1) | compiler->imm);
}
if (flags & (ALT_FORM2 | ALT_FORM3)) {
if (flags & ALT_FORM4) {
if (flags & ALT_FORM5) {
SLJIT_ASSERT(src2 == TMP_REG2);
if (flags & ALT_FORM2)
FAIL_IF(push_inst(compiler, CMPI | CRD(0) | A(src1) | compiler->imm));
if (flags & ALT_FORM3)
return push_inst(compiler, CMPLI | CRD(4) | A(src1) | compiler->imm);
return SLJIT_SUCCESS;
return push_inst(compiler, CMPI | CRD(0) | A(src1) | compiler->imm);
}
if (flags & (ALT_FORM4 | ALT_FORM5)) {
if (flags & ALT_FORM4)
FAIL_IF(push_inst(compiler, CMPL | CRD(4) | A(src1) | B(src2)));
if (flags & ALT_FORM5)
FAIL_IF(push_inst(compiler, CMP | CRD(0) | A(src1) | B(src2)));
return SLJIT_SUCCESS;
return push_inst(compiler, CMP | CRD(0) | A(src1) | B(src2));
}
if (!(flags & ALT_SET_FLAGS))
return push_inst(compiler, SUBF | D(dst) | A(src2) | B(src1));
if (flags & ALT_FORM6)
FAIL_IF(push_inst(compiler, CMPL | CRD(4) | A(src1) | B(src2)));
return push_inst(compiler, SUBFC | OERC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1));
if (flags & ALT_FORM5)
return push_inst(compiler, SUBFC | RC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, SUBF | RC(flags) | D(dst) | A(src2) | B(src1));
case SLJIT_SUBC:
if (flags & ALT_FORM1) {
FAIL_IF(push_inst(compiler, MFXER | D(0)));
FAIL_IF(push_inst(compiler, SUBFE | D(dst) | A(src2) | B(src1)));
return push_inst(compiler, MTXER | S(0));
}
return push_inst(compiler, SUBFE | D(dst) | A(src2) | B(src1));
case SLJIT_MUL:
@ -166,7 +175,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, MULLI | D(dst) | A(src1) | compiler->imm);
}
return push_inst(compiler, MULLW | OERC(flags) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, MULLW | OE(flags) | RC(flags) | D(dst) | A(src2) | B(src1));
case SLJIT_AND:
if (flags & ALT_FORM1) {
@ -228,19 +237,15 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, SRW | RC(flags) | S(src1) | A(dst) | B(src2));
case SLJIT_ASHR:
if (flags & ALT_FORM3)
FAIL_IF(push_inst(compiler, MFXER | D(0)));
if (flags & ALT_FORM1) {
SLJIT_ASSERT(src2 == TMP_REG2);
compiler->imm &= 0x1f;
FAIL_IF(push_inst(compiler, SRAWI | RC(flags) | S(src1) | A(dst) | (compiler->imm << 11)));
return push_inst(compiler, SRAWI | RC(flags) | S(src1) | A(dst) | (compiler->imm << 11));
}
else
FAIL_IF(push_inst(compiler, SRAW | RC(flags) | S(src1) | A(dst) | B(src2)));
return (flags & ALT_FORM3) ? push_inst(compiler, MTXER | S(0)) : SLJIT_SUCCESS;
return push_inst(compiler, SRAW | RC(flags) | S(src1) | A(dst) | B(src2));
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -250,20 +255,22 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value));
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_addr >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_addr & 0xffff);
inst[0] = (inst[0] & 0xffff0000) | ((new_target >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_target & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_constant >> 16) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | (new_constant & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
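Both patchers above overwrite only the low 16 bits of the two instructions of an addis/ori-style immediate load, then flush the instruction cache at the executable address derived from the new executable_offset parameter. A minimal standalone sketch of the same splitting (names are illustrative, not part of the diff):

#include <stdint.h>
/* Sketch only: mirrors the inst[0]/inst[1] patching done by sljit_set_const
   on 32-bit PowerPC; cache flushing is omitted here. */
static void patch_imm32(uint32_t inst[2], uint32_t value)
{
	inst[0] = (inst[0] & 0xffff0000u) | ((value >> 16) & 0xffff);	/* high half */
	inst[1] = (inst[1] & 0xffff0000u) | (value & 0xffff);		/* low half */
}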

View File

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -204,84 +204,118 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_NEG:
SLJIT_ASSERT(src1 == TMP_REG1);
if ((flags & (ALT_FORM1 | ALT_SIGN_EXT)) == (ALT_FORM1 | ALT_SIGN_EXT)) {
FAIL_IF(push_inst(compiler, RLDI(TMP_REG2, src2, 32, 31, 1)));
FAIL_IF(push_inst(compiler, NEG | OE(ALT_SET_FLAGS) | RC(ALT_SET_FLAGS) | D(dst) | A(TMP_REG2)));
return push_inst(compiler, RLDI(dst, dst, 32, 32, 0));
}
UN_EXTS();
return push_inst(compiler, NEG | OERC(flags) | D(dst) | A(src2));
/* Setting XER SO is not enough, CR SO is also needed. */
return push_inst(compiler, NEG | OE((flags & ALT_FORM1) ? ALT_SET_FLAGS : 0) | RC(flags) | D(dst) | A(src2));
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1);
if (flags & ALT_FORM1)
return push_inst(compiler, CNTLZW | RC(flags) | S(src2) | A(dst));
return push_inst(compiler, CNTLZD | RC(flags) | S(src2) | A(dst));
return push_inst(compiler, CNTLZW | S(src2) | A(dst));
return push_inst(compiler, CNTLZD | S(src2) | A(dst));
case SLJIT_ADD:
if (flags & ALT_FORM1) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, ADDI | D(dst) | A(src1) | compiler->imm);
if (flags & ALT_SIGN_EXT) {
FAIL_IF(push_inst(compiler, RLDI(TMP_REG1, src1, 32, 31, 1)));
src1 = TMP_REG1;
FAIL_IF(push_inst(compiler, RLDI(TMP_REG2, src2, 32, 31, 1)));
src2 = TMP_REG2;
}
/* Setting XER SO is not enough, CR SO is also needed. */
FAIL_IF(push_inst(compiler, ADD | OE(ALT_SET_FLAGS) | RC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2)));
if (flags & ALT_SIGN_EXT)
return push_inst(compiler, RLDI(dst, dst, 32, 32, 0));
return SLJIT_SUCCESS;
}
if (flags & ALT_FORM2) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
if (flags & ALT_FORM3)
return push_inst(compiler, ADDIS | D(dst) | A(src1) | compiler->imm);
if (flags & ALT_FORM4) {
FAIL_IF(push_inst(compiler, ADDIS | D(dst) | A(src1) | (((compiler->imm >> 16) & 0xffff) + ((compiler->imm >> 15) & 0x1))));
src1 = dst;
}
return push_inst(compiler, ADDI | D(dst) | A(src1) | (compiler->imm & 0xffff));
}
if (flags & ALT_FORM3) {
SLJIT_ASSERT(src2 == TMP_REG2);
BIN_IMM_EXTS();
return push_inst(compiler, ADDIC | D(dst) | A(src1) | compiler->imm);
}
if (flags & ALT_FORM4) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
FAIL_IF(push_inst(compiler, ADDI | D(dst) | A(src1) | (compiler->imm & 0xffff)));
return push_inst(compiler, ADDIS | D(dst) | A(dst) | (((compiler->imm >> 16) & 0xffff) + ((compiler->imm >> 15) & 0x1)));
}
if (!(flags & ALT_SET_FLAGS))
return push_inst(compiler, ADD | D(dst) | A(src1) | B(src2));
BIN_EXTS();
return push_inst(compiler, ADDC | OERC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2));
if (flags & ALT_FORM4)
return push_inst(compiler, ADDC | RC(ALT_SET_FLAGS) | D(dst) | A(src1) | B(src2));
return push_inst(compiler, ADD | RC(flags) | D(dst) | A(src1) | B(src2));
case SLJIT_ADDC:
if (flags & ALT_FORM1) {
FAIL_IF(push_inst(compiler, MFXER | D(0)));
FAIL_IF(push_inst(compiler, ADDE | D(dst) | A(src1) | B(src2)));
return push_inst(compiler, MTXER | S(0));
}
BIN_EXTS();
return push_inst(compiler, ADDE | D(dst) | A(src1) | B(src2));
case SLJIT_SUB:
if (flags & ALT_FORM1) {
if (flags & ALT_FORM2) {
FAIL_IF(push_inst(compiler, CMPLI | CRD(0 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | compiler->imm));
if (!(flags & ALT_FORM3))
return SLJIT_SUCCESS;
return push_inst(compiler, ADDI | D(dst) | A(src1) | (-compiler->imm & 0xffff));
}
FAIL_IF(push_inst(compiler, CMPL | CRD(0 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | B(src2)));
if (!(flags & ALT_FORM3))
return SLJIT_SUCCESS;
return push_inst(compiler, SUBF | D(dst) | A(src2) | B(src1));
}
if (flags & ALT_FORM2) {
if (flags & ALT_SIGN_EXT) {
FAIL_IF(push_inst(compiler, RLDI(TMP_REG1, src1, 32, 31, 1)));
src1 = TMP_REG1;
FAIL_IF(push_inst(compiler, RLDI(TMP_REG2, src2, 32, 31, 1)));
src2 = TMP_REG2;
}
/* Setting XER SO is not enough, CR SO is also needed. */
FAIL_IF(push_inst(compiler, SUBF | OE(ALT_SET_FLAGS) | RC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1)));
if (flags & ALT_SIGN_EXT)
return push_inst(compiler, RLDI(dst, dst, 32, 32, 0));
return SLJIT_SUCCESS;
}
if (flags & ALT_FORM3) {
/* Flags does not set: BIN_IMM_EXTS unnecessary. */
SLJIT_ASSERT(src2 == TMP_REG2);
return push_inst(compiler, SUBFIC | D(dst) | A(src1) | compiler->imm);
}
if (flags & (ALT_FORM2 | ALT_FORM3)) {
if (flags & ALT_FORM4) {
if (flags & ALT_FORM5) {
SLJIT_ASSERT(src2 == TMP_REG2);
if (flags & ALT_FORM2)
FAIL_IF(push_inst(compiler, CMPI | CRD(0 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | compiler->imm));
if (flags & ALT_FORM3)
return push_inst(compiler, CMPLI | CRD(4 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | compiler->imm);
return SLJIT_SUCCESS;
return push_inst(compiler, CMPI | CRD(0 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | compiler->imm);
}
if (flags & (ALT_FORM4 | ALT_FORM5)) {
if (flags & ALT_FORM4)
FAIL_IF(push_inst(compiler, CMPL | CRD(4 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | B(src2)));
if (flags & ALT_FORM5)
return push_inst(compiler, CMP | CRD(0 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | B(src2));
return SLJIT_SUCCESS;
}
if (!(flags & ALT_SET_FLAGS))
return push_inst(compiler, SUBF | D(dst) | A(src2) | B(src1));
BIN_EXTS();
if (flags & ALT_FORM6)
FAIL_IF(push_inst(compiler, CMPL | CRD(4 | ((flags & ALT_SIGN_EXT) ? 0 : 1)) | A(src1) | B(src2)));
return push_inst(compiler, SUBFC | OERC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1));
if (flags & ALT_FORM5)
return push_inst(compiler, SUBFC | RC(ALT_SET_FLAGS) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, SUBF | RC(flags) | D(dst) | A(src2) | B(src1));
case SLJIT_SUBC:
if (flags & ALT_FORM1) {
FAIL_IF(push_inst(compiler, MFXER | D(0)));
FAIL_IF(push_inst(compiler, SUBFE | D(dst) | A(src2) | B(src1)));
return push_inst(compiler, MTXER | S(0));
}
BIN_EXTS();
return push_inst(compiler, SUBFE | D(dst) | A(src2) | B(src1));
@ -292,8 +326,8 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
}
BIN_EXTS();
if (flags & ALT_FORM2)
return push_inst(compiler, MULLW | OERC(flags) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, MULLD | OERC(flags) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, MULLW | OE(flags) | RC(flags) | D(dst) | A(src2) | B(src1));
return push_inst(compiler, MULLD | OE(flags) | RC(flags) | D(dst) | A(src2) | B(src1));
case SLJIT_AND:
if (flags & ALT_FORM1) {
@ -345,11 +379,9 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
compiler->imm &= 0x1f;
return push_inst(compiler, RLWINM | RC(flags) | S(src1) | A(dst) | (compiler->imm << 11) | ((31 - compiler->imm) << 1));
}
else {
compiler->imm &= 0x3f;
return push_inst(compiler, RLDI(dst, src1, compiler->imm, 63 - compiler->imm, 1) | RC(flags));
}
}
return push_inst(compiler, ((flags & ALT_FORM2) ? SLW : SLD) | RC(flags) | S(src1) | A(dst) | B(src2));
case SLJIT_LSHR:
@ -359,33 +391,25 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
compiler->imm &= 0x1f;
return push_inst(compiler, RLWINM | RC(flags) | S(src1) | A(dst) | (((32 - compiler->imm) & 0x1f) << 11) | (compiler->imm << 6) | (31 << 1));
}
else {
compiler->imm &= 0x3f;
return push_inst(compiler, RLDI(dst, src1, 64 - compiler->imm, compiler->imm, 0) | RC(flags));
}
}
return push_inst(compiler, ((flags & ALT_FORM2) ? SRW : SRD) | RC(flags) | S(src1) | A(dst) | B(src2));
case SLJIT_ASHR:
if (flags & ALT_FORM3)
FAIL_IF(push_inst(compiler, MFXER | D(0)));
if (flags & ALT_FORM1) {
SLJIT_ASSERT(src2 == TMP_REG2);
if (flags & ALT_FORM2) {
compiler->imm &= 0x1f;
FAIL_IF(push_inst(compiler, SRAWI | RC(flags) | S(src1) | A(dst) | (compiler->imm << 11)));
return push_inst(compiler, SRAWI | RC(flags) | S(src1) | A(dst) | (compiler->imm << 11));
}
else {
compiler->imm &= 0x3f;
FAIL_IF(push_inst(compiler, SRADI | RC(flags) | S(src1) | A(dst) | ((compiler->imm & 0x1f) << 11) | ((compiler->imm & 0x20) >> 4)));
return push_inst(compiler, SRADI | RC(flags) | S(src1) | A(dst) | ((compiler->imm & 0x1f) << 11) | ((compiler->imm & 0x20) >> 4));
}
}
else
FAIL_IF(push_inst(compiler, ((flags & ALT_FORM2) ? SRAW : SRAD) | RC(flags) | S(src1) | A(dst) | B(src2)));
return (flags & ALT_FORM3) ? push_inst(compiler, MTXER | S(0)) : SLJIT_SUCCESS;
return push_inst(compiler, ((flags & ALT_FORM2) ? SRAW : SRAD) | RC(flags) | S(src1) | A(dst) | B(src2));
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -398,18 +422,19 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, ORI | S(reg) | A(reg) | IMM(init_value));
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
inst[0] = (inst[0] & 0xffff0000) | ((new_addr >> 48) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | ((new_addr >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_addr >> 16) & 0xffff);
inst[4] = (inst[4] & 0xffff0000) | (new_addr & 0xffff);
inst[0] = (inst[0] & 0xffff0000) | ((new_target >> 48) & 0xffff);
inst[1] = (inst[1] & 0xffff0000) | ((new_target >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_target >> 16) & 0xffff);
inst[4] = (inst[4] & 0xffff0000) | (new_target & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 5);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
@ -417,5 +442,6 @@ SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_consta
inst[1] = (inst[1] & 0xffff0000) | ((new_constant >> 32) & 0xffff);
inst[3] = (inst[3] & 0xffff0000) | ((new_constant >> 16) & 0xffff);
inst[4] = (inst[4] & 0xffff0000) | (new_constant & 0xffff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 5);
}
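On 64-bit PowerPC the constant is materialised by a five-instruction sequence; the patchers rewrite the four 16-bit immediates and leave inst[2] (the shift) untouched. A hedged standalone sketch of that split:

#include <stdint.h>
/* Illustrative only: how a 64-bit value maps onto the patched slots. */
static void patch_imm64(uint32_t inst[5], uint64_t value)
{
	inst[0] = (inst[0] & 0xffff0000u) | (uint32_t)((value >> 48) & 0xffff);
	inst[1] = (inst[1] & 0xffff0000u) | (uint32_t)((value >> 32) & 0xffff);
	/* inst[2] shifts the upper half into place and is not patched */
	inst[3] = (inst[3] & 0xffff0000u) | (uint32_t)((value >> 16) & 0xffff);
	inst[4] = (inst[4] & 0xffff0000u) | (uint32_t)(value & 0xffff);
}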

View File

@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -127,9 +127,9 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
/* Instruction bit sections.
OE and Rc flag (see ALT_SET_FLAGS). */
#define OERC(flags) (((flags & ALT_SET_FLAGS) >> 10) | (flags & ALT_SET_FLAGS))
#define OE(flags) ((flags) & ALT_SET_FLAGS)
/* Rc flag (see ALT_SET_FLAGS). */
#define RC(flags) ((flags & ALT_SET_FLAGS) >> 10)
#define RC(flags) (((flags) & ALT_SET_FLAGS) >> 10)
#define HI(opcode) ((opcode) << 26)
#define LO(opcode) ((opcode) << 1)
@ -154,6 +154,7 @@ static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
#define CMPL (HI(31) | LO(32))
#define CMPLI (HI(10))
#define CROR (HI(19) | LO(449))
#define DCBT (HI(31) | LO(278))
#define DIVD (HI(31) | LO(489))
#define DIVDU (HI(31) | LO(457))
#define DIVW (HI(31) | LO(491))
@ -249,7 +250,7 @@ static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
return SLJIT_SUCCESS;
}
static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
@ -267,7 +268,7 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_in
target_addr = jump->u.target;
else {
SLJIT_ASSERT(jump->flags & JUMP_LABEL);
target_addr = (sljit_uw)(code + jump->u.label->size);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
#if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL) && (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@ -275,7 +276,7 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_in
goto keep_address;
#endif
diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr)) & ~0x3l;
diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr) - executable_offset) & ~0x3l;
extra_jump_flags = 0;
if (jump->flags & IS_COND) {
@ -296,6 +297,7 @@ static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_in
jump->flags |= PATCH_B | extra_jump_flags;
return 1;
}
if (target_addr <= 0x03ffffff) {
jump->flags |= PATCH_B | PATCH_ABS_B | extra_jump_flags;
return 1;
@ -309,6 +311,7 @@ keep_address:
jump->flags |= PATCH_ABS32;
return 1;
}
if (target_addr <= 0x7fffffffffffl) {
jump->flags |= PATCH_ABS48;
return 1;
@ -326,6 +329,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
sljit_sw executable_offset;
sljit_uw addr;
struct sljit_label *label;
@ -349,9 +353,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
code_ptr = code;
word_count = 0;
executable_offset = SLJIT_EXEC_OFFSET(code);
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
@ -363,7 +370,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
/* These structures are ordered by their address. */
if (label && label->size == word_count) {
/* Just recording the address. */
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -373,7 +380,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#else
jump->addr = (sljit_uw)(code_ptr - 6);
#endif
if (detect_jump_type(jump, code_ptr, code)) {
if (detect_jump_type(jump, code_ptr, code, executable_offset)) {
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
code_ptr[-3] = code_ptr[0];
code_ptr -= 3;
@ -420,7 +427,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -438,11 +445,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
while (jump) {
do {
addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
buf_ptr = (sljit_ins*)jump->addr;
buf_ptr = (sljit_ins *)jump->addr;
if (jump->flags & PATCH_B) {
if (jump->flags & IS_COND) {
if (!(jump->flags & PATCH_ABS_B)) {
addr = addr - jump->addr;
addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
SLJIT_ASSERT((sljit_sw)addr <= 0x7fff && (sljit_sw)addr >= -0x8000);
*buf_ptr = BCx | (addr & 0xfffc) | ((*buf_ptr) & 0x03ff0001);
}
@ -453,7 +461,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
else {
if (!(jump->flags & PATCH_ABS_B)) {
addr = addr - jump->addr;
addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
SLJIT_ASSERT((sljit_sw)addr <= 0x01ffffff && (sljit_sw)addr >= -0x02000000);
*buf_ptr = Bx | (addr & 0x03fffffc) | ((*buf_ptr) & 0x1);
}
@ -464,6 +472,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
break;
}
/* Set the fields of immediate loads. */
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
buf_ptr[0] = (buf_ptr[0] & 0xffff0000) | ((addr >> 16) & 0xffff);
@ -492,24 +501,50 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
}
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
SLJIT_CACHE_FLUSH(code, code_ptr);
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (((sljit_sw)code_ptr) & 0x4)
code_ptr++;
sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_sw)code, (void*)sljit_generate_code);
return code_ptr;
#else
sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_sw)code, (void*)sljit_generate_code);
return code_ptr;
#endif
sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_sw)code, (void*)sljit_generate_code);
#endif
code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
SLJIT_CACHE_FLUSH(code, code_ptr);
#if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
return code_ptr;
#else
return code;
#endif
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
case SLJIT_HAS_PRE_UPDATE:
case SLJIT_HAS_CLZ:
return 1;
default:
return 0;
}
}
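sljit_has_cpu_feature() replaces the per-unit query functions such as sljit_is_fpu_available(), which this commit removes further down. A hedged usage sketch, assuming the public sljitLir.h header:

#include "sljitLir.h"
/* Sketch only: gate FPU code generation on the new feature query. */
static int can_emit_fpu_ops(void)
{
	return sljit_has_cpu_feature(SLJIT_HAS_FPU) != 0;
}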
/* --------------------------------------------------------------------- */
/* Entry, exit */
/* --------------------------------------------------------------------- */
@ -544,7 +579,6 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#define ALT_FORM3 0x040000
#define ALT_FORM4 0x080000
#define ALT_FORM5 0x100000
#define ALT_FORM6 0x200000
/* Source and destination is register. */
#define REG_DEST 0x000001
@ -559,7 +593,7 @@ ALT_SIGN_EXT 0x000200
ALT_SET_FLAGS 0x000400
ALT_FORM1 0x010000
...
ALT_FORM6 0x200000 */
ALT_FORM5 0x100000 */
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
#include "sljitNativePPC_32.c"
@ -726,7 +760,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
(((inst) & ~(INT_ALIGNED | UPDATE_REQ)) | (((flags) & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg)))
#endif
static const sljit_ins data_transfer_insts[64 + 8] = {
static const sljit_ins data_transfer_insts[64 + 16] = {
/* -------- Unsigned -------- */
@ -835,11 +869,20 @@ static const sljit_ins data_transfer_insts[64 + 8] = {
/* d n x s */ HI(31) | LO(727) /* stfdx */,
/* d n x l */ HI(31) | LO(599) /* lfdx */,
/* d w i s */ HI(55) /* stfdu */,
/* d w i l */ HI(51) /* lfdu */,
/* d w x s */ HI(31) | LO(759) /* stfdux */,
/* d w x l */ HI(31) | LO(631) /* lfdux */,
/* s n i s */ HI(52) /* stfs */,
/* s n i l */ HI(48) /* lfs */,
/* s n x s */ HI(31) | LO(663) /* stfsx */,
/* s n x l */ HI(31) | LO(535) /* lfsx */,
/* s w i s */ HI(53) /* stfsu */,
/* s w i l */ HI(49) /* lfsu */,
/* s w x s */ HI(31) | LO(695) /* stfsux */,
/* s w x l */ HI(31) | LO(567) /* lfsux */,
};
#undef ARCH_32_64
@ -850,7 +893,7 @@ static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 inp_
sljit_ins inst;
/* Should work when (arg & REG_MASK) == 0. */
SLJIT_COMPILE_ASSERT(A(0) == 0, a0_must_be_0);
SLJIT_ASSERT(A(0) == 0);
SLJIT_ASSERT(arg & SLJIT_MEM);
if (arg & OFFS_REG_MASK) {
@ -1005,10 +1048,6 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 inp_flags
#endif
if (inp_flags & WRITE_BACK) {
if (arg == reg) {
FAIL_IF(push_inst(compiler, OR | S(reg) | A(tmp_r) | B(reg)));
reg = tmp_r;
}
tmp_r = arg;
FAIL_IF(push_inst(compiler, ADDIS | D(arg) | A(arg) | IMM(high_short >> 16)));
}
@ -1131,7 +1170,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
sljit_s32 src1_r;
sljit_s32 src2_r;
sljit_s32 sugg_src2_r = TMP_REG2;
sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_FORM6 | ALT_SIGN_EXT | ALT_SET_FLAGS);
sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_SIGN_EXT | ALT_SET_FLAGS);
if (!(input_flags & ALT_KEEP_CACHE)) {
compiler->cache_arg = 0;
@ -1140,8 +1179,6 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
/* Destination check. */
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
dst_r = TMP_REG2;
}
else if (FAST_IS_REG(dst)) {
@ -1294,6 +1331,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
return SLJIT_SUCCESS;
}
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
sljit_s32 src, sljit_sw srcw)
{
if (!(src & OFFS_REG_MASK)) {
if (srcw == 0 && (src & REG_MASK) != SLJIT_UNUSED)
return push_inst(compiler, DCBT | A(0) | B(src & REG_MASK));
FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
/* Works with SLJIT_MEM0() case as well. */
return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
}
srcw &= 0x3;
if (srcw == 0)
return push_inst(compiler, DCBT | A(src & REG_MASK) | B(OFFS_REG(src)));
#if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
FAIL_IF(push_inst(compiler, RLWINM | S(OFFS_REG(src)) | A(TMP_REG1) | (srcw << 11) | ((31 - srcw) << 1)));
#else
FAIL_IF(push_inst(compiler, RLDI(TMP_REG1, OFFS_REG(src), srcw, 63 - srcw, 1)));
#endif
return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
}
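emit_prefetch() backs the new SLJIT_UNUSED-destination path in sljit_emit_op1 below: a MOV whose result is discarded but whose source is a memory operand is turned into a dcbt cache-touch hint. The address it asks dcbt to touch is, in effect, the following (illustrative helper, not part of the diff):

#include <stdint.h>
/* Sketch only: address for the SLJIT_MEM2(base, index) case, where 'scale'
   is the low two bits of srcw (the shift performed via RLWINM/RLDI). */
static uintptr_t prefetch_address(uintptr_t base, uintptr_t index, unsigned scale)
{
	return base + (index << (scale & 0x3));
}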
#define EMIT_MOV(type, type_flags, type_cast) \
emit_op(compiler, (src & SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? type_cast srcw : srcw)
@ -1301,7 +1363,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
sljit_s32 flags = HAS_FLAGS(op) ? ALT_SET_FLAGS : 0;
sljit_s32 op_flags = GET_ALL_FLAGS(op);
CHECK_ERROR();
@ -1309,11 +1371,18 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src, srcw);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) {
if (op <= SLJIT_MOV_P && (src & SLJIT_MEM))
return emit_prefetch(compiler, src, srcw);
return SLJIT_SUCCESS;
}
op = GET_OPCODE(op);
if ((src & SLJIT_IMM) && srcw == 0)
src = TMP_ZERO;
if (op_flags & SLJIT_SET_O)
if (GET_FLAG_TYPE(op_flags) == SLJIT_OVERFLOW)
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
if (op_flags & SLJIT_I32_OP) {
@ -1339,6 +1408,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
flags |= INT_DATA | SIGNED_DATA;
if (src & SLJIT_IMM)
srcw = (sljit_s32)srcw;
if (HAS_FLAGS(op_flags))
flags |= ALT_SIGN_EXT;
}
#endif
}
@ -1404,7 +1475,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_NOT, flags, dst, dstw, TMP_REG1, 0, src, srcw);
case SLJIT_NEG:
return emit_op(compiler, SLJIT_NEG, flags, dst, dstw, TMP_REG1, 0, src, srcw);
return emit_op(compiler, SLJIT_NEG, flags | (GET_FLAG_TYPE(op_flags) ? ALT_FORM1 : 0), dst, dstw, TMP_REG1, 0, src, srcw);
case SLJIT_CLZ:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@ -1457,7 +1528,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
sljit_s32 flags = GET_FLAGS(op) ? ALT_SET_FLAGS : 0;
sljit_s32 flags = HAS_FLAGS(op) ? ALT_SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@ -1465,6 +1536,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
return SLJIT_SUCCESS;
if ((src1 & SLJIT_IMM) && src1w == 0)
src1 = TMP_ZERO;
if ((src2 & SLJIT_IMM) && src2w == 0)
@ -1478,45 +1552,48 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
src1w = (sljit_s32)(src1w);
if (src2 & SLJIT_IMM)
src2w = (sljit_s32)(src2w);
if (GET_FLAGS(op))
if (HAS_FLAGS(op))
flags |= ALT_SIGN_EXT;
}
#endif
if (op & SLJIT_SET_O)
if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
if (src2 == TMP_REG2)
flags |= ALT_KEEP_CACHE;
switch (GET_OPCODE(op)) {
case SLJIT_ADD:
if (!GET_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
}
if (TEST_SL_IMM(src1, src1w)) {
compiler->imm = src1w & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
}
if (TEST_SH_IMM(src2, src2w)) {
compiler->imm = (src2w >> 16) & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
if (TEST_SH_IMM(src1, src1w)) {
compiler->imm = (src1w >> 16) & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
}
/* Range between -1 and -32768 is covered above. */
if (TEST_ADD_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffffffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
}
if (TEST_ADD_IMM(src1, src1w)) {
compiler->imm = src1w & 0xffffffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
if (!(GET_FLAGS(op) & (SLJIT_SET_E | SLJIT_SET_O))) {
if (HAS_FLAGS(op)) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
@ -1526,75 +1603,75 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
return emit_op(compiler, SLJIT_ADD, flags, dst, dstw, src1, src1w, src2, src2w);
return emit_op(compiler, SLJIT_ADD, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM4 : 0), dst, dstw, src1, src1w, src2, src2w);
case SLJIT_ADDC:
return emit_op(compiler, SLJIT_ADDC, flags | (!(op & SLJIT_KEEP_FLAGS) ? 0 : ALT_FORM1), dst, dstw, src1, src1w, src2, src2w);
return emit_op(compiler, SLJIT_ADDC, flags, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SUB:
if (!GET_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
if (GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_LESS_EQUAL) {
if (dst == SLJIT_UNUSED) {
if (TEST_UL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
}
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
}
if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= (SIMM_MAX + 1)) {
compiler->imm = src2w;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM3, dst, dstw, src1, src1w, src2, src2w);
}
if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, src2, src2w);
if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
if (TEST_SL_IMM(src2, -src2w)) {
compiler->imm = (-src2w) & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
}
if (TEST_SL_IMM(src1, src1w)) {
compiler->imm = src1w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
}
if (TEST_SH_IMM(src2, -src2w)) {
compiler->imm = ((-src2w) >> 16) & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
/* Range between -1 and -32768 is covered above. */
if (TEST_ADD_IMM(src2, -src2w)) {
compiler->imm = -src2w & 0xffffffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
}
}
if (dst == SLJIT_UNUSED && (op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S)) && !(op & (SLJIT_SET_O | SLJIT_SET_C))) {
if (!(op & SLJIT_SET_U)) {
/* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
if (dst == SLJIT_UNUSED && GET_FLAG_TYPE(op) != GET_FLAG_TYPE(SLJIT_SET_CARRY)) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
}
if (GET_FLAGS(op) == SLJIT_SET_E && TEST_SL_IMM(src1, src1w)) {
compiler->imm = src1w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
if (!(op & (SLJIT_SET_E | SLJIT_SET_S))) {
/* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
if (TEST_UL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4 | ALT_FORM5, dst, dstw, src1, src1w, TMP_REG2, 0);
}
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4, dst, dstw, src1, src1w, src2, src2w);
}
if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= 0x7fff) {
compiler->imm = src2w;
return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
return emit_op(compiler, SLJIT_SUB, flags | ((op & SLJIT_SET_U) ? ALT_FORM4 : 0) | ((op & (SLJIT_SET_E | SLJIT_SET_S)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
}
if (!(op & (SLJIT_SET_E | SLJIT_SET_U | SLJIT_SET_S | SLJIT_SET_O))) {
if (TEST_SL_IMM(src2, -src2w)) {
compiler->imm = (-src2w) & 0xffff;
return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
}
}
/* We know ALT_SIGN_EXT is set if it is an SLJIT_I32_OP on 64 bit systems. */
return emit_op(compiler, SLJIT_SUB, flags | (!(op & SLJIT_SET_U) ? 0 : ALT_FORM6), dst, dstw, src1, src1w, src2, src2w);
return emit_op(compiler, SLJIT_SUB, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
case SLJIT_SUBC:
return emit_op(compiler, SLJIT_SUBC, flags | (!(op & SLJIT_KEEP_FLAGS) ? 0 : ALT_FORM1), dst, dstw, src1, src1w, src2, src2w);
return emit_op(compiler, SLJIT_SUBC, flags, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_MUL:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (op & SLJIT_I32_OP)
flags |= ALT_FORM2;
#endif
if (!GET_FLAGS(op)) {
if (!HAS_FLAGS(op)) {
if (TEST_SL_IMM(src2, src2w)) {
compiler->imm = src2w & 0xffff;
return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
@ -1604,13 +1681,15 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
else
FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
return emit_op(compiler, SLJIT_MUL, flags, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_AND:
case SLJIT_OR:
case SLJIT_XOR:
/* Commutative unsigned operations. */
if (!GET_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) {
if (!HAS_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) {
if (TEST_UL_IMM(src2, src2w)) {
compiler->imm = src2w;
return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
@ -1628,7 +1707,8 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
}
}
if (!GET_FLAGS(op) && GET_OPCODE(op) != SLJIT_AND) {
if (GET_OPCODE(op) != SLJIT_AND && GET_OPCODE(op) != SLJIT_AND) {
/* Unlike or and xor, and resets unwanted bits as well. */
if (TEST_UI_IMM(src2, src2w)) {
compiler->imm = src2w;
return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
@ -1640,12 +1720,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
}
return emit_op(compiler, GET_OPCODE(op), flags, dst, dstw, src1, src1w, src2, src2w);
case SLJIT_ASHR:
if (op & SLJIT_KEEP_FLAGS)
flags |= ALT_FORM3;
/* Fall through. */
case SLJIT_SHL:
case SLJIT_LSHR:
case SLJIT_ASHR:
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (op & SLJIT_I32_OP)
flags |= ALT_FORM2;
@ -1685,17 +1762,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* Floating point operators */
/* --------------------------------------------------------------------- */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
}
#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 6))
#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 5))
#define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double)
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
@ -1727,9 +1794,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
op = GET_OPCODE(op);
FAIL_IF(push_inst(compiler, (op == SLJIT_CONV_S32_FROM_F64 ? FCTIWZ : FCTIDZ) | FD(TMP_FREG1) | FB(src)));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (op == SLJIT_CONV_SW_FROM_F64) {
if (FAST_IS_REG(dst)) {
FAIL_IF(emit_op_mem2(compiler, DOUBLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, 0, 0));
@ -1737,12 +1801,8 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
}
return emit_op_mem2(compiler, DOUBLE_DATA, TMP_FREG1, dst, dstw, 0, 0);
}
#else
FAIL_IF(push_inst(compiler, FCTIWZ | FD(TMP_FREG1) | FB(src)));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
#endif
if (FAST_IS_REG(dst)) {
@ -2019,10 +2079,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
/* For UNUSED dst. Uncommon, but possible. */
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst))
return push_inst(compiler, MFLR | D(dst));
@ -2079,33 +2135,33 @@ static sljit_ins get_bo_bi_flags(sljit_s32 type)
return (4 << 21) | (2 << 16);
case SLJIT_LESS:
case SLJIT_LESS_F64:
return (12 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER_EQUAL:
case SLJIT_GREATER_EQUAL_F64:
return (4 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER:
case SLJIT_GREATER_F64:
return (12 << 21) | ((4 + 1) << 16);
case SLJIT_LESS_EQUAL:
case SLJIT_LESS_EQUAL_F64:
return (4 << 21) | ((4 + 1) << 16);
case SLJIT_SIG_LESS:
return (12 << 21) | (0 << 16);
case SLJIT_GREATER_EQUAL:
case SLJIT_SIG_GREATER_EQUAL:
return (4 << 21) | (0 << 16);
case SLJIT_GREATER:
case SLJIT_SIG_GREATER:
return (12 << 21) | (1 << 16);
case SLJIT_LESS_EQUAL:
case SLJIT_SIG_LESS_EQUAL:
return (4 << 21) | (1 << 16);
case SLJIT_LESS_F64:
return (12 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER_EQUAL_F64:
return (4 << 21) | ((4 + 0) << 16);
case SLJIT_GREATER_F64:
return (12 << 21) | ((4 + 1) << 16);
case SLJIT_LESS_EQUAL_F64:
return (4 << 21) | ((4 + 1) << 16);
case SLJIT_OVERFLOW:
case SLJIT_MUL_OVERFLOW:
return (12 << 21) | (3 << 16);
@ -2207,153 +2263,147 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
return push_inst(compiler, BCCTR | (20 << 21) | (type >= SLJIT_FAST_CALL ? 1 : 0));
}
/* Get a bit from CR, all other bits are zeroed. */
#define GET_CR_BIT(bit, dst) \
FAIL_IF(push_inst(compiler, MFCR | D(dst))); \
FAIL_IF(push_inst(compiler, RLWINM | S(dst) | A(dst) | ((1 + (bit)) << 11) | (31 << 6) | (31 << 1)));
#define INVERT_BIT(dst) \
FAIL_IF(push_inst(compiler, XORI | S(dst) | A(dst) | 0x1));
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw,
sljit_s32 type)
{
sljit_s32 reg, input_flags;
sljit_s32 flags = GET_ALL_FLAGS(op);
sljit_sw original_dstw = dstw;
sljit_s32 reg, input_flags, cr_bit, invert;
sljit_s32 saved_op = op;
sljit_sw saved_dstw = dstw;
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
ADJUST_LOCAL_OFFSET(dst, dstw);
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
input_flags = (op & SLJIT_I32_OP) ? INT_DATA : WORD_DATA;
#else
input_flags = WORD_DATA;
#endif
op = GET_OPCODE(op);
reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
ADJUST_LOCAL_OFFSET(src, srcw);
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
input_flags = (flags & SLJIT_I32_OP) ? INT_DATA : WORD_DATA;
#else
input_flags = WORD_DATA;
#endif
FAIL_IF(emit_op_mem2(compiler, input_flags | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
src = TMP_REG1;
srcw = 0;
}
if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
FAIL_IF(emit_op_mem2(compiler, input_flags | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));
invert = 0;
cr_bit = 0;
switch (type & 0xff) {
case SLJIT_EQUAL:
GET_CR_BIT(2, reg);
break;
case SLJIT_NOT_EQUAL:
GET_CR_BIT(2, reg);
INVERT_BIT(reg);
break;
case SLJIT_LESS:
case SLJIT_LESS_F64:
GET_CR_BIT(4 + 0, reg);
case SLJIT_SIG_LESS:
break;
case SLJIT_GREATER_EQUAL:
case SLJIT_GREATER_EQUAL_F64:
GET_CR_BIT(4 + 0, reg);
INVERT_BIT(reg);
case SLJIT_SIG_GREATER_EQUAL:
invert = 1;
break;
case SLJIT_GREATER:
case SLJIT_GREATER_F64:
GET_CR_BIT(4 + 1, reg);
case SLJIT_SIG_GREATER:
cr_bit = 1;
break;
case SLJIT_LESS_EQUAL:
case SLJIT_LESS_EQUAL_F64:
GET_CR_BIT(4 + 1, reg);
INVERT_BIT(reg);
break;
case SLJIT_SIG_LESS:
GET_CR_BIT(0, reg);
break;
case SLJIT_SIG_GREATER_EQUAL:
GET_CR_BIT(0, reg);
INVERT_BIT(reg);
break;
case SLJIT_SIG_GREATER:
GET_CR_BIT(1, reg);
break;
case SLJIT_SIG_LESS_EQUAL:
GET_CR_BIT(1, reg);
INVERT_BIT(reg);
cr_bit = 1;
invert = 1;
break;
case SLJIT_EQUAL:
cr_bit = 2;
break;
case SLJIT_NOT_EQUAL:
cr_bit = 2;
invert = 1;
break;
case SLJIT_OVERFLOW:
case SLJIT_MUL_OVERFLOW:
GET_CR_BIT(3, reg);
cr_bit = 3;
break;
case SLJIT_NOT_OVERFLOW:
case SLJIT_MUL_NOT_OVERFLOW:
GET_CR_BIT(3, reg);
INVERT_BIT(reg);
cr_bit = 3;
invert = 1;
break;
case SLJIT_LESS_F64:
cr_bit = 4 + 0;
break;
case SLJIT_GREATER_EQUAL_F64:
cr_bit = 4 + 0;
invert = 1;
break;
case SLJIT_GREATER_F64:
cr_bit = 4 + 1;
break;
case SLJIT_LESS_EQUAL_F64:
cr_bit = 4 + 1;
invert = 1;
break;
case SLJIT_EQUAL_F64:
GET_CR_BIT(4 + 2, reg);
cr_bit = 4 + 2;
break;
case SLJIT_NOT_EQUAL_F64:
GET_CR_BIT(4 + 2, reg);
INVERT_BIT(reg);
cr_bit = 4 + 2;
invert = 1;
break;
case SLJIT_UNORDERED_F64:
GET_CR_BIT(4 + 3, reg);
cr_bit = 4 + 3;
break;
case SLJIT_ORDERED_F64:
GET_CR_BIT(4 + 3, reg);
INVERT_BIT(reg);
cr_bit = 4 + 3;
invert = 1;
break;
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
break;
}
FAIL_IF(push_inst(compiler, MFCR | D(reg)));
FAIL_IF(push_inst(compiler, RLWINM | S(reg) | A(reg) | ((1 + (cr_bit)) << 11) | (31 << 6) | (31 << 1)));
if (invert)
FAIL_IF(push_inst(compiler, XORI | S(reg) | A(reg) | 0x1));
if (op < SLJIT_ADD) {
#if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
if (op == SLJIT_MOV)
input_flags = WORD_DATA;
else {
op = SLJIT_MOV_U32;
input_flags = INT_DATA;
}
#else
op = SLJIT_MOV;
input_flags = WORD_DATA;
#endif
if (reg != TMP_REG2)
if (!(dst & SLJIT_MEM))
return SLJIT_SUCCESS;
return emit_op(compiler, op, input_flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
return emit_op_mem2(compiler, input_flags, reg, dst, dstw, reg, 0);
}
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
compiler->skip_checks = 1;
#endif
return sljit_emit_op2(compiler, op | flags, dst, original_dstw, src, srcw, TMP_REG2, 0);
if (dst & SLJIT_MEM)
return sljit_emit_op2(compiler, saved_op, dst, saved_dstw, TMP_REG1, 0, TMP_REG2, 0);
return sljit_emit_op2(compiler, saved_op, dst, 0, dst, 0, TMP_REG2, 0);
}
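In the rewritten sljit_emit_op_flags above, the GET_CR_BIT/INVERT_BIT macros are replaced by a computed cr_bit/invert pair and a single MFCR + RLWINM + optional XORI sequence. A hedged sketch of what that sequence computes from a condition-register image (PowerPC numbers CR bit 0 as the most significant bit after mfcr):

#include <stdint.h>
/* Illustrative only: boolean produced from the condition register. */
static uint32_t cr_bit_as_bool(uint32_t cr, unsigned cr_bit, int invert)
{
	uint32_t bit = (cr >> (31 - cr_bit)) & 1u;	/* RLWINM rotates the bit into the LSB */
	return invert ? (bit ^ 1u) : bit;		/* XORI flips it for inverted conditions */
}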
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
@ -2369,7 +2419,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
PTR_FAIL_IF(!const_);
set_const(const_, compiler);
reg = SLOW_IS_REG(dst) ? dst : TMP_REG2;
reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(emit_const(compiler, reg, init_value));


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -60,7 +60,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, SRA | D(dst) | S1(dst) | IMM(24), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
case SLJIT_MOV_U16:
@ -71,7 +71,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return push_inst(compiler, (op == SLJIT_MOV_S16 ? SRA : SRL) | D(dst) | S1(dst) | IMM(16), DR(dst));
}
else if (dst != src2)
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
case SLJIT_NOT:
@ -80,18 +80,17 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
case SLJIT_CLZ:
SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
/* sparc 32 does not support SLJIT_KEEP_FLAGS. Not sure I can fix this. */
FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(src2) | S2(0), SET_FLAGS));
FAIL_IF(push_inst(compiler, OR | D(TMP_REG1) | S1(0) | S2(src2), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, BICC | DA(0x1) | (7 & DISP_MASK), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, OR | (flags & SET_FLAGS) | D(dst) | S1(0) | IMM(32), UNMOVABLE_INS | (flags & SET_FLAGS)));
FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(32), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, OR | D(dst) | S1(0) | IMM(-1), DR(dst)));
/* Loop. */
FAIL_IF(push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(TMP_REG1) | S2(0), SET_FLAGS));
FAIL_IF(push_inst(compiler, SLL | D(TMP_REG1) | S1(TMP_REG1) | IMM(1), DR(TMP_REG1)));
FAIL_IF(push_inst(compiler, BICC | DA(0xe) | (-2 & DISP_MASK), UNMOVABLE_INS));
return push_inst(compiler, ADD | (flags & SET_FLAGS) | D(dst) | S1(dst) | IMM(1), UNMOVABLE_INS | (flags & SET_FLAGS));
return push_inst(compiler, ADD | D(dst) | S1(dst) | IMM(1), UNMOVABLE_INS);
case SLJIT_ADD:
return push_inst(compiler, ADD | (flags & SET_FLAGS) | D(dst) | S1(src1) | ARG2(flags, src2), DR(dst) | (flags & SET_FLAGS));
@ -135,7 +134,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return !(flags & SET_FLAGS) ? SLJIT_SUCCESS : push_inst(compiler, SUB | SET_FLAGS | D(0) | S1(dst) | S2(0), SET_FLAGS);
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -145,20 +144,22 @@ static SLJIT_INLINE sljit_s32 emit_const(struct sljit_compiler *compiler, sljit_
return push_inst(compiler, OR | D(dst) | S1(dst) | IMM_ARG | (init_value & 0x3ff), DR(dst));
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffc00000) | ((new_addr >> 10) & 0x3fffff);
inst[1] = (inst[1] & 0xfffffc00) | (new_addr & 0x3ff);
inst[0] = (inst[0] & 0xffc00000) | ((new_target >> 10) & 0x3fffff);
inst[1] = (inst[1] & 0xfffffc00) | (new_target & 0x3ff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
sljit_ins *inst = (sljit_ins*)addr;
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & 0xffc00000) | ((new_constant >> 10) & 0x3fffff);
inst[1] = (inst[1] & 0xfffffc00) | (new_constant & 0x3ff);
inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
SLJIT_CACHE_FLUSH(inst, inst + 2);
}
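The two SPARC patch routines above rewrite a SETHI/OR pair in place: the first
instruction carries the upper 22 bits of the value, the second the lower 10
bits. A minimal sketch of the inverse operation (illustrative only, reusing the
sljit_ins/sljit_uw types, not part of the diff) shows how the masks fit
together:

static sljit_uw decode_sethi_or_imm32(const sljit_ins *inst)
{
	/* The SETHI immediate lives in the low 22 bits of inst[0],
	   the OR immediate in the low 10 bits of inst[1]. */
	return ((inst[0] & 0x3fffff) << 10) | (inst[1] & 0x3ff);
}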


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -199,7 +199,7 @@ static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit
return SLJIT_SUCCESS;
}
static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
{
sljit_sw diff;
sljit_uw target_addr;
@ -213,7 +213,7 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
target_addr = jump->u.target;
else {
SLJIT_ASSERT(jump->flags & JUMP_LABEL);
target_addr = (sljit_uw)(code + jump->u.label->size);
target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
}
inst = (sljit_ins*)jump->addr;
@ -239,8 +239,9 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
if (jump->flags & IS_COND)
inst--;
diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2;
if (jump->flags & IS_MOVABLE) {
diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1)) >> 2;
if (diff <= MAX_DISP && diff >= MIN_DISP) {
jump->flags |= PATCH_B;
inst--;
@ -257,7 +258,8 @@ static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_i
}
}
diff = ((sljit_sw)target_addr - (sljit_sw)(inst)) >> 2;
diff += sizeof(sljit_ins);
if (diff <= MAX_DISP && diff >= MIN_DISP) {
jump->flags |= PATCH_B;
if (jump->flags & IS_COND)
@ -280,6 +282,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
sljit_ins *buf_ptr;
sljit_ins *buf_end;
sljit_uw word_count;
sljit_sw executable_offset;
sljit_uw addr;
struct sljit_label *label;
@ -296,9 +299,12 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
code_ptr = code;
word_count = 0;
executable_offset = SLJIT_EXEC_OFFSET(code);
label = compiler->labels;
jump = compiler->jumps;
const_ = compiler->consts;
do {
buf_ptr = (sljit_ins*)buf->memory;
buf_end = buf_ptr + (buf->used_size >> 2);
@ -310,7 +316,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
/* These structures are ordered by their address. */
if (label && label->size == word_count) {
/* Just recording the address. */
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -320,7 +326,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
#else
jump->addr = (sljit_uw)(code_ptr - 6);
#endif
code_ptr = detect_jump_type(jump, code_ptr, code);
code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
jump = jump->next;
}
if (const_ && const_->addr == word_count) {
@ -336,7 +342,7 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
} while (buf);
if (label && label->size == word_count) {
label->addr = (sljit_uw)code_ptr;
label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
label->size = code_ptr - code;
label = label->next;
}
@ -350,16 +356,16 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
while (jump) {
do {
addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
buf_ptr = (sljit_ins*)jump->addr;
buf_ptr = (sljit_ins *)jump->addr;
if (jump->flags & PATCH_CALL) {
addr = (sljit_sw)(addr - jump->addr) >> 2;
addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
SLJIT_ASSERT((sljit_sw)addr <= 0x1fffffff && (sljit_sw)addr >= -0x20000000);
buf_ptr[0] = CALL | (addr & 0x3fffffff);
break;
}
if (jump->flags & PATCH_B) {
addr = (sljit_sw)(addr - jump->addr) >> 2;
addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
SLJIT_ASSERT((sljit_sw)addr <= MAX_DISP && (sljit_sw)addr >= MIN_DISP);
buf_ptr[0] = (buf_ptr[0] & ~DISP_MASK) | (addr & DISP_MASK);
break;
@ -378,11 +384,37 @@ SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compil
compiler->error = SLJIT_ERR_COMPILED;
compiler->executable_offset = executable_offset;
compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
SLJIT_CACHE_FLUSH(code, code_ptr);
return code;
}
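A recurring change in this import is the executable_offset value threaded
through code generation and patching. It covers setups where the JIT buffer is
written through one mapping and executed through another; a rough sketch of
the relationship, with hypothetical pointer names:

/* write_ptr is where the generator stores instructions; the address the CPU
   executes them from differs by a per-buffer constant. */
exec_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(write_ptr, executable_offset);
/* i.e. (sljit_uw)exec_ptr == (sljit_uw)write_ptr + executable_offset, which
   is why labels and branch targets above are recorded via the macro while
   the buffer is still addressed through its writable mapping. */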
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
switch (feature_type) {
case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
#if (defined SLJIT_CONFIG_SPARC_64 && SLJIT_CONFIG_SPARC_64)
case SLJIT_HAS_CMOV:
return 1;
#endif
default:
return 0;
}
}
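This release replaces the per-backend sljit_is_fpu_available() entry point
(removed further down) with the generic sljit_has_cpu_feature() query shown
above. A hedged usage sketch, not taken from the diff:

/* Emit an FPU code path only when the backend reports one. */
if (sljit_has_cpu_feature(SLJIT_HAS_FPU)) {
	/* ... emit sljit_emit_fop1()/sljit_emit_fop2() based code ... */
}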
/* --------------------------------------------------------------------- */
/* Entry, exit */
/* --------------------------------------------------------------------- */
@ -567,7 +599,6 @@ static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sl
base = arg & REG_MASK;
if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
argw &= 0x3;
SLJIT_ASSERT(argw != 0);
/* Using the cache. */
if (((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) && (argw == compiler->cache_argw))
@ -652,11 +683,8 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
compiler->cache_argw = 0;
}
if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32 && !(src2 & SLJIT_MEM))
return SLJIT_SUCCESS;
}
else if (FAST_IS_REG(dst)) {
if (dst != SLJIT_UNUSED) {
if (FAST_IS_REG(dst)) {
dst_r = dst;
flags |= REG_DEST;
if (op >= SLJIT_MOV && op <= SLJIT_MOVU_S32)
@ -664,6 +692,7 @@ static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s3
}
else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
flags |= SLOW_DEST;
}
if (flags & IMM_OP) {
if ((src2 & SLJIT_IMM) && src2w) {
@ -812,13 +841,16 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compile
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw)
{
sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0;
sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
ADJUST_LOCAL_OFFSET(dst, dstw);
ADJUST_LOCAL_OFFSET(src, srcw);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
return SLJIT_SUCCESS;
op = GET_OPCODE(op);
switch (op) {
case SLJIT_MOV:
@ -881,7 +913,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
sljit_s32 src1, sljit_sw src1w,
sljit_s32 src2, sljit_sw src2w)
{
sljit_s32 flags = GET_FLAGS(op) ? SET_FLAGS : 0;
sljit_s32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
@ -889,6 +921,9 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
ADJUST_LOCAL_OFFSET(src1, src1w);
ADJUST_LOCAL_OFFSET(src2, src2w);
if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
return SLJIT_SUCCESS;
op = GET_OPCODE(op);
switch (op) {
case SLJIT_ADD:
@ -910,7 +945,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compile
if (src2 & SLJIT_IMM)
src2w &= 0x1f;
#else
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
#endif
return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
}
@ -943,16 +978,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *c
/* Floating point operators */
/* --------------------------------------------------------------------- */
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
#ifdef SLJIT_IS_FPU_AVAILABLE
return SLJIT_IS_FPU_AVAILABLE;
#else
/* Available by default. */
return 1;
#endif
}
#define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_F32_OP) >> 7))
#define SELECT_FOP(op, single, double) ((op & SLJIT_F32_OP) ? single : double)
#define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw))
@ -970,9 +995,6 @@ static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_comp
FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOI, FDTOI) | DA(TMP_FREG1) | S2A(src), MOVABLE_INS));
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst)) {
FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET));
return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET);
@ -1186,10 +1208,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *
CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
ADJUST_LOCAL_OFFSET(dst, dstw);
/* For UNUSED dst. Uncommon, but possible. */
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
if (FAST_IS_REG(dst))
return push_inst(compiler, OR | D(dst) | S1(0) | S2(TMP_LINK), DR(dst));
@ -1285,7 +1303,7 @@ static sljit_ins get_cc(sljit_s32 type)
return DA(0xf);
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return DA(0x8);
}
}
@ -1373,30 +1391,23 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compi
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
sljit_s32 dst, sljit_sw dstw,
sljit_s32 src, sljit_sw srcw,
sljit_s32 type)
{
sljit_s32 reg, flags = (GET_FLAGS(op) ? SET_FLAGS : 0);
sljit_s32 reg, flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
CHECK_ERROR();
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
ADJUST_LOCAL_OFFSET(dst, dstw);
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
op = GET_OPCODE(op);
reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
compiler->cache_arg = 0;
compiler->cache_argw = 0;
if (op >= SLJIT_ADD && (src & SLJIT_MEM)) {
ADJUST_LOCAL_OFFSET(src, srcw);
FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
src = TMP_REG1;
srcw = 0;
}
if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));
type &= 0xff;
if (type < SLJIT_EQUAL_F64)
@ -1407,10 +1418,31 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(1), UNMOVABLE_INS));
FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(0), UNMOVABLE_INS));
if (op >= SLJIT_ADD)
return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE, dst, dstw, src, srcw, TMP_REG2, 0);
if (op >= SLJIT_ADD) {
flags |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE;
if (dst & SLJIT_MEM)
return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
return emit_op(compiler, op, flags, dst, 0, dst, 0, TMP_REG2, 0);
}
return (reg == TMP_REG2) ? emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw) : SLJIT_SUCCESS;
if (!(dst & SLJIT_MEM))
return SLJIT_SUCCESS;
return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw);
#else
#error "Implementation required"
#endif
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
sljit_s32 dst_reg,
sljit_s32 src, sljit_sw srcw)
{
CHECK_ERROR();
CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);;
#else
#error "Implementation required"
#endif
@ -1429,7 +1461,7 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compi
PTR_FAIL_IF(!const_);
set_const(const_, compiler);
reg = SLOW_IS_REG(dst) ? dst : TMP_REG2;
reg = FAST_IS_REG(dst) ? dst : TMP_REG2;
PTR_FAIL_IF(emit_const(compiler, reg, init_value));


@ -2,7 +2,7 @@
* Stack-less Just-In-Time compiler
*
* Copyright 2013-2013 Tilera Corporation(jiwang@tilera.com). All rights reserved.
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:


@ -2,7 +2,7 @@
* Stack-less Just-In-Time compiler
*
* Copyright 2013-2013 Tilera Corporation(jiwang@tilera.com). All rights reserved.
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -687,7 +687,7 @@ static sljit_s32 update_buffer(struct sljit_compiler *compiler)
inst_buf[0] = inst1;
inst_buf_index = 1;
} else
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
#ifdef TILEGX_JIT_DEBUG
return push_inst_nodebug(compiler, bits);
@ -727,10 +727,10 @@ static sljit_s32 update_buffer(struct sljit_compiler *compiler)
return push_inst(compiler, bits);
#endif
} else
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
static sljit_s32 flush_buffer(struct sljit_compiler *compiler)
@ -814,7 +814,7 @@ static sljit_s32 push_3_buffer(struct sljit_compiler *compiler, tilegx_mnemonic
break;
default:
printf("unrecoginzed opc: %s\n", opcode->name);
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
inst_buf_index++;
@ -859,7 +859,7 @@ static sljit_s32 push_2_buffer(struct sljit_compiler *compiler, tilegx_mnemonic
break;
default:
printf("unrecoginzed opc: %s\n", opcode->name);
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
inst_buf_index++;
@ -1952,7 +1952,7 @@ static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sl
return SLJIT_SUCCESS;
}
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
return SLJIT_SUCCESS;
}
@ -2092,9 +2092,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
ADJUST_LOCAL_OFFSET(dst, dstw);
if (dst == SLJIT_UNUSED)
return SLJIT_SUCCESS;
op = GET_OPCODE(op);
if (op == SLJIT_MOV_S32 || op == SLJIT_MOV_U32)
mem_type = INT_DATA | SIGNED_DATA;
@ -2143,7 +2140,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *co
break;
default:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
dst_ar = sugg_dst_ar;
break;
}
@ -2186,7 +2183,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compile
case SLJIT_DIVMOD_SW:
case SLJIT_DIV_UW:
case SLJIT_DIV_SW:
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
return SLJIT_SUCCESS;
@ -2487,19 +2484,14 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_jump(struct sljit_compil
return jump;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_is_fpu_available(void)
{
return 0;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)
{
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)
{
SLJIT_ASSERT_STOP();
SLJIT_UNREACHABLE();
}
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
@ -2526,13 +2518,13 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_comp
return const_;
}
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr)
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target)
{
sljit_ins *inst = (sljit_ins *)addr;
inst[0] = (inst[0] & ~(0xFFFFL << 43)) | (((new_addr >> 32) & 0xffff) << 43);
inst[1] = (inst[1] & ~(0xFFFFL << 43)) | (((new_addr >> 16) & 0xffff) << 43);
inst[2] = (inst[2] & ~(0xFFFFL << 43)) | ((new_addr & 0xffff) << 43);
inst[0] = (inst[0] & ~(0xFFFFL << 43)) | (((new_target >> 32) & 0xffff) << 43);
inst[1] = (inst[1] & ~(0xFFFFL << 43)) | (((new_target >> 16) & 0xffff) << 43);
inst[2] = (inst[2] & ~(0xFFFFL << 43)) | ((new_target & 0xffff) << 43);
SLJIT_CACHE_FLUSH(inst, inst + 3);
}
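/* Illustrative note, not in the diff: each of the three TILE-GX bundles
   patched above keeps a 16-bit immediate field at bit 43, so the encoded
   target reassembles as
       (((inst[0] >> 43) & 0xffff) << 32) |
       (((inst[1] >> 43) & 0xffff) << 16) |
        ((inst[2] >> 43) & 0xffff).                                        */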


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -38,7 +38,7 @@ static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, s
return SLJIT_SUCCESS;
}
static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type)
static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset)
{
if (type == SLJIT_JUMP) {
*code_ptr++ = JMP_i32;
@ -57,7 +57,7 @@ static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_
if (jump->flags & JUMP_LABEL)
jump->flags |= PATCH_MW;
else
sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4));
sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset);
code_ptr += 4;
return code_ptr;
@ -75,9 +75,30 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
compiler->args = args;
compiler->flags_saved = 0;
size = 1 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
/* [esp+0] for saving temporaries and third argument for calls. */
compiler->saveds_offset = 1 * sizeof(sljit_sw);
#else
/* [esp+0] for saving temporaries and space for maximum three arguments. */
if (scratches <= 1)
compiler->saveds_offset = 1 * sizeof(sljit_sw);
else
compiler->saveds_offset = ((scratches == 2) ? 2 : 3) * sizeof(sljit_sw);
#endif
if (scratches > 3)
compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);
compiler->locals_offset = compiler->saveds_offset;
if (saveds > 3)
compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);
if (options & SLJIT_F64_ALIGNMENT)
compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);
size = 1 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
size += (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
#else
@ -94,11 +115,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
*inst++ = MOD_REG | (reg_map[TMP_REG1] << 3) | 0x4 /* esp */;
}
#endif
if (saveds > 2 || scratches > 7)
if (saveds > 2 || scratches > 9)
PUSH_REG(reg_map[SLJIT_S2]);
if (saveds > 1 || scratches > 8)
if (saveds > 1 || scratches > 10)
PUSH_REG(reg_map[SLJIT_S1]);
if (saveds > 0 || scratches > 9)
if (saveds > 0 || scratches > 11)
PUSH_REG(reg_map[SLJIT_S0]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
@ -134,51 +155,64 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
}
#endif
SLJIT_COMPILE_ASSERT(SLJIT_LOCALS_OFFSET >= (2 + 4) * sizeof(sljit_uw), require_at_least_two_words);
SLJIT_ASSERT(SLJIT_LOCALS_OFFSET > 0);
#if defined(__APPLE__)
/* Ignore pushed registers and SLJIT_LOCALS_OFFSET when computing the aligned local size. */
saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
if (options & SLJIT_DOUBLE_ALIGNMENT) {
local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
inst = (sljit_u8*)ensure_buf(compiler, 1 + 17);
FAIL_IF(!inst);
INC_SIZE(17);
inst[0] = MOV_r_rm;
inst[1] = MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[SLJIT_SP];
inst[2] = GROUP_F7;
inst[3] = MOD_REG | (0 << 3) | reg_map[SLJIT_SP];
sljit_unaligned_store_sw(inst + 4, 0x4);
inst[8] = JNE_i8;
inst[9] = 6;
inst[10] = GROUP_BINARY_81;
inst[11] = MOD_REG | (5 << 3) | reg_map[SLJIT_SP];
sljit_unaligned_store_sw(inst + 12, 0x4);
inst[16] = PUSH_r + reg_map[TMP_REG1];
}
if (options & SLJIT_F64_ALIGNMENT)
local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
else
local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif
compiler->local_size = local_size;
#ifdef _WIN32
if (local_size > 1024) {
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
#else
local_size -= SLJIT_LOCALS_OFFSET;
/* Space for a single argument. This amount is excluded when the stack is allocated below. */
local_size -= sizeof(sljit_sw);
FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, SLJIT_LOCALS_OFFSET));
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, sizeof(sljit_sw)));
#endif
FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
}
#endif
SLJIT_ASSERT(local_size > 0);
#if !defined(__APPLE__)
if (options & SLJIT_F64_ALIGNMENT) {
EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_SP, 0);
/* Some space might allocated during sljit_grow_stack() above on WIN32. */
FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size + sizeof(sljit_sw)));
#if defined _WIN32 && !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
if (compiler->local_size > 1024)
FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
TMP_REG1, 0, TMP_REG1, 0, SLJIT_IMM, sizeof(sljit_sw)));
#endif
inst = (sljit_u8*)ensure_buf(compiler, 1 + 6);
FAIL_IF(!inst);
INC_SIZE(6);
inst[0] = GROUP_BINARY_81;
inst[1] = MOD_REG | AND | reg_map[SLJIT_SP];
sljit_unaligned_store_sw(inst + 2, ~(sizeof(sljit_f64) - 1));
/* The real local size must be used. */
return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), compiler->local_size, TMP_REG1, 0);
}
#endif
return emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size);
}
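A note on the SLJIT_F64_ALIGNMENT path added above (a pseudo-C sketch of the
intent, not the emitted bytes): the prologue saves the incoming stack pointer,
allocates local_size plus one extra word, rounds esp down to an 8-byte
boundary and stores the saved pointer at [esp + local_size]; sljit_emit_return
below reloads esp from that slot instead of adding local_size back.

	saved = esp;                             /* EMIT_MOV TMP_REG1, esp  */
	esp  -= local_size + sizeof(sljit_sw);   /* SUB                     */
	esp  &= ~(sizeof(sljit_f64) - 1);        /* AND, 8-byte alignment   */
	*(sljit_sw *)(esp + local_size) = saved; /* prologue store          */
	/* ... function body ... */
	esp = *(sljit_sw *)(esp + local_size);   /* epilogue restore        */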
@ -193,14 +227,36 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
compiler->args = args;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
/* [esp+0] for saving temporaries and third argument for calls. */
compiler->saveds_offset = 1 * sizeof(sljit_sw);
#else
/* [esp+0] for saving temporaries and space for maximum three arguments. */
if (scratches <= 1)
compiler->saveds_offset = 1 * sizeof(sljit_sw);
else
compiler->saveds_offset = ((scratches == 2) ? 2 : 3) * sizeof(sljit_sw);
#endif
if (scratches > 3)
compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);
compiler->locals_offset = compiler->saveds_offset;
if (saveds > 3)
compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);
if (options & SLJIT_F64_ALIGNMENT)
compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);
#if defined(__APPLE__)
saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
compiler->local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
if (options & SLJIT_DOUBLE_ALIGNMENT)
compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
if (options & SLJIT_F64_ALIGNMENT)
compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
else
compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif
return SLJIT_SUCCESS;
}
@ -214,23 +270,19 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
SLJIT_ASSERT(compiler->args >= 0);
compiler->flags_saved = 0;
FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
SLJIT_ASSERT(compiler->local_size > 0);
FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#if !defined(__APPLE__)
if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) {
inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
FAIL_IF(!inst);
INC_SIZE(3);
inst[0] = MOV_r_rm;
inst[1] = (reg_map[SLJIT_SP] << 3) | 0x4 /* SIB */;
inst[2] = (4 << 3) | reg_map[SLJIT_SP];
}
if (compiler->options & SLJIT_F64_ALIGNMENT)
EMIT_MOV(compiler, SLJIT_SP, 0, SLJIT_MEM1(SLJIT_SP), compiler->local_size)
else
FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#else
FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#endif
size = 2 + (compiler->scratches > 7 ? (compiler->scratches - 7) : 0) +
@ -247,11 +299,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
INC_SIZE(size);
if (compiler->saveds > 0 || compiler->scratches > 9)
if (compiler->saveds > 0 || compiler->scratches > 11)
POP_REG(reg_map[SLJIT_S0]);
if (compiler->saveds > 1 || compiler->scratches > 8)
if (compiler->saveds > 1 || compiler->scratches > 10)
POP_REG(reg_map[SLJIT_S1]);
if (compiler->saveds > 2 || compiler->scratches > 7)
if (compiler->saveds > 2 || compiler->scratches > 9)
POP_REG(reg_map[SLJIT_S2]);
POP_REG(reg_map[TMP_REG1]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -47,9 +47,8 @@ static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_
*code_ptr++ = 10 + 3;
}
SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_first);
*code_ptr++ = REX_W | REX_B;
*code_ptr++ = MOV_r_i32 + 1;
*code_ptr++ = REX_W | ((reg_map[TMP_REG2] <= 7) ? 0 : REX_B);
*code_ptr++ = MOV_r_i32 | reg_lmap[TMP_REG2];
jump->addr = (sljit_uw)code_ptr;
if (jump->flags & JUMP_LABEL)
@ -58,31 +57,10 @@ static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_
sljit_unaligned_store_sw(code_ptr, jump->u.target);
code_ptr += sizeof(sljit_sw);
if (reg_map[TMP_REG2] >= 8)
*code_ptr++ = REX_B;
*code_ptr++ = GROUP_FF;
*code_ptr++ = (type >= SLJIT_FAST_CALL) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1);
return code_ptr;
}
static sljit_u8* generate_fixed_jump(sljit_u8 *code_ptr, sljit_sw addr, sljit_s32 type)
{
sljit_sw delta = addr - ((sljit_sw)code_ptr + 1 + sizeof(sljit_s32));
if (delta <= HALFWORD_MAX && delta >= HALFWORD_MIN) {
*code_ptr++ = (type == 2) ? CALL_i32 : JMP_i32;
sljit_unaligned_store_sw(code_ptr, delta);
}
else {
SLJIT_COMPILE_ASSERT(reg_map[TMP_REG3] == 9, tmp3_is_9_second);
*code_ptr++ = REX_W | REX_B;
*code_ptr++ = MOV_r_i32 + 1;
sljit_unaligned_store_sw(code_ptr, addr);
code_ptr += sizeof(sljit_sw);
*code_ptr++ = REX_B;
*code_ptr++ = GROUP_FF;
*code_ptr++ = (type == 2) ? (MOD_REG | CALL_rm | 1) : (MOD_REG | JMP_rm | 1);
}
*code_ptr++ = MOD_REG | (type >= SLJIT_FAST_CALL ? CALL_rm : JMP_rm) | reg_lmap[TMP_REG2];
return code_ptr;
}
@ -98,7 +76,13 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
compiler->flags_saved = 0;
#ifdef _WIN64
/* Two/four register slots for parameters plus space for xmm6 register if needed. */
if (fscratches >= 6 || fsaveds >= 1)
compiler->locals_offset = 6 * sizeof(sljit_sw);
else
compiler->locals_offset = ((scratches > 2) ? 4 : 2) * sizeof(sljit_sw);
#endif
/* Including the return address saved by the call instruction. */
saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);
@ -177,7 +161,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
INC_SIZE(4 + (3 + sizeof(sljit_s32)));
*inst++ = REX_W;
*inst++ = GROUP_BINARY_83;
*inst++ = MOD_REG | SUB | 4;
*inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
/* Allocated size for registers must be divisible by 8. */
SLJIT_ASSERT(!(saved_register_size & 0x7));
/* Aligned to 16 byte. */
@ -189,7 +173,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
local_size -= 4 * sizeof(sljit_sw);
}
/* Second instruction */
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R0] < 8, temporary_reg1_is_loreg);
SLJIT_ASSERT(reg_map[SLJIT_R0] < 8);
*inst++ = REX_W;
*inst++ = MOV_rm_i32;
*inst++ = MOD_REG | reg_lmap[SLJIT_R0];
@ -202,14 +186,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
}
#endif
SLJIT_ASSERT(local_size > 0);
if (local_size > 0) {
if (local_size <= 127) {
inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
INC_SIZE(4);
*inst++ = REX_W;
*inst++ = GROUP_BINARY_83;
*inst++ = MOD_REG | SUB | 4;
*inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
*inst++ = local_size;
}
else {
@ -218,10 +202,11 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compi
INC_SIZE(7);
*inst++ = REX_W;
*inst++ = GROUP_BINARY_81;
*inst++ = MOD_REG | SUB | 4;
*inst++ = MOD_REG | SUB | reg_map[SLJIT_SP];
sljit_unaligned_store_s32(inst, local_size);
inst += sizeof(sljit_s32);
}
}
#ifdef _WIN64
/* Save xmm6 register: movaps [rsp + 0x20], xmm6 */
@ -247,6 +232,14 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *comp
CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
#ifdef _WIN64
/* Two/four register slots for parameters plus space for xmm6 register if needed. */
if (fscratches >= 6 || fsaveds >= 1)
compiler->locals_offset = 6 * sizeof(sljit_sw);
else
compiler->locals_offset = ((scratches > 2) ? 4 : 2) * sizeof(sljit_sw);
#endif
/* Including the return address saved by the call instruction. */
saved_register_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 1);
compiler->local_size = ((local_size + SLJIT_LOCALS_OFFSET + saved_register_size + 15) & ~15) - saved_register_size;
@ -261,7 +254,6 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
CHECK_ERROR();
CHECK(check_sljit_emit_return(compiler, op, src, srcw));
compiler->flags_saved = 0;
FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
#ifdef _WIN64
@ -275,7 +267,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
}
#endif
SLJIT_ASSERT(compiler->local_size > 0);
if (compiler->local_size > 0) {
if (compiler->local_size <= 127) {
inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
FAIL_IF(!inst);
@ -294,6 +286,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *comp
*inst++ = MOD_REG | ADD | 4;
sljit_unaligned_store_s32(inst, compiler->local_size);
}
}
tmp = compiler->scratches;
for (i = SLJIT_FIRST_SAVED_REG; i <= tmp; i++) {
@ -387,13 +380,12 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32
if (b & SLJIT_MEM) {
if (!(b & OFFS_REG_MASK)) {
if (NOT_HALFWORD(immb)) {
if (emit_load_imm64(compiler, TMP_REG3, immb))
return NULL;
PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb));
immb = 0;
if (b & REG_MASK)
b |= TO_OFFS_REG(TMP_REG3);
b |= TO_OFFS_REG(TMP_REG2);
else
b |= TMP_REG3;
b |= TMP_REG2;
}
else if (reg_lmap[b & REG_MASK] == 4)
b |= TO_OFFS_REG(SLJIT_SP);
@ -553,17 +545,19 @@ static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32
/* Call / return instructions */
/* --------------------------------------------------------------------- */
static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type)
static sljit_s32 call_with_args(struct sljit_compiler *compiler, sljit_s32 type)
{
sljit_u8 *inst;
/* After any change update IS_REG_CHANGED_BY_CALL as well. */
#ifndef _WIN64
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
SLJIT_ASSERT(reg_map[SLJIT_R1] == 6 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8 && reg_map[TMP_REG1] == 2);
inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
if (type >= SLJIT_CALL3) {
/* Move third argument to TMP_REG1. */
*inst++ = REX_W;
*inst++ = MOV_r_rm;
*inst++ = MOD_REG | (0x2 /* rdx */ << 3) | reg_lmap[SLJIT_R2];
@ -572,12 +566,13 @@ static SLJIT_INLINE sljit_s32 call_with_args(struct sljit_compiler *compiler, sl
*inst++ = MOV_r_rm;
*inst++ = MOD_REG | (0x7 /* rdi */ << 3) | reg_lmap[SLJIT_R0];
#else
SLJIT_COMPILE_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8, args_registers);
SLJIT_ASSERT(reg_map[SLJIT_R1] == 2 && reg_map[SLJIT_R0] < 8 && reg_map[SLJIT_R2] < 8 && reg_map[TMP_REG1] == 8);
inst = (sljit_u8*)ensure_buf(compiler, 1 + ((type < SLJIT_CALL3) ? 3 : 6));
FAIL_IF(!inst);
INC_SIZE((type < SLJIT_CALL3) ? 3 : 6);
if (type >= SLJIT_CALL3) {
/* Move third argument to TMP_REG1. */
*inst++ = REX_W | REX_R;
*inst++ = MOV_r_rm;
*inst++ = MOD_REG | (0x0 /* r8 */ << 3) | reg_lmap[SLJIT_R2];

File diff suppressed because it is too large.


@ -1,7 +1,7 @@
/*
* Stack-less Just-In-Time compiler
*
* Copyright 2009-2012 Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
* Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
@ -206,10 +206,7 @@ static sljit_sw sljit_page_align = 0;
SLJIT_API_FUNC_ATTRIBUTE struct sljit_stack* SLJIT_CALL sljit_allocate_stack(sljit_uw limit, sljit_uw max_limit, void *allocator_data)
{
struct sljit_stack *stack;
union {
void *ptr;
sljit_uw uw;
} base;
#ifdef _WIN32
SYSTEM_INFO si;
#endif
@ -233,29 +230,29 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_stack* SLJIT_CALL sljit_allocate_stack(slj
}
#endif
/* Align limit and max_limit. */
max_limit = (max_limit + sljit_page_align) & ~sljit_page_align;
stack = (struct sljit_stack*)SLJIT_MALLOC(sizeof(struct sljit_stack), allocator_data);
if (!stack)
return NULL;
/* Align max_limit. */
max_limit = (max_limit + sljit_page_align) & ~sljit_page_align;
#ifdef _WIN32
base.ptr = VirtualAlloc(NULL, max_limit, MEM_RESERVE, PAGE_READWRITE);
if (!base.ptr) {
ptr = VirtualAlloc(NULL, max_limit, MEM_RESERVE, PAGE_READWRITE);
if (!ptr) {
SLJIT_FREE(stack, allocator_data);
return NULL;
}
stack->base = base.uw;
stack->max_limit = (sljit_u8 *)ptr;
stack->base = stack->max_limit + max_limit;
stack->limit = stack->base;
stack->max_limit = stack->base + max_limit;
if (sljit_stack_resize(stack, stack->base + limit)) {
if (sljit_stack_resize(stack, stack->base - limit)) {
sljit_free_stack(stack, allocator_data);
return NULL;
}
#else
#ifdef MAP_ANON
base.ptr = mmap(NULL, max_limit, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON, -1, 0);
ptr = mmap(NULL, max_limit, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON, -1, 0);
#else
if (dev_zero < 0) {
if (open_dev_zero()) {
@ -263,15 +260,15 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_stack* SLJIT_CALL sljit_allocate_stack(slj
return NULL;
}
}
base.ptr = mmap(NULL, max_limit, PROT_READ | PROT_WRITE, MAP_PRIVATE, dev_zero, 0);
ptr = mmap(NULL, max_limit, PROT_READ | PROT_WRITE, MAP_PRIVATE, dev_zero, 0);
#endif
if (base.ptr == MAP_FAILED) {
if (ptr == MAP_FAILED) {
SLJIT_FREE(stack, allocator_data);
return NULL;
}
stack->base = base.uw;
stack->limit = stack->base + limit;
stack->max_limit = stack->base + max_limit;
stack->max_limit = (sljit_u8 *)ptr;
stack->base = stack->max_limit + max_limit;
stack->limit = stack->base - limit;
#endif
stack->top = stack->base;
return stack;
@ -279,53 +276,53 @@ SLJIT_API_FUNC_ATTRIBUTE struct sljit_stack* SLJIT_CALL sljit_allocate_stack(slj
#undef PAGE_ALIGN
SLJIT_API_FUNC_ATTRIBUTE void SLJIT_CALL sljit_free_stack(struct sljit_stack* stack, void *allocator_data)
SLJIT_API_FUNC_ATTRIBUTE void SLJIT_CALL sljit_free_stack(struct sljit_stack *stack, void *allocator_data)
{
SLJIT_UNUSED_ARG(allocator_data);
#ifdef _WIN32
VirtualFree((void*)stack->base, 0, MEM_RELEASE);
VirtualFree((void*)stack->max_limit, 0, MEM_RELEASE);
#else
munmap((void*)stack->base, stack->max_limit - stack->base);
munmap((void*)stack->max_limit, stack->base - stack->max_limit);
#endif
SLJIT_FREE(stack, allocator_data);
}
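The stack helpers now model a downward-growing machine stack (this summarises
the code above and below, it is not new behaviour added here): the block
obtained from VirtualAlloc/mmap starts at max_limit, base is its high end, and
limit moves between the two.

	max_limit  <=  limit  <=  base == max_limit + aligned maximum size

sljit_stack_resize() therefore grows the stack by lowering limit towards
max_limit (committing pages on Windows) and shrinks it by raising limit back
towards base, releasing the freed range with VirtualFree or madvise where
available.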
SLJIT_API_FUNC_ATTRIBUTE sljit_sw SLJIT_CALL sljit_stack_resize(struct sljit_stack* stack, sljit_uw new_limit)
SLJIT_API_FUNC_ATTRIBUTE sljit_sw SLJIT_CALL sljit_stack_resize(struct sljit_stack *stack, sljit_u8 *new_limit)
{
sljit_uw aligned_old_limit;
sljit_uw aligned_new_limit;
if ((new_limit > stack->max_limit) || (new_limit < stack->base))
if ((new_limit < stack->max_limit) || (new_limit >= stack->base))
return -1;
#ifdef _WIN32
aligned_new_limit = (new_limit + sljit_page_align) & ~sljit_page_align;
aligned_old_limit = (stack->limit + sljit_page_align) & ~sljit_page_align;
aligned_new_limit = (sljit_uw)new_limit & ~sljit_page_align;
aligned_old_limit = ((sljit_uw)stack->limit) & ~sljit_page_align;
if (aligned_new_limit != aligned_old_limit) {
if (aligned_new_limit > aligned_old_limit) {
if (!VirtualAlloc((void*)aligned_old_limit, aligned_new_limit - aligned_old_limit, MEM_COMMIT, PAGE_READWRITE))
if (aligned_new_limit < aligned_old_limit) {
if (!VirtualAlloc((void*)aligned_new_limit, aligned_old_limit - aligned_new_limit, MEM_COMMIT, PAGE_READWRITE))
return -1;
}
else {
if (!VirtualFree((void*)aligned_new_limit, aligned_old_limit - aligned_new_limit, MEM_DECOMMIT))
if (!VirtualFree((void*)aligned_old_limit, aligned_new_limit - aligned_old_limit, MEM_DECOMMIT))
return -1;
}
}
stack->limit = new_limit;
return 0;
#else
if (new_limit >= stack->limit) {
if (new_limit <= stack->limit) {
stack->limit = new_limit;
return 0;
}
aligned_new_limit = (new_limit + sljit_page_align) & ~sljit_page_align;
aligned_old_limit = (stack->limit + sljit_page_align) & ~sljit_page_align;
aligned_new_limit = (sljit_uw)new_limit & ~sljit_page_align;
aligned_old_limit = ((sljit_uw)stack->limit) & ~sljit_page_align;
/* If madvise is available, we release the unnecessary space. */
#if defined(MADV_DONTNEED)
if (aligned_new_limit < aligned_old_limit)
madvise((void*)aligned_new_limit, aligned_old_limit - aligned_new_limit, MADV_DONTNEED);
if (aligned_new_limit > aligned_old_limit)
madvise((void*)aligned_old_limit, aligned_new_limit - aligned_old_limit, MADV_DONTNEED);
#elif defined(POSIX_MADV_DONTNEED)
if (aligned_new_limit < aligned_old_limit)
posix_madvise((void*)aligned_new_limit, aligned_old_limit - aligned_new_limit, POSIX_MADV_DONTNEED);
if (aligned_new_limit > aligned_old_limit)
posix_madvise((void*)aligned_old_limit, aligned_new_limit - aligned_old_limit, POSIX_MADV_DONTNEED);
#endif
stack->limit = new_limit;
return 0;