# Copyright (C) 2018 and later: Unicode, Inc. and others.
# License & terms of use: http://www.unicode.org/copyright.html

# Python 2/3 Compatibility (ICU-20299)
# TODO(ICU-20301): Remove this.
from __future__ import print_function

from icutools.databuilder import *
from icutools.databuilder import utils
from icutools.databuilder.request_types import *

import os
import sys

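# generate() is the entry point for this file: the icutools.databuilder
# framework calls it to obtain the full list of build requests for the ICU
# data files, which it then executes or renders into build rules.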
def generate(config, io, common_vars):
    requests = []

    if len(io.glob("misc/*")) == 0:
        print("Error: Cannot find data directory; please specify --src_dir", file=sys.stderr)
        sys.exit(1)

    requests += generate_cnvalias(config, io, common_vars)
    requests += generate_ulayout(config, io, common_vars)
    requests += generate_confusables(config, io, common_vars)
    requests += generate_conversion_mappings(config, io, common_vars)
    requests += generate_brkitr_brk(config, io, common_vars)
    requests += generate_stringprep(config, io, common_vars)
    requests += generate_brkitr_dictionaries(config, io, common_vars)
    requests += generate_normalization(config, io, common_vars)
    requests += generate_coll_ucadata(config, io, common_vars)
    requests += generate_full_unicore_data(config, io, common_vars)
    requests += generate_unames(config, io, common_vars)
    requests += generate_misc(config, io, common_vars)
    requests += generate_curr_supplemental(config, io, common_vars)
    requests += generate_zone_supplemental(config, io, common_vars)
    requests += generate_translit(config, io, common_vars)

    # Res Tree Files
    # (input dirname, output dirname, use pool file, dep files)
    requests += generate_tree(config, io, common_vars,
        "locales",
        None,
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "curr",
        "curr",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "lang",
        "lang",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "region",
        "region",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "zone",
        "zone",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "unit",
        "unit",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "coll",
        "coll",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        # Depends on timezoneTypes.res and keyTypeData.res.
        # TODO: We should not need this dependency to build collation.
        # TODO: Bake keyTypeData.res into the common library?
        [DepTarget("coll_ucadata"), DepTarget("misc_res"), InFile("unidata/UCARules.txt")])

    requests += generate_tree(config, io, common_vars,
        "brkitr",
        "brkitr",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [DepTarget("brkitr_brk"), DepTarget("dictionaries")])

    requests += generate_tree(config, io, common_vars,
        "rbnf",
        "rbnf",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [])

    requests += [
        ListRequest(
            name = "icudata_list",
            variable_name = "icudata_all_output_files",
            output_file = TmpFile("icudata.lst"),
            include_tmp = False
        )
    ]

    return requests

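# Each helper below returns the requests for one category of data files. The
# common pattern: glob or name the input files under {IN_DIR}, derive output
# names by stripping the subdirectory prefix and file extension (slices such
# as v.filename[9:-4]), and invoke one ICU tool (icupkg, genrb, gencnval, ...)
# per file or per group of files.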
def generate_cnvalias(config, io, common_vars):
    # UConv Name Aliases
    input_file = InFile("mappings/convrtrs.txt")
    output_file = OutFile("cnvalias.icu")
    return [
        SingleExecutionRequest(
            name = "cnvalias",
            category = "cnvalias",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("gencnval"),
            args = "-s {IN_DIR} -d {OUT_DIR} "
                "{INPUT_FILES[0]}",
            format_with = {}
        )
    ]

def generate_confusables(config, io, common_vars):
    # CONFUSABLES
    txt1 = InFile("unidata/confusables.txt")
    txt2 = InFile("unidata/confusablesWholeScript.txt")
    cfu = OutFile("confusables.cfu")
    return [
        SingleExecutionRequest(
            name = "confusables",
            category = "confusables",
            dep_targets = [DepTarget("cnvalias")],
            input_files = [txt1, txt2],
            output_files = [cfu],
            tool = IcuTool("gencfu"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILES[0]} -w {IN_DIR}/{INPUT_FILES[1]} "
                "-o {OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]

def generate_conversion_mappings(config, io, common_vars):
    # UConv Conversion Table Files
    input_files = [InFile(filename) for filename in io.glob("mappings/*.ucm")]
    output_files = [OutFile("%s.cnv" % v.filename[9:-4]) for v in input_files]
    # TODO: handle BUILD_SPECIAL_CNV_FILES? Means to add --ignore-siso-check flag to makeconv
    return [
        RepeatedOrSingleExecutionRequest(
            name = "conversion_mappings",
            category = "conversion_mappings",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("makeconv"),
            args = "-s {IN_DIR} -d {OUT_DIR} -c {INPUT_FILE_PLACEHOLDER}",
            format_with = {},
            repeat_with = {
                "INPUT_FILE_PLACEHOLDER": utils.SpaceSeparatedList(file.filename for file in input_files)
            }
        )
    ]

def generate_brkitr_brk(config, io, common_vars):
    # BRK Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/rules/*.txt")]
    output_files = [OutFile("brkitr/%s.brk" % v.filename[13:-4]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "brkitr_brk",
            category = "brkitr_rules",
            dep_targets = [DepTarget("cnvalias"), DepTarget("ulayout")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genbrk"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILE} "
                "-o {OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]

def generate_stringprep(config, io, common_vars):
    # SPP FILES
    input_files = [InFile(filename) for filename in io.glob("sprep/*.txt")]
    output_files = [OutFile("%s.spp" % v.filename[6:-4]) for v in input_files]
    bundle_names = [v.filename[6:-4] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "stringprep",
            category = "stringprep",
            dep_targets = [InFile("unidata/NormalizationCorrections.txt")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gensprep"),
            args = "-s {IN_DIR}/sprep -d {OUT_DIR} -i {OUT_DIR} "
                "-b {BUNDLE_NAME} -m {IN_DIR}/unidata -u 3.2.0 {BUNDLE_NAME}.txt",
            format_with = {},
            repeat_with = {
                "BUNDLE_NAME": bundle_names
            }
        )
    ]

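# The dictionary sources below each need tool-specific flags: extra_options_map
# selects the trie value type (--bytes or --uchars) and, for the byte tries, a
# code point transform offset appropriate to the script's Unicode block.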
def generate_brkitr_dictionaries(config, io, common_vars):
    # Dict Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/dictionaries/*.txt")]
    output_files = [OutFile("brkitr/%s.dict" % v.filename[20:-4]) for v in input_files]
    extra_options_map = {
        "brkitr/dictionaries/burmesedict.txt": "--bytes --transform offset-0x1000",
        "brkitr/dictionaries/cjdict.txt": "--uchars",
        "brkitr/dictionaries/khmerdict.txt": "--bytes --transform offset-0x1780",
        "brkitr/dictionaries/laodict.txt": "--bytes --transform offset-0x0e80",
        "brkitr/dictionaries/thaidict.txt": "--bytes --transform offset-0x0e00"
    }
    extra_optionses = [extra_options_map[v.filename] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "dictionaries",
            category = "brkitr_dictionaries",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gendict"),
            args = "-i {OUT_DIR} "
                "-c {EXTRA_OPTIONS} "
                "{IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {
                "EXTRA_OPTIONS": extra_optionses
            }
        )
    ]

def generate_normalization(config, io, common_vars):
    # NRM Files
    input_files = [InFile(filename) for filename in io.glob("in/*.nrm")]
    # nfc.nrm is pre-compiled into C++; see generate_full_unicore_data
    input_files.remove(InFile("in/nfc.nrm"))
    output_files = [OutFile(v.filename[3:]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "normalization",
            category = "normalization",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]

def generate_coll_ucadata(config, io, common_vars):
    # Collation Dependency File (ucadata.icu)
    input_file = InFile("in/coll/ucadata-%s.icu" % config.coll_han_type)
    output_file = OutFile("coll/ucadata.icu")
    return [
        SingleExecutionRequest(
            name = "coll_ucadata",
            category = "coll_ucadata",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]

def generate_full_unicore_data(config, io, common_vars):
    # The core Unicode properties files (pnames.icu, uprops.icu, ucase.icu, ubidi.icu)
    # are hardcoded in the common DLL and therefore not included in the data package any more.
    # They are not built by default but need to be built for ICU4J data,
    # both in the .jar and in the .dat file (if ICU4J uses the .dat file).
    # See ICU-4497.
    if not config.include_uni_core_data:
        return []

    basenames = [
        "pnames.icu",
        "uprops.icu",
        "ucase.icu",
        "ubidi.icu",
        "nfc.nrm"
    ]
    input_files = [InFile("in/%s" % bn) for bn in basenames]
    output_files = [OutFile(bn) for bn in basenames]
    return [
        RepeatedExecutionRequest(
            name = "unicore",
            category = "unicore",
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}"
        )
    ]

def generate_unames(config, io, common_vars):
    # Unicode Character Names
    input_file = InFile("in/unames.icu")
    output_file = OutFile("unames.icu")
    return [
        SingleExecutionRequest(
            name = "unames",
            category = "unames",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]

def generate_ulayout(config, io, common_vars):
    # Unicode text layout properties
    basename = "ulayout"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]

def generate_misc(config, io, common_vars):
    # Misc Data Res Files
    input_files = [InFile(filename) for filename in io.glob("misc/*.txt")]
    input_basenames = [v.filename[5:] for v in input_files]
    output_files = [OutFile("%s.res" % v[:-4]) for v in input_basenames]
    return [
        RepeatedExecutionRequest(
            name = "misc_res",
            category = "misc",
            dep_targets = [DepTarget("cnvalias")], # ICU-21175
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/misc -d {OUT_DIR} -i {OUT_DIR} "
                "-k -q "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": input_basenames
            }
        )
    ]

def generate_curr_supplemental(config, io, common_vars):
    # Currency Supplemental Res File
    input_file = InFile("curr/supplementalData.txt")
    input_basename = "supplementalData.txt"
    output_file = OutFile("curr/supplementalData.res")
    return [
        SingleExecutionRequest(
            name = "curr_supplemental_res",
            category = "curr_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/curr -d {OUT_DIR}/curr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]

def generate_zone_supplemental(config, io, common_vars):
    # tzdbNames Res File
    input_file = InFile("zone/tzdbNames.txt")
    input_basename = "tzdbNames.txt"
    output_file = OutFile("zone/tzdbNames.res")
    return [
        SingleExecutionRequest(
            name = "zone_supplemental_res",
            category = "zone_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/zone -d {OUT_DIR}/zone -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]

def generate_translit(config, io, common_vars):
    input_files = [
        InFile("translit/root.txt"),
        InFile("translit/en.txt"),
        InFile("translit/el.txt")
    ]
    dep_files = set(InFile(filename) for filename in io.glob("translit/*.txt"))
    dep_files -= set(input_files)
    dep_files = list(sorted(dep_files))
    input_basenames = [v.filename[9:] for v in input_files]
    output_files = [
        OutFile("translit/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "translit_res",
            category = "translit",
            dep_targets = dep_files,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/translit -d {OUT_DIR}/translit -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

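# generate_tree() builds the requests for one locale resource tree (locales,
# curr, lang, region, zone, unit, coll, brkitr, rbnf): optionally a shared
# pool.res bundle, one .res file per source .txt, and the res_index.res file
# listing the installed and alias locales.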
def generate_tree(
        config,
        io,
        common_vars,
        sub_dir,
        out_sub_dir,
        use_pool_bundle,
        dep_targets):
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
    input_files = [InFile(filename) for filename in io.glob("%s/*.txt" % sub_dir)]
    if sub_dir == "curr":
        input_files.remove(InFile("curr/supplementalData.txt"))
    if sub_dir == "zone":
        input_files.remove(InFile("zone/tzdbNames.txt"))
    input_basenames = [v.filename[len(sub_dir)+1:] for v in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, v[:-4]))
        for v in input_basenames
    ]

    # Generate Pool Bundle
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        pool_target_name = "%s_pool_write" % sub_dir
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX = out_prefix,
            **common_vars
        )
        requests += [
            SingleExecutionRequest(
                name = pool_target_name,
                category = category,
                dep_targets = dep_targets,
                input_files = input_files,
                output_files = input_pool_files,
                tool = IcuTool("genrb"),
                args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                    "--writePoolBundle -k "
                    "{INPUT_BASENAMES_SPACED}",
                format_with = {
                    "IN_SUB_DIR": sub_dir,
                    "OUT_PREFIX": out_prefix,
                    "INPUT_BASENAMES_SPACED": utils.SpaceSeparatedList(input_basenames)
                }
            ),
        ]
        dep_targets = dep_targets + [DepTarget(pool_target_name)]
    else:
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name = "%s_res" % sub_dir,
            category = category,
            dep_targets = dep_targets,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "{EXTRA_OPTION} -k "
                "{INPUT_BASENAME}",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

    # Generate res_index file
    # Exclude the deprecated locale variants and root; see ICU-20628. This
    # could be data-driven, but we do not want to perform I/O in this script
    # (for example, we do not want to read from an XML file).
    excluded_locales = set([
        "ja_JP_TRADITIONAL",
        "th_TH_TRADITIONAL",
        "de_",
        "de__PHONEBOOK",
        "es_",
        "es__TRADITIONAL",
        "root",
    ])
    # Put alias locales in a separate structure; see ICU-20627
    dependency_data = io.read_locale_deps(sub_dir)
    if "aliases" in dependency_data:
        alias_locales = set(dependency_data["aliases"].keys())
    else:
        alias_locales = set()
    alias_files = []
    installed_files = []
    for f in input_files:
        file_stem = IndexRequest.locale_file_stem(f)
        if file_stem in excluded_locales:
            continue
        destination = alias_files if file_stem in alias_locales else installed_files
        destination.append(f)
    cldr_version = dependency_data["cldrVersion"] if sub_dir == "locales" else None
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR = sub_dir,
        **common_vars
    ))
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX = out_prefix,
        **common_vars
    ))
    index_file_target_name = "%s_index_txt" % sub_dir
    requests += [
        IndexRequest(
            name = index_file_target_name,
            category = category,
            installed_files = installed_files,
            alias_files = alias_files,
            txt_file = index_file_txt,
            output_file = index_res_file,
            cldr_version = cldr_version,
            args = "-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "-k "
                "{INDEX_NAME}.txt",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            }
        )
    ]

    return requests