Scripts to generate Android.mk for framework Skia.

In order to create Android.mk, run:

  python platform_tools/android/bin/gyp_to_android.py

For the change in the Android.mk file, see
https://googleplex-android-review.git.corp.google.com/#/c/408170/

(SkipBuildbotRuns)

BUG=skia:1975
R=djsollen@google.com, epoger@google.com
Author: scroggo@google.com

Review URL: https://codereview.chromium.org/140503007

git-svn-id: http://skia.googlecode.com/svn/trunk@13344 2bbb7eff-a529-9590-31e7-b0007b416f81
Commit: 8933563174
Parent: 1c188ed9fa
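As a sketch (not part of this change), the same generation can also be driven from Python instead of the command line shown above; the sys.path entry below assumes the script is run from the root of a Skia checkout.

# Hypothetical driver, illustration only.
import sys
sys.path.append('platform_tools/android/bin')  # assumes cwd is the Skia trunk root

import gyp_to_android

# With no argument, Android.mk is written at the root of the checkout, which
# is what the command line above does; a directory argument redirects it.
gyp_to_android.main()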
platform_tools/android/bin/android_framework_gyp.py (new file, 79 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Modified version of gyp_skia, used by gyp_to_android.py to generate Android.mk
"""

import os
import sys

SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))

# Unlike gyp_skia, this file is nested deep inside Skia. Find Skia's trunk dir.
# This line depends on the fact that the script is three levels deep
# (specifically, it is in platform_tools/android/bin).
SKIA_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
                                         os.pardir))
dir_contents = os.listdir(SKIA_DIR)
assert 'third_party' in dir_contents and 'gyp' in dir_contents

# Directory within which we can find the gyp source.
GYP_SOURCE_DIR = os.path.join(SKIA_DIR, 'third_party', 'externals', 'gyp')

# Ensure we import our current gyp source's module, not any version
# pre-installed in your PYTHONPATH.
sys.path.insert(0, os.path.join(GYP_SOURCE_DIR, 'pylib'))

import gyp

def main(target_dir, target_file, skia_arch_type, have_neon):
  """
  Create gypd files based on target_file.
  @param target_dir Directory containing all gyp files, including common.gypi
  @param target_file Gyp file to start on. Other files within target_dir will
                     be read if target_file depends on them.
  @param skia_arch_type Target architecture to pass to gyp.
  @param have_neon Whether to generate files including neon optimizations.
                   Only meaningful if skia_arch_type is 'arm'.
  @return path Path to root gypd file created by running gyp.
  """
  # Set GYP_DEFINES for building for the android framework.
  gyp_defines = ('skia_android_framework=1 OS=android skia_arch_type=%s '
                 % skia_arch_type)
  if skia_arch_type == 'arm':
    # Always use thumb and version 7 for arm
    gyp_defines += 'arm_thumb=1 arm_version=7 '
    if have_neon:
      gyp_defines += 'arm_neon=1 '
    else:
      gyp_defines += 'arm_neon=0 '

  os.environ['GYP_DEFINES'] = gyp_defines

  args = []
  args.extend(['--depth', '.'])
  full_path = os.path.join(target_dir, target_file)
  args.extend([full_path])
  # Common conditions
  args.extend(['-I', os.path.join(target_dir, 'common.gypi')])
  # Use the debugging format. We'll use these to create one master make file.
  args.extend(['-f', 'gypd'])

  # Off we go...
  ret = gyp.main(args)

  if ret != 0:
    raise Exception("gyp failed!")

  # Running gyp should have created a gypd file, with the same name as
  # full_path but with a 'd' on the end.
  gypd_file = full_path + 'd'
  if not os.path.exists(gypd_file):
    raise Exception("gyp failed to produce gypd file!")

  return gypd_file
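For reference, a small sketch of the GYP_DEFINES strings that main() above builds for a couple of inputs; the helper and assertions below are illustrative only and simply mirror the logic shown above.

def build_gyp_defines(skia_arch_type, have_neon):
  # Mirrors the GYP_DEFINES construction in android_framework_gyp.main().
  gyp_defines = ('skia_android_framework=1 OS=android skia_arch_type=%s '
                 % skia_arch_type)
  if skia_arch_type == 'arm':
    gyp_defines += 'arm_thumb=1 arm_version=7 '
    gyp_defines += 'arm_neon=1 ' if have_neon else 'arm_neon=0 '
  return gyp_defines

assert build_gyp_defines('arm', True) == (
    'skia_android_framework=1 OS=android skia_arch_type=arm '
    'arm_thumb=1 arm_version=7 arm_neon=1 ')
assert build_gyp_defines('x86', False) == (
    'skia_android_framework=1 OS=android skia_arch_type=x86 ')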
platform_tools/android/bin/gyp_to_android.py (new file, 134 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Script for generating the Android framework's version of Skia from gyp
files.
"""

import android_framework_gyp
import os
import shutil
import sys
import tempfile

# Find the top of trunk
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SKIA_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
                                         os.pardir))

# Find the directory with our helper files, and add it to the path.
GYP_GEN_DIR = os.path.join(SKIA_DIR, 'platform_tools', 'android', 'gyp_gen')
sys.path.append(GYP_GEN_DIR)

import gypd_parser
import makefile_writer
import variables
import vars_dict_lib

# Folder containing all gyp files and generated gypd files.
GYP_FOLDER = 'gyp'

def clean_gypd_files(folder):
  """
  Remove the gypd files generated by android_framework_gyp.main().
  @param folder Folder in which to delete all files ending with 'gypd'.
  """
  assert os.path.isdir(folder)
  files = os.listdir(folder)
  for f in files:
    if f.endswith('gypd'):
      os.remove(os.path.join(folder, f))

def generate_var_dict(target_dir, target_file, skia_arch_type, have_neon):
  """
  Create a VarsDict for a particular arch type. Each parameter is passed
  directly to android_framework_gyp.main().
  @param target_dir Directory containing gyp files.
  @param target_file Target gyp file.
  @param skia_arch_type Target architecture.
  @param have_neon Whether the target should build for neon.
  @return a VarsDict containing the variable definitions determined by gyp.
  """
  result_file = android_framework_gyp.main(target_dir, target_file,
                                           skia_arch_type, have_neon)
  var_dict = vars_dict_lib.VarsDict()
  gypd_parser.parse_gypd(var_dict, result_file)
  clean_gypd_files(target_dir)
  print '.',
  return var_dict

def main(target_dir=None):
  """
  Read gyp files and create Android.mk for the Android framework's
  external/skia.
  @param target_dir Directory in which to place 'Android.mk'. If None, the file
                    will be placed in skia's root directory.
  """
  # Create a temporary folder to hold gyp and gypd files. Create it in SKIA_DIR
  # so that it is a sibling of gyp/, so the relationships between gyp files and
  # other files (e.g. platform_tools/android/gyp/dependencies.gypi, referenced
  # by android_deps.gyp as a relative path) is unchanged.
  # Use mkdtemp to find an unused folder name, but then delete it so copytree
  # can be called with a non-existent directory.
  tmp_folder = tempfile.mkdtemp(dir=SKIA_DIR)
  os.rmdir(tmp_folder)
  shutil.copytree(os.path.join(SKIA_DIR, GYP_FOLDER), tmp_folder)

  try:
    main_gyp_file = 'android_framework_lib.gyp'

    print 'Creating Android.mk',

    # Generate a separate VarsDict for each architecture type. For each
    # archtype:
    # 1. call android_framework_gyp.main() to generate gypd files
    # 2. call parse_gypd to read those gypd files into the VarsDict
    # 3. delete the gypd files
    #
    # Once we have the VarsDict for each architecture type, we combine them all
    # into a single Android.mk file, which can build targets of any
    # architecture type.

    # The default uses a non-existent archtype, to find all the general
    # variable definitions.
    default_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'other',
                                         False)
    arm_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm', False)
    arm_neon_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm',
                                          True)
    if variables.INCLUDE_X86_OPTS:
      x86_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'x86', False)
    else:
      x86_var_dict = None

    # Compute the intersection of all targets. All the files in the
    # intersection should be part of the makefile always. Each dict will now
    # contain trimmed lists containing only variable definitions specific to
    # that configuration.
    var_dict_list = [default_var_dict, arm_var_dict, arm_neon_var_dict]
    if variables.INCLUDE_X86_OPTS:
      var_dict_list.append(x86_var_dict)
    common = vars_dict_lib.intersect(var_dict_list)

    # Further trim arm_neon_var_dict with arm_var_dict. After this call,
    # arm_var_dict (which will now be the intersection) includes all
    # definitions used by both arm and arm + neon, and arm_neon_var_dict will
    # only contain those specific to arm + neon.
    arm_var_dict = vars_dict_lib.intersect([arm_var_dict, arm_neon_var_dict])

    makefile_writer.write_android_mk(target_dir=target_dir,
                                     common=common,
                                     arm=arm_var_dict,
                                     arm_neon=arm_neon_var_dict,
                                     x86=x86_var_dict,
                                     default=default_var_dict)

  finally:
    shutil.rmtree(tmp_folder)

if __name__ == '__main__':
  main()
platform_tools/android/gyp_gen/gypd_parser.py (new file, 122 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Functions for parsing the gypd output from gyp.
"""

import vars_dict_lib

def parse_dictionary(var_dict, d, current_target_name):
  """
  Helper function to get the meaningful entries in a dictionary.
  @param var_dict VarsDict object for storing the results of the parsing.
  @param d Dictionary object to parse.
  @param current_target_name The current target being parsed. If this
                             dictionary is a target, this will be its entry
                             'target_name'. Otherwise, this will be the name of
                             the target which contains this dictionary.
  """
  for source in d.get('sources', []):
    # Compare against a lowercase version, in case files are named .H or .GYPI
    lowercase_source = source.lower()
    if lowercase_source.endswith('.h'):
      # Android.mk does not need the header files.
      continue
    if lowercase_source.endswith('gypi'):
      # The gypi files are included in sources, but the sources they included
      # are also included. No need to parse them again.
      continue
    # The path is relative to the gyp folder, but Android wants the path
    # relative to the root.
    source = source.replace('../src', 'src', 1)
    var_dict.LOCAL_SRC_FILES.add(source)

  for lib in d.get('libraries', []):
    if lib.endswith('.a'):
      # Remove the '.a'
      lib = lib[:-2]
      # Add 'lib', if necessary
      if not lib.startswith('lib'):
        lib = 'lib' + lib
      var_dict.LOCAL_STATIC_LIBRARIES.add(lib)
    else:
      # lib will be in the form of '-l<name>'. Change it to 'lib<name>'
      lib = lib.replace('-l', 'lib', 1)
      var_dict.LOCAL_SHARED_LIBRARIES.add(lib)

  for dependency in d.get('dependencies', []):
    # Each dependency is listed as
    #   <path_to_file>:<target>#target
    li = dependency.split(':')
    assert(len(li) <= 2 and len(li) >= 1)
    sub_targets = []
    if len(li) == 2 and li[1] != '*':
      sub_targets.append(li[1].split('#')[0])
    sub_path = li[0]
    assert(sub_path.endswith('.gyp'))
    # Although the original reference is to a .gyp, parse the corresponding
    # gypd file, which was constructed by gyp.
    sub_path = sub_path + 'd'
    parse_gypd(var_dict, sub_path, sub_targets)

  if 'default_configuration' in d:
    config_name = d['default_configuration']
    # default_configuration is meaningless without configurations
    assert('configurations' in d)
    config = d['configurations'][config_name]
    parse_dictionary(var_dict, config, current_target_name)

  for flag in d.get('cflags', []):
    var_dict.LOCAL_CFLAGS.add(flag)
  for flag in d.get('cflags_cc', []):
    var_dict.LOCAL_CPPFLAGS.add(flag)

  for include in d.get('include_dirs', []):
    # The input path will be relative to gyp/, but Android wants relative to
    # LOCAL_PATH
    include = include.replace('..', '$(LOCAL_PATH)', 1)
    # Remove a trailing slash, if present.
    if include.endswith('/'):
      include = include[:-1]
    var_dict.LOCAL_C_INCLUDES.add(include)
    # For the top level, libskia, include directories should be exported.
    if current_target_name == 'libskia':
      var_dict.LOCAL_EXPORT_C_INCLUDE_DIRS.add(include)

  for define in d.get('defines', []):
    var_dict.LOCAL_CFLAGS.add('-D' + define)


def parse_gypd(var_dict, path, desired_targets=None):
  """
  Parse a gypd file.
  @param var_dict VarsDict object for storing the result of the parse.
  @param path Path to gypd file.
  @param desired_targets List of targets to be parsed from this file. If empty,
                         parse all targets.
  """
  d = {}
  with open(path, 'r') as f:
    # Read the entire file as a dictionary
    d = eval(f.read())

  # The gypd file is structured such that the top level dictionary has an entry
  # named 'targets'
  for target in d['targets']:
    target_name = target['target_name']
    if target_name in var_dict.KNOWN_TARGETS:
      # Avoid circular dependencies
      continue
    if desired_targets and target_name not in desired_targets:
      # Our caller does not depend on this one
      continue
    # Add it to our known targets so we don't parse it again
    var_dict.KNOWN_TARGETS.add(target_name)

    parse_dictionary(var_dict, target, target_name)
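A rough sketch of the data shape parse_dictionary() above consumes. The target below is hand-written for illustration (it is not taken from a real gypd file) and assumes platform_tools/android/gyp_gen is on sys.path.

import vars_dict_lib
from gypd_parser import parse_dictionary

# Illustrative target dictionary; field names follow gyp's gypd output.
example_target = {
    'target_name': 'libskia',
    'sources': ['../src/core/SkCanvas.cpp', '../include/core/SkCanvas.h'],
    'include_dirs': ['../include/core/'],
    'cflags': ['-Wall'],
    'defines': ['SK_DEBUG'],
    'libraries': ['-lcutils', 'libexpat_static.a'],
}

var_dict = vars_dict_lib.VarsDict()
parse_dictionary(var_dict, example_target, 'libskia')
# Result: 'src/core/SkCanvas.cpp' is kept and the header is skipped; the
# include becomes '$(LOCAL_PATH)/include/core' (also exported, since the
# target is libskia); '-lcutils' becomes the shared library 'libcutils';
# 'libexpat_static.a' becomes the static library 'libexpat_static'; and
# '-Wall' plus '-DSK_DEBUG' land in LOCAL_CFLAGS.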
platform_tools/android/gyp_gen/makefile_writer.py (new file, 175 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Functions for creating an Android.mk from already created dictionaries.
"""

import os
import variables

def write_group(f, name, items, append):
  """
  Helper function to list all names passed to a variable.
  @param f File open for writing (Android.mk)
  @param name Name of the makefile variable (e.g. LOCAL_CFLAGS)
  @param items list of strings to be passed to the variable.
  @param append Whether to append to the variable or overwrite it.
  """
  if not items:
    return

  # Copy the list so we can prepend it with its name.
  items_to_write = list(items)

  if append:
    items_to_write.insert(0, '%s +=' % name)
  else:
    items_to_write.insert(0, '%s :=' % name)

  f.write(' \\\n\t'.join(items_to_write))

  f.write('\n\n')


def write_local_vars(f, var_dict, append):
  """
  Helper function to write all the members of var_dict to the makefile.
  @param f File open for writing (Android.mk)
  @param var_dict VarsDict holding the unique values for one configuration.
  @param append Whether to append to each makefile variable or overwrite it.
  """
  for key in var_dict.keys():
    if key == 'LOCAL_CFLAGS':
      # Always append LOCAL_CFLAGS. This allows us to define some early on in
      # the makefile and not overwrite them.
      _append = True
    elif key == 'KNOWN_TARGETS':
      # KNOWN_TARGETS are not needed in the final make file.
      continue
    else:
      _append = append
    write_group(f, key, var_dict[key], _append)


AUTOGEN_WARNING = (
"""
###############################################################################
#
# THIS FILE IS AUTOGENERATED BY GYP_TO_ANDROID.PY. DO NOT EDIT.
#
###############################################################################

"""
)


DEBUGGING_HELP = (
"""
###############################################################################
#
# PROBLEMS WITH SKIA DEBUGGING?? READ THIS...
#
# The debug build results in changes to the Skia headers. This means that those
# using libskia must also be built with the debug version of the Skia headers.
# There are a few scenarios where this comes into play:
#
# (1) You're building debug code that depends on libskia.
#   (a) If libskia is built in release, then define SK_RELEASE when building
#       your sources.
#   (b) If libskia is built with debugging (see step 2), then no changes are
#       needed since your sources and libskia have been built with SK_DEBUG.
# (2) You're building libskia in debug mode.
#   (a) RECOMMENDED: You can build the entire system in debug mode. Do this by
#       updating your build/config.mk to include -DSK_DEBUG on the line that
#       defines COMMON_GLOBAL_CFLAGS
#   (b) You can update all the users of libskia to define SK_DEBUG when they
#       are building their sources.
#
# NOTE: If neither SK_DEBUG nor SK_RELEASE is defined, then Skia checks NDEBUG
# to determine which build type to use.
###############################################################################

"""
)


# TODO (scroggo): Currently write_android_mk has intimate knowledge about its
# parameters: e.g. arm_neon keeps track of differences from arm, whereas the
# others keep track of differences from common. Consider reworking this.
def write_android_mk(target_dir, common, arm, arm_neon, x86, default):
  """
  Given all the variables, write the final make file.
  @param target_dir The full path to the directory to write Android.mk, or None
                    to use the current working directory.
  @param common VarsDict holding variable definitions common to all
                configurations.
  @param arm VarsDict holding variable definitions unique to arm. Will be
             written to the makefile inside an 'ifeq ($(TARGET_ARCH), arm)'
             block.
  @param arm_neon VarsDict holding variable definitions unique to arm with
                  neon. Will be written inside an
                  'ifeq ($(ARCH_ARM_HAVE_NEON),true)' block nested inside an
                  'ifeq ($(TARGET_ARCH), arm)' block.
  @param x86 VarsDict holding variable definitions unique to x86. Will be
             written inside an 'ifeq ($(TARGET_ARCH),x86)' block.
  @param default VarsDict holding variable definitions for an architecture
                 without custom optimizations.
  TODO: Add mips.
  """
  target_file = 'Android.mk'
  if target_dir:
    target_file = os.path.join(target_dir, target_file)
  with open(target_file, 'w') as f:
    f.write(AUTOGEN_WARNING)
    f.write('BASE_PATH := $(call my-dir)\n')
    f.write('LOCAL_PATH:= $(call my-dir)\n')

    f.write(DEBUGGING_HELP)

    f.write('include $(CLEAR_VARS)\n')

    f.write('LOCAL_ARM_MODE := thumb\n')

    # Need a flag to tell the C side when we're on devices with large memory
    # budgets (i.e. larger than the low-end devices that initially shipped).
    f.write('ifeq ($(ARCH_ARM_HAVE_VFP),true)\n')
    f.write('\tLOCAL_CFLAGS += -DANDROID_LARGE_MEMORY_DEVICE\n')
    f.write('endif\n\n')

    f.write('ifeq ($(TARGET_ARCH),x86)\n')
    f.write('\tLOCAL_CFLAGS += -DANDROID_LARGE_MEMORY_DEVICE\n')
    f.write('endif\n\n')

    f.write('# used for testing\n')
    f.write('#LOCAL_CFLAGS += -g -O0\n\n')

    f.write('ifeq ($(NO_FALLBACK_FONT),true)\n')
    f.write('\tLOCAL_CFLAGS += -DNO_FALLBACK_FONT\n')
    f.write('endif\n\n')

    write_local_vars(f, common, False)

    f.write('ifeq ($(TARGET_ARCH),arm)\n')
    f.write('ifeq ($(ARCH_ARM_HAVE_NEON),true)\n')
    write_local_vars(f, arm_neon, True)
    f.write('endif\n\n')
    write_local_vars(f, arm, True)

    if variables.INCLUDE_X86_OPTS:
      f.write('else ifeq ($(TARGET_ARCH),x86)\n')
      write_local_vars(f, x86, True)

    f.write('else\n')
    write_local_vars(f, default, True)
    f.write('endif\n\n')

    f.write('include external/stlport/libstlport.mk\n')
    f.write('LOCAL_MODULE:= libskia\n')
    f.write('include $(BUILD_SHARED_LIBRARY)\n')
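A minimal sketch of the text write_group() above emits, using an in-memory file; the variable name and flag values are made up, and the snippet assumes platform_tools/android/gyp_gen is on sys.path.

from StringIO import StringIO  # Python 2, like the scripts in this change
import makefile_writer

buf = StringIO()
makefile_writer.write_group(buf, 'LOCAL_CFLAGS', ['-Wall', '-Werror'], False)
print buf.getvalue()
# Prints (each continuation line starts with a tab), followed by a blank line:
#   LOCAL_CFLAGS := \
#           -Wall \
#           -Werror
# With append=True the first line would use '+=' instead of ':='.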
platform_tools/android/gyp_gen/variables.py (new file, 11 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# TODO (scroggo): Currently the x86 specific files are not included. Include
# them.
INCLUDE_X86_OPTS = False
platform_tools/android/gyp_gen/vars_dict_lib.py (new file, 133 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import types

class OrderedSet(object):
  """
  Ordered set of unique items that supports addition and removal.
  """

  def __init__(self):
    self.__li = []

  def add(self, item):
    """
    Add item, if it is not already in the set.
    @param item The item to add.
    """
    if item not in self.__li:
      self.__li.append(item)

  def __contains__(self, item):
    """
    Whether the set contains item.
    @param item The item to search for in the set.
    @return bool Whether the item is in the set.
    """
    return item in self.__li

  def __iter__(self):
    """
    Iterator for the set.
    """
    return self.__li.__iter__()

  def remove(self, item):
    """
    Remove item from the set.
    @param item Item to be removed.
    """
    return self.__li.remove(item)

  def __len__(self):
    """
    Number of items in the set.
    """
    return len(self.__li)

  def __getitem__(self, index):
    """
    Return item at index.
    """
    return self.__li[index]

VAR_NAMES = ['LOCAL_CFLAGS',
             'LOCAL_CPPFLAGS',
             'LOCAL_SRC_FILES',
             'LOCAL_SHARED_LIBRARIES',
             'LOCAL_STATIC_LIBRARIES',
             'LOCAL_C_INCLUDES',
             'LOCAL_EXPORT_C_INCLUDE_DIRS',
             'KNOWN_TARGETS']

class VarsDict(collections.namedtuple('VarsDict', VAR_NAMES)):
  """
  Custom class for storing the arguments to Android.mk variables. Can be
  treated as a dictionary with fixed keys.
  """

  __slots__ = ()

  def __new__(cls):
    lists = []
    # TODO (scroggo): Is there a better way to add N items?
    for __unused__ in range(len(VAR_NAMES)):
      lists.append(OrderedSet())
    return tuple.__new__(cls, lists)

  def keys(self):
    """
    Return the field names as strings.
    """
    return self._fields

  def __getitem__(self, index):
    """
    Return an item, indexed by a number or a string.
    """
    if type(index) == types.IntType:
      # Treat the index as an array index into a tuple.
      return tuple.__getitem__(self, index)
    if type(index) == types.StringType:
      # Treat the index as a key into a dictionary.
      return eval('self.%s' % index)
    return None


def intersect(var_dict_list):
  """
  Find the intersection of a list of VarsDicts and trim each input to its
  unique entries.
  @param var_dict_list list of VarsDicts. WARNING: each VarsDict will be
                       modified in place, to remove the common elements!
  @return VarsDict containing list entries common to all VarsDicts in
          var_dict_list
  """
  intersection = VarsDict()
  # First VarsDict
  var_dict_a = var_dict_list[0]
  # The rest.
  other_var_dicts = var_dict_list[1:]

  for key in var_dict_a.keys():
    # Copy A's list, so we can continue iterating after modifying the original.
    a_list = list(var_dict_a[key])
    for item in a_list:
      # If item is in all lists, add to intersection, and remove from all.
      in_all_lists = True
      for var_dict in other_var_dicts:
        if not item in var_dict[key]:
          in_all_lists = False
          break
      if in_all_lists:
        intersection[key].add(item)
        for var_dict in var_dict_list:
          var_dict[key].remove(item)
  return intersection
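A short usage sketch of VarsDict and intersect() above, with made-up flag values; it assumes platform_tools/android/gyp_gen is on sys.path.

from vars_dict_lib import VarsDict, intersect

arm = VarsDict()
arm.LOCAL_CFLAGS.add('-DSK_CPU_LENDIAN')
arm.LOCAL_CFLAGS.add('-fno-rtti')

x86 = VarsDict()
x86.LOCAL_CFLAGS.add('-DSK_CPU_LENDIAN')
x86.LOCAL_CFLAGS.add('-msse2')

common = intersect([arm, x86])
# common.LOCAL_CFLAGS now contains only '-DSK_CPU_LENDIAN', and that entry has
# been removed from both inputs: arm keeps '-fno-rtti', x86 keeps '-msse2'.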
platform_tools/android/tests/gyp_to_android_tests.py (new file, 105 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Test gyp_to_android.py
"""

import os
import shutil
import sys
import tempfile
import test_variables
import unittest

# Path to gyp_to_android
sys.path.append(test_variables.BIN_DIR)

import gyp_to_android

class AndroidMkCreationTest(unittest.TestCase):

  def setUp(self):
    # Create a temporary directory for storing the output (Android.mk)
    self.__tmp_dir = tempfile.mkdtemp()

  def test_create(self):
    gyp_to_android.main(self.__tmp_dir)

    # Now there should be a file named 'Android.mk' inside __tmp_dir
    path_to_android_mk = os.path.join(self.__tmp_dir, 'Android.mk')
    self.assertTrue(os.path.exists(path_to_android_mk))

  def tearDown(self):
    # Remove self.__tmp_dir, which is no longer needed.
    shutil.rmtree(self.__tmp_dir)


GYPD_SUFFIX = ".gypd"
GYP_SUFFIX = ".gyp"
GYPI_SUFFIX = ".gypi"
OTHER_SUFFIX = ".txt"

class CleanGypdTest(unittest.TestCase):

  def setUp(self):
    self.__tmp_dir = tempfile.mkdtemp()
    self.__num_files = 10
    # Fill the dir with four types of files. .gypd files should be deleted by
    # clean_gypd_files(), while the rest should be left alone.
    for i in range(self.__num_files):
      self.create_file('%s%s' % (str(i), GYPD_SUFFIX))
      self.create_file('%s%s' % (str(i), GYPI_SUFFIX))
      self.create_file('%s%s' % (str(i), GYP_SUFFIX))
      self.create_file('%s%s' % (str(i), OTHER_SUFFIX))

  def create_file(self, basename):
    """
    Create a file named 'basename' in self.__tmp_dir.
    """
    f = tempfile.mkstemp(dir=self.__tmp_dir)
    os.rename(f[1], os.path.join(self.__tmp_dir, basename))
    self.assert_file_exists(basename)

  def assert_file_exists(self, basename):
    """
    Assert that 'basename' exists in self.__tmp_dir.
    """
    full_name = os.path.join(self.__tmp_dir, basename)
    self.assertTrue(os.path.exists(full_name))

  def assert_file_does_not_exist(self, basename):
    """
    Assert that 'basename' does not exist in self.__tmp_dir.
    """
    full_name = os.path.join(self.__tmp_dir, basename)
    self.assertFalse(os.path.exists(full_name))

  def test_clean(self):
    """
    Test that clean_gypd_files() deletes .gypd files, and leaves others.
    """
    gyp_to_android.clean_gypd_files(self.__tmp_dir)
    for i in range(self.__num_files):
      self.assert_file_exists('%s%s' % (str(i), GYPI_SUFFIX))
      self.assert_file_exists('%s%s' % (str(i), GYP_SUFFIX))
      self.assert_file_exists('%s%s' % (str(i), OTHER_SUFFIX))
      # Only the GYPD files should have been deleted.
      self.assert_file_does_not_exist('%s%s' % (str(i), GYPD_SUFFIX))

  def tearDown(self):
    shutil.rmtree(self.__tmp_dir)


def main():
  loader = unittest.TestLoader()
  suite = loader.loadTestsFromTestCase(AndroidMkCreationTest)
  suite.addTest(loader.loadTestsFromTestCase(CleanGypdTest))
  unittest.TextTestRunner(verbosity=2).run(suite)

if __name__ == "__main__":
  main()
platform_tools/android/tests/ordered_set_tests.py (new file, 69 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Test OrderedSet.
"""

import sys
import test_variables
import unittest

sys.path.append(test_variables.GYP_GEN_DIR)

from vars_dict_lib import OrderedSet


def create_dummy_var(i):
  return 'dummy_var' + str(i)


class OrderedSetTest(unittest.TestCase):

  def setUp(self):
    self.__set = OrderedSet()

  def test_methods(self):
    """
    Test methods on OrderedSet.
    """
    RANGE = 10
    for i in range(RANGE):
      dummy_var = create_dummy_var(i)
      # Add to the list. This should succeed.
      self.__set.add(dummy_var)
      self.assertEqual(len(self.__set), i+1)
      self.assertTrue(dummy_var in self.__set)
      self.assertEqual(self.__set[i], dummy_var)

      # Now attempt to add it again. This should fail.
      self.__set.add(dummy_var)
      self.assertEqual(len(self.__set), i+1)
      self.assertEqual(self.__set[i], dummy_var)

    # Test iterator.
    counter = 0
    for set_member in self.__set:
      self.assertEqual(create_dummy_var(counter), set_member)
      counter += 1
    self.assertEqual(counter, len(self.__set))

    # Now test removal.
    for i in range(RANGE):
      dummy_var = create_dummy_var(i)
      self.__set.remove(dummy_var)
      self.assertEqual(len(self.__set), RANGE-i-1)
      self.assertFalse(dummy_var in self.__set)

def main():
  loader = unittest.TestLoader()
  suite = loader.loadTestsFromTestCase(OrderedSetTest)
  unittest.TextTestRunner(verbosity=2).run(suite)

if __name__ == "__main__":
  main()
platform_tools/android/tests/run_all.py (new file, 24 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Run all the tests in platform_tools/android/tests.
"""

import os
import unittest

def main():
  suite = unittest.TestLoader().discover(os.path.dirname(__file__),
                                         pattern='*_tests.py')
  results = unittest.TextTestRunner(verbosity=2).run(suite)
  print repr(results)
  if not results.wasSuccessful():
    raise Exception('failed one or more unittests')

if __name__ == '__main__':
  main()
platform_tools/android/tests/test_variables.py (new file, 22 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Common variables for tests.
"""

import os

# Find this file so we can find the python files to test.
SCRIPT_DIR = os.path.dirname(__file__)
ANDROID_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir))

# Path to gyp_to_android.
BIN_DIR = os.path.join(ANDROID_DIR, 'bin')

# Path to generator files.
GYP_GEN_DIR = os.path.join(ANDROID_DIR, 'gyp_gen')
platform_tools/android/tests/var_dict_tests.py (new file, 110 lines)
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Test the VarsDict.
"""

import sys
import test_variables
import unittest

sys.path.append(test_variables.GYP_GEN_DIR)

import vars_dict_lib
from vars_dict_lib import OrderedSet
from vars_dict_lib import VarsDict
from vars_dict_lib import VAR_NAMES

class VarsDictTest(unittest.TestCase):
  """
  Tests for the VarsDict class.
  """

  # May not be needed.
  def setUp(self):
    self.__vars_dict = VarsDict()

  def assert_consistency(self, v_dict):
    self.assertIs(v_dict.LOCAL_CFLAGS, v_dict['LOCAL_CFLAGS'])
    self.assertIs(v_dict.LOCAL_CPPFLAGS, v_dict['LOCAL_CPPFLAGS'])
    self.assertIs(v_dict.LOCAL_SRC_FILES, v_dict['LOCAL_SRC_FILES'])
    self.assertIs(v_dict.LOCAL_SHARED_LIBRARIES,
                  v_dict['LOCAL_SHARED_LIBRARIES'])
    self.assertIs(v_dict.LOCAL_STATIC_LIBRARIES,
                  v_dict['LOCAL_STATIC_LIBRARIES'])
    self.assertIs(v_dict.LOCAL_C_INCLUDES, v_dict['LOCAL_C_INCLUDES'])
    self.assertIs(v_dict.LOCAL_EXPORT_C_INCLUDE_DIRS,
                  v_dict['LOCAL_EXPORT_C_INCLUDE_DIRS'])
    self.assertIs(v_dict.KNOWN_TARGETS, v_dict['KNOWN_TARGETS'])

  def test_creation(self):
    v_dict = VarsDict()
    # VarsDict has one entry for each label in VAR_NAMES
    self.assertEqual(len(v_dict.keys()), len(VAR_NAMES))
    for key in v_dict.keys():
      self.assertIn(key, VAR_NAMES)
      # Each entry is an empty OrderedSet
      self.assertIsNotNone(v_dict[key])
      self.assertIsInstance(v_dict[key], OrderedSet)
      self.assertEqual(len(v_dict[key]), 0)
    self.assert_consistency(v_dict)

  def test_intersection(self):
    v_dict_list = []
    RANGE = 10
    for i in range(RANGE):
      v_dict = VarsDict()
      # Add something common to each field, as well as a unique entry
      v_dict.LOCAL_CFLAGS.add('cflag')
      v_dict.LOCAL_CFLAGS.add(str(i))

      v_dict.LOCAL_CPPFLAGS.add('cppflag')
      v_dict.LOCAL_CPPFLAGS.add(str(i))

      v_dict.LOCAL_SRC_FILES.add('src')
      v_dict.LOCAL_SRC_FILES.add(str(i))

      v_dict.LOCAL_SHARED_LIBRARIES.add('shared')
      v_dict.LOCAL_SHARED_LIBRARIES.add(str(i))

      v_dict.LOCAL_STATIC_LIBRARIES.add('static')
      v_dict.LOCAL_STATIC_LIBRARIES.add(str(i))

      v_dict.LOCAL_C_INCLUDES.add('includes')
      v_dict.LOCAL_C_INCLUDES.add(str(i))

      v_dict.LOCAL_EXPORT_C_INCLUDE_DIRS.add('exports')
      v_dict.LOCAL_EXPORT_C_INCLUDE_DIRS.add(str(i))

      v_dict.KNOWN_TARGETS.add('known')
      v_dict.KNOWN_TARGETS.add(str(i))

      self.assert_consistency(v_dict)

      v_dict_list.append(v_dict)

    intersection = vars_dict_lib.intersect(v_dict_list)

    self.assert_consistency(intersection)

    for key in intersection.keys():
      # Each field had one common item
      self.assertEqual(len(intersection[key]), 1)
      for item in intersection[key]:
        for other_v_dict in v_dict_list:
          self.assertNotIn(item, other_v_dict[key])


def main():
  loader = unittest.TestLoader()
  suite = loader.loadTestsFromTestCase(VarsDictTest)
  unittest.TextTestRunner(verbosity=2).run(suite)

if __name__ == "__main__":
  main()