Add pdf to skia.gyp and check in a snapshot of GYP.

Review URL: http://codereview.appspot.com/4307050/



git-svn-id: http://skia.googlecode.com/svn/trunk@1029 2bbb7eff-a529-9590-31e7-b0007b416f81
bsalomon@google.com 2011-03-30 22:04:53 +00:00
parent 2022c94ec4
commit 30ca0a69e6
31 changed files with 16693 additions and 5 deletions


@ -25,9 +25,7 @@ import sys
script_dir = os.path.dirname(__file__)
# This assumes that your gyp checkout is placed in the same root directory
# as your skia checkout.
-gyp_dir = os.path.normpath(os.path.join(script_dir, os.pardir, os.pardir))
+gyp_dir = os.path.normpath(os.path.join(script_dir, os.pardir, 'third_party'))
sys.path.append(os.path.join(gyp_dir, 'gyp', 'pylib'))
import gyp


@ -590,6 +590,47 @@
],
},
},
{
'target_name': 'pdf',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/pdf',
],
'sources': [
'../include/pdf/SkPDFCatalog.h',
'../include/pdf/SkPDFDevice.h',
'../include/pdf/SkPDFDocument.h',
'../include/pdf/SkPDFFont.h',
'../include/pdf/SkPDFFormXObject.h',
'../include/pdf/SkPDFGraphicState.h',
'../include/pdf/SkPDFImage.h',
'../include/pdf/SkPDFPage.h',
'../include/pdf/SkPDFShader.h',
'../include/pdf/SkPDFStream.h',
'../include/pdf/SkPDFTypes.h',
'../include/pdf/SkPDFUtils.h',
'../src/pdf/SkPDFCatalog.cpp',
'../src/pdf/SkPDFDevice.cpp',
'../src/pdf/SkPDFDocument.cpp',
'../src/pdf/SkPDFFont.cpp',
'../src/pdf/SkPDFFormXObject.cpp',
'../src/pdf/SkPDFGraphicState.cpp',
'../src/pdf/SkPDFImage.cpp',
'../src/pdf/SkPDFPage.cpp',
'../src/pdf/SkPDFShader.cpp',
'../src/pdf/SkPDFStream.cpp',
'../src/pdf/SkPDFTypes.cpp',
'../src/pdf/SkPDFUtils.cpp',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/pdf',
],
},
},
{
'target_name': 'utils',
'type': 'static_library',
@ -678,7 +719,6 @@
'../include/views/SkEventSink.h',
'../include/views/SkImageView.h',
'../include/views/SkKey.h',
-'../include/views/SkMetaData.h',
'../include/views/SkOSMenu.h',
'../include/views/SkOSWindow_Mac.h',
'../include/views/SkOSWindow_SDL.h',
@ -703,7 +743,6 @@
'../src/views/SkImageView.cpp',
'../src/views/SkListView.cpp',
'../src/views/SkListWidget.cpp',
-'../src/views/SkMetaData.cpp',
'../src/views/SkOSMenu.cpp',
'../src/views/SkParsePaint.cpp',
'../src/views/SkProgressBarView.cpp',
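For context, a hedged sketch (not part of this diff; the target name and source path are illustrative) of how another target in skia.gyp could consume the new library: listing 'pdf' in its dependencies pulls in the static library, and the direct_dependent_settings block above adds ../include/pdf to the dependent's include path automatically.

{
  'target_name': 'pdf_example_tool',   # illustrative target, not in this commit
  'type': 'executable',
  'dependencies': [
    'pdf',                             # the target added by this change
  ],
  'sources': [
    '../tools/pdf_example_tool.cpp',   # illustrative source file
  ],
},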

5
third_party/gyp/AUTHORS vendored Normal file

@ -0,0 +1,5 @@
# Names should be added to this file like so:
# Name or Organization <email address>
Google Inc.
Steven Knight <knight@baldmt.com>

27
third_party/gyp/LICENSE vendored Normal file

@ -0,0 +1,27 @@
Copyright (c) 2009 Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

14
third_party/gyp/README.skia vendored Normal file

@ -0,0 +1,14 @@
This is a snapshot of GYP (http://code.google.com/p/gyp/) at r197, exported via:
svn export http://gyp.googlecode.com/svn/trunk/
The following were omitted:
DEPS
MANIFEST
PRESUBMIT.py
codereview.settings
gyp_dummy.c
gyptest.py
test/
samples/
tools/
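A minimal sketch of how such a snapshot could be reproduced (assuming an svn client on PATH; the export URL, revision, and omitted entries come from the notes above, the rest is illustrative):

import os
import shutil
import subprocess

# Export GYP r197 without .svn metadata.
subprocess.check_call(['svn', 'export', '-r', '197',
                       'http://gyp.googlecode.com/svn/trunk/',
                       'third_party/gyp'])

# Remove the files and directories that were omitted from the checkin.
for name in ['DEPS', 'MANIFEST', 'PRESUBMIT.py', 'codereview.settings',
             'gyp_dummy.c', 'gyptest.py', 'test', 'samples', 'tools']:
    path = os.path.join('third_party', 'gyp', name)
    if os.path.isdir(path):
        shutil.rmtree(path)
    elif os.path.exists(path):
        os.remove(path)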

18
third_party/gyp/gyp vendored Executable file

@ -0,0 +1,18 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# TODO(mark): sys.path manipulation is some temporary testing stuff.
try:
import gyp
except ImportError, e:
import os.path
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
import gyp
if __name__ == '__main__':
sys.exit(gyp.main(sys.argv[1:]))

5
third_party/gyp/gyp.bat vendored Executable file

@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.
@python "%~dp0/gyp" %*

341
third_party/gyp/pylib/gyp/MSVSNew.py vendored Normal file

@ -0,0 +1,341 @@
#!/usr/bin/python2.4
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""New implementation of Visual Studio project generation for SCons."""
import common
import os
import random
import gyp.common
# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try:
import hashlib
_new_md5 = hashlib.md5
except ImportError:
import md5
_new_md5 = md5.new
# Initialize random number generator
random.seed()
# GUIDs for project types
ENTRY_TYPE_GUIDS = {
'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}
#------------------------------------------------------------------------------
# Helper functions
def MakeGuid(name, seed='msvs_new'):
"""Returns a GUID for the specified target name.
Args:
name: Target name.
seed: Seed for MD5 hash.
Returns:
A GUID-like string calculated from the name and seed.
This generates something which looks like a GUID, but depends only on the
name and seed. This means the same name/seed will always generate the same
GUID, so that projects and solutions which refer to each other can explicitly
determine the GUID to refer to. It also means that the GUID will
not change when the project for a target is rebuilt.
"""
# Calculate a MD5 signature for the seed and name.
d = _new_md5(str(seed) + str(name)).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest)
guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ '-' + d[20:32] + '}')
return guid
#------------------------------------------------------------------------------
class MSVSFolder:
"""Folder in a Visual Studio project or solution."""
def __init__(self, path, name = None, entries = None,
guid = None, items = None):
"""Initializes the folder.
Args:
path: Full path to the folder.
name: Name of the folder.
entries: List of folder entries to nest inside this folder. May contain
Folder or Project objects. May be None, if the folder is empty.
guid: GUID to use for folder, if not None.
items: List of solution items to include in the folder project. May be
None, if the folder does not directly contain items.
"""
if name:
self.name = name
else:
# Use last layer.
self.name = os.path.basename(path)
self.path = path
self.guid = guid
# Copy passed lists (or set to empty lists)
self.entries = list(entries or [])
self.items = list(items or [])
self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
def get_guid(self):
if self.guid is None:
# Use consistent guids for folders (so things don't regenerate).
self.guid = MakeGuid(self.path, seed='msvs_folder')
return self.guid
#------------------------------------------------------------------------------
class MSVSProject:
"""Visual Studio project."""
def __init__(self, path, name = None, dependencies = None, guid = None,
spec = None, build_file = None, config_platform_overrides = None,
fixpath_prefix = None):
"""Initializes the project.
Args:
path: Absolute path to the project file.
name: Name of project. If None, the name will be the same as the base
name of the project file.
dependencies: List of other Project objects this project is dependent
upon, if not None.
guid: GUID to use for project, if not None.
spec: Dictionary specifying how to build this project.
build_file: Filename of the .gyp file that the vcproj file comes from.
config_platform_overrides: optional dict of configuration platforms to
used in place of the default for this target.
fixpath_prefix: the path used to adjust the behavior of _fixpath
"""
self.path = path
self.guid = guid
self.spec = spec
self.build_file = build_file
# Use project filename if name not specified
self.name = name or os.path.splitext(os.path.basename(path))[0]
# Copy passed lists (or set to empty lists)
self.dependencies = list(dependencies or [])
self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
if config_platform_overrides:
self.config_platform_overrides = config_platform_overrides
else:
self.config_platform_overrides = {}
self.fixpath_prefix = fixpath_prefix
def set_dependencies(self, dependencies):
self.dependencies = list(dependencies or [])
def get_guid(self):
if self.guid is None:
# Set GUID from path
# TODO(rspangler): This is fragile.
# 1. We can't just use the project filename sans path, since there could
# be multiple projects with the same base name (for example,
# foo/unittest.vcproj and bar/unittest.vcproj).
# 2. The path needs to be relative to $SOURCE_ROOT, so that the project
# GUID is the same whether it's included from base/base.sln or
# foo/bar/baz/baz.sln.
# 3. The GUID needs to be the same each time this builder is invoked, so
# that we don't need to rebuild the solution when the project changes.
# 4. We should be able to handle pre-built project files by reading the
# GUID from the files.
self.guid = MakeGuid(self.name)
return self.guid
#------------------------------------------------------------------------------
class MSVSSolution:
"""Visual Studio solution."""
def __init__(self, path, version, entries=None, variants=None,
websiteProperties=True):
"""Initializes the solution.
Args:
path: Path to solution file.
version: Format version to emit.
entries: List of entries in solution. May contain Folder or Project
objects. May be None, if the folder is empty.
variants: List of build variant strings. If none, a default list will
be used.
websiteProperties: Flag to decide if the website properties section
is generated.
"""
self.path = path
self.websiteProperties = websiteProperties
self.version = version
# Copy passed lists (or set to empty lists)
self.entries = list(entries or [])
if variants:
# Copy passed list
self.variants = variants[:]
else:
# Use default
self.variants = ['Debug|Win32', 'Release|Win32']
# TODO(rspangler): Need to be able to handle a mapping of solution config
# to project config. Should we be able to handle variants being a dict,
# or add a separate variant_map variable? If it's a dict, we can't
# guarantee the order of variants since dict keys aren't ordered.
# TODO(rspangler): Automatically write to disk for now; should delay until
# node-evaluation time.
self.Write()
def Write(self, writer=common.WriteOnDiff):
"""Writes the solution file to disk.
Raises:
IndexError: An entry appears multiple times.
"""
# Walk the entry tree and collect all the folders and projects.
all_entries = []
entries_to_check = self.entries[:]
while entries_to_check:
# Pop from the beginning of the list to preserve the user's order.
e = entries_to_check.pop(0)
# A project or folder can only appear once in the solution's folder tree.
# This also protects from cycles.
if e in all_entries:
#raise IndexError('Entry "%s" appears more than once in solution' %
# e.name)
continue
all_entries.append(e)
# If this is a folder, check its entries too.
if isinstance(e, MSVSFolder):
entries_to_check += e.entries
# Sort by name then guid (so things are in order on vs2008).
def NameThenGuid(a, b):
if a.name < b.name: return -1
if a.name > b.name: return 1
if a.get_guid() < b.get_guid(): return -1
if a.get_guid() > b.get_guid(): return 1
return 0
all_entries = sorted(all_entries, NameThenGuid)
# Open file and print header
f = writer(self.path)
f.write('Microsoft Visual Studio Solution File, '
'Format Version %s\r\n' % self.version.SolutionVersion())
f.write('# %s\r\n' % self.version.Description())
# Project entries
sln_root = os.path.split(self.path)[0]
for e in all_entries:
relative_path = gyp.common.RelativePath(e.path, sln_root)
f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
e.entry_type_guid, # Entry type GUID
e.name, # Folder name
relative_path.replace('/', '\\'), # Folder name (again)
e.get_guid(), # Entry GUID
))
# TODO(rspangler): Need a way to configure this stuff
if self.websiteProperties:
f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
'\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
'\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
'\tEndProjectSection\r\n')
if isinstance(e, MSVSFolder):
if e.items:
f.write('\tProjectSection(SolutionItems) = preProject\r\n')
for i in e.items:
f.write('\t\t%s = %s\r\n' % (i, i))
f.write('\tEndProjectSection\r\n')
if isinstance(e, MSVSProject):
if e.dependencies:
f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
for d in e.dependencies:
f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
f.write('\tEndProjectSection\r\n')
f.write('EndProject\r\n')
# Global section
f.write('Global\r\n')
# Configurations (variants)
f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
for v in self.variants:
f.write('\t\t%s = %s\r\n' % (v, v))
f.write('\tEndGlobalSection\r\n')
# Sort config guids for easier diffing of solution changes.
config_guids = []
config_guids_overrides = {}
for e in all_entries:
if isinstance(e, MSVSProject):
config_guids.append(e.get_guid())
config_guids_overrides[e.get_guid()] = e.config_platform_overrides
config_guids.sort()
f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
for g in config_guids:
for v in self.variants:
nv = config_guids_overrides[g].get(v, v)
# Pick which project configuration to build for this solution
# configuration.
f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
g, # Project GUID
v, # Solution build configuration
nv, # Project build config for that solution config
))
# Enable project in this solution configuration.
f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
g, # Project GUID
v, # Solution build configuration
nv, # Project build config for that solution config
))
f.write('\tEndGlobalSection\r\n')
# TODO(rspangler): Should be able to configure this stuff too (though I've
# never seen this be any different)
f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
f.write('\t\tHideSolutionNode = FALSE\r\n')
f.write('\tEndGlobalSection\r\n')
# Folder mappings
# TODO(rspangler): Should omit this section if there are no folders
f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
for e in all_entries:
if not isinstance(e, MSVSFolder):
continue # Does not apply to projects, only folders
for subentry in e.entries:
f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
f.write('\tEndGlobalSection\r\n')
f.write('EndGlobal\r\n')
f.close()
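A small standalone sketch of why MakeGuid above gives stable project GUIDs (it re-derives the same MD5-based formatting; the target name is illustrative):

import hashlib

def make_guid(name, seed='msvs_new'):
    # Same construction as MSVSNew.MakeGuid: MD5 of seed+name, sliced into GUID form.
    d = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
    return ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] +
            '-' + d[20:32] + '}')

# The same name and seed always yield the same GUID, so regenerating projects
# and solutions does not churn the references between them.
assert make_guid('pdf') == make_guid('pdf')
print(make_guid('pdf'))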

245
third_party/gyp/pylib/gyp/MSVSProject.py vendored Normal file

@ -0,0 +1,245 @@
#!/usr/bin/python2.4
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import common
import xml.dom
import xml_fix
#------------------------------------------------------------------------------
class Tool(object):
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
"""Initializes the tool.
Args:
name: Tool name.
attrs: Dict of tool attributes; may be None.
"""
self.name = name
self.attrs = attrs or {}
def CreateElement(self, doc):
"""Creates an element for the tool.
Args:
doc: xml.dom.Document object to use for node creation.
Returns:
A new xml.dom.Element for the tool.
"""
node = doc.createElement('Tool')
node.setAttribute('Name', self.name)
for k, v in self.attrs.items():
node.setAttribute(k, v)
return node
class Filter(object):
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
"""Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained.
"""
self.name = name
self.contents = list(contents or [])
#------------------------------------------------------------------------------
class Writer(object):
"""Visual Studio XML project writer."""
def __init__(self, project_path, version):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
"""
self.project_path = project_path
self.doc = None
self.version = version
def Create(self, name, guid=None, platforms=None):
"""Creates the project document.
Args:
name: Name of the project.
guid: GUID to use for project, if not None.
"""
self.name = name
self.guid = guid
# Default to Win32 for platforms.
if not platforms:
platforms = ['Win32']
# Create XML doc
xml_impl = xml.dom.getDOMImplementation()
self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)
# Add attributes to root element
self.n_root = self.doc.documentElement
self.n_root.setAttribute('ProjectType', 'Visual C++')
self.n_root.setAttribute('Version', self.version.ProjectVersion())
self.n_root.setAttribute('Name', self.name)
self.n_root.setAttribute('ProjectGUID', self.guid)
self.n_root.setAttribute('RootNamespace', self.name)
self.n_root.setAttribute('Keyword', 'Win32Proj')
# Add platform list
n_platform = self.doc.createElement('Platforms')
self.n_root.appendChild(n_platform)
for platform in platforms:
n = self.doc.createElement('Platform')
n.setAttribute('Name', platform)
n_platform.appendChild(n)
# Add tool files section
self.n_tool_files = self.doc.createElement('ToolFiles')
self.n_root.appendChild(self.n_tool_files)
# Add configurations section
self.n_configs = self.doc.createElement('Configurations')
self.n_root.appendChild(self.n_configs)
# Add empty References section
self.n_root.appendChild(self.doc.createElement('References'))
# Add files section
self.n_files = self.doc.createElement('Files')
self.n_root.appendChild(self.n_files)
# Keep a dict keyed on filename to speed up access.
self.n_files_dict = dict()
# Add empty Globals section
self.n_root.appendChild(self.doc.createElement('Globals'))
def AddToolFile(self, path):
"""Adds a tool file to the project.
Args:
path: Relative path from project to tool file.
"""
n_tool = self.doc.createElement('ToolFile')
n_tool.setAttribute('RelativePath', path)
self.n_tool_files.appendChild(n_tool)
def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
tools=None):
"""Adds a configuration to the parent node.
Args:
parent: Destination node.
config_type: Type of configuration node.
config_name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
# Handle defaults
if not attrs:
attrs = {}
if not tools:
tools = []
# Add configuration node and its attributes
n_config = self.doc.createElement(config_type)
n_config.setAttribute('Name', config_name)
for k, v in attrs.items():
n_config.setAttribute(k, v)
parent.appendChild(n_config)
# Add tool nodes and their attributes
if tools:
for t in tools:
if isinstance(t, Tool):
n_config.appendChild(t.CreateElement(self.doc))
else:
n_config.appendChild(Tool(t).CreateElement(self.doc))
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
Args:
name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
Args:
parent: Destination node
files: A list of Filter objects and/or relative paths to files.
Will call itself recursively, if the files list contains Filter objects.
"""
for f in files:
if isinstance(f, Filter):
node = self.doc.createElement('Filter')
node.setAttribute('Name', f.name)
self._AddFilesToNode(node, f.contents)
else:
node = self.doc.createElement('File')
node.setAttribute('RelativePath', f)
self.n_files_dict[f] = node
parent.appendChild(node)
def AddFiles(self, files):
"""Adds files to the project.
Args:
files: A list of Filter objects and/or relative paths to files.
This makes a copy of the file/filter tree at the time of this call. If you
later add files to a Filter object which was passed into a previous call
to AddFiles(), it will not be reflected in this project.
"""
self._AddFilesToNode(self.n_files, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
parent = self.n_files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)
def Write(self, writer=common.WriteOnDiff):
"""Writes the project file."""
f = writer(self.project_path)
fix = xml_fix.XmlFix()
self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
fix.Cleanup()
f.close()
#------------------------------------------------------------------------------
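A hedged usage sketch of the project Writer above (it assumes this snapshot's pylib/gyp directory is on sys.path, since the module imports common and xml_fix as top-level modules; the GUID, paths, and attribute values are illustrative):

import sys
sys.path.insert(0, 'third_party/gyp/pylib/gyp')  # assumed checkout layout

import MSVSProject
import MSVSVersion  # version helper added later in this commit

version = MSVSVersion._CreateVersion('2008')
writer = MSVSProject.Writer('pdf.vcproj', version)
writer.Create('pdf', guid='{00000000-0000-0000-0000-000000000001}')  # illustrative GUID
writer.AddConfig('Debug|Win32', attrs={'ConfigurationType': '4'})    # 4 = static library
writer.AddFiles([MSVSProject.Filter('pdf',
                                    contents=['..\\src\\pdf\\SkPDFDocument.cpp'])])
writer.Write()  # emits pdf.vcproj via common.WriteOnDiff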

1034
third_party/gyp/pylib/gyp/MSVSSettings.py vendored Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,81 @@
#!/usr/bin/python2.4
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import common
import xml.dom
import xml_fix
#------------------------------------------------------------------------------
class Writer(object):
"""Visual Studio XML tool file writer."""
def __init__(self, tool_file_path):
"""Initializes the tool file.
Args:
tool_file_path: Path to the tool file.
"""
self.tool_file_path = tool_file_path
self.doc = None
def Create(self, name):
"""Creates the tool file document.
Args:
name: Name of the tool file.
"""
self.name = name
# Create XML doc
xml_impl = xml.dom.getDOMImplementation()
self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None)
# Add attributes to root element
self.n_root = self.doc.documentElement
self.n_root.setAttribute('Version', '8.00')
self.n_root.setAttribute('Name', self.name)
# Add rules section
self.n_rules = self.doc.createElement('Rules')
self.n_root.appendChild(self.n_rules)
def AddCustomBuildRule(self, name, cmd, description,
additional_dependencies,
outputs, extensions):
"""Adds a rule to the tool file.
Args:
name: Name of the rule.
description: Description of the rule.
cmd: Command line of the rule.
additional_dependencies: other files which may trigger the rule.
outputs: outputs of the rule.
extensions: extensions handled by the rule.
"""
n_rule = self.doc.createElement('CustomBuildRule')
n_rule.setAttribute('Name', name)
n_rule.setAttribute('ExecutionDescription', description)
n_rule.setAttribute('CommandLine', cmd)
n_rule.setAttribute('Outputs', ';'.join(outputs))
n_rule.setAttribute('FileExtensions', ';'.join(extensions))
n_rule.setAttribute('AdditionalDependencies',
';'.join(additional_dependencies))
self.n_rules.appendChild(n_rule)
def Write(self, writer=common.WriteOnDiff):
"""Writes the tool file."""
f = writer(self.tool_file_path)
fix = xml_fix.XmlFix()
self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
fix.Cleanup()
f.close()
#------------------------------------------------------------------------------
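A hedged sketch of driving the tool-file writer above (the MSVSToolFile module name, the rule, and the command line are assumptions for illustration; the sys.path setup mirrors the earlier sketch):

import sys
sys.path.insert(0, 'third_party/gyp/pylib/gyp')  # assumed checkout layout

import MSVSToolFile  # assumed module name for the writer shown above

tool_file = MSVSToolFile.Writer('pdf.rules')      # illustrative file name
tool_file.Create('pdf_rules')                     # illustrative rule-set name
tool_file.AddCustomBuildRule(
    name='gen_header',                            # illustrative custom rule
    cmd='python gen_header.py $(InputPath)',      # illustrative command line
    description='Generating header from $(InputName)',
    additional_dependencies=['gen_header.py'],
    outputs=['$(InputName).h'],
    extensions=['.in'])
tool_file.Write()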


@ -0,0 +1,182 @@
#!/usr/bin/python2.4
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import common
import os
import re
import socket # for gethostname
import xml.dom
import xml_fix
#------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if '/' in command or '\\' in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get('PATH','').split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r'[ \t\n]', arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
class Writer(object):
"""Visual Studio XML user user file writer."""
def __init__(self, user_file_path, version):
"""Initializes the user file.
Args:
user_file_path: Path to the user file.
"""
self.user_file_path = user_file_path
self.version = version
self.doc = None
def Create(self, name):
"""Creates the user file document.
Args:
name: Name of the user file.
"""
self.name = name
# Create XML doc
xml_impl = xml.dom.getDOMImplementation()
self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)
# Add attributes to root element
self.n_root = self.doc.documentElement
self.n_root.setAttribute('Version', self.version.ProjectVersion())
self.n_root.setAttribute('Name', self.name)
# Add configurations section
self.n_configs = self.doc.createElement('Configurations')
self.n_root.appendChild(self.n_configs)
def _AddConfigToNode(self, parent, config_type, config_name):
"""Adds a configuration to the parent node.
Args:
parent: Destination node.
config_type: Type of configuration node.
config_name: Configuration name.
"""
# Add configuration node and its attributes
n_config = self.doc.createElement(config_type)
n_config.setAttribute('Name', config_name)
parent.appendChild(n_config)
def AddConfig(self, name):
"""Adds a configuration to the project.
Args:
name: Configuration name.
"""
self._AddConfigToNode(self.n_configs, 'Configuration', name)
def AddDebugSettings(self, config_name, command, environment = {},
working_directory=""):
"""Adds a DebugSettings node to the user file for a particular config.
Args:
command: command line to run. First element in the list is the
executable. All elements of the command will be quoted if
necessary.
working_directory: directory in which to run the command. (optional)
"""
command = _QuoteWin32CommandLineArgs(command)
n_cmd = self.doc.createElement('DebugSettings')
abs_command = _FindCommandInPath(command[0])
n_cmd.setAttribute('Command', abs_command)
n_cmd.setAttribute('WorkingDirectory', working_directory)
n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
n_cmd.setAttribute('RemoteMachine', socket.gethostname())
if environment and isinstance(environment, dict):
n_cmd.setAttribute('Environment',
" ".join(['%s="%s"' % (key, val)
for (key,val) in environment.iteritems()]))
else:
n_cmd.setAttribute('Environment', '')
n_cmd.setAttribute('EnvironmentMerge', 'true')
# Currently these are all "dummy" values that we're just setting
# in the default manner that MSVS does it. We could use some of
# these to add additional capabilities, I suppose, but they might
# not have parity with other platforms then.
n_cmd.setAttribute('Attach', 'false')
n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger
n_cmd.setAttribute('Remote', '1')
n_cmd.setAttribute('RemoteCommand', '')
n_cmd.setAttribute('HttpUrl', '')
n_cmd.setAttribute('PDBPath', '')
n_cmd.setAttribute('SQLDebugging', '')
n_cmd.setAttribute('DebuggerFlavor', '0')
n_cmd.setAttribute('MPIRunCommand', '')
n_cmd.setAttribute('MPIRunArguments', '')
n_cmd.setAttribute('MPIRunWorkingDirectory', '')
n_cmd.setAttribute('ApplicationCommand', '')
n_cmd.setAttribute('ApplicationArguments', '')
n_cmd.setAttribute('ShimCommand', '')
n_cmd.setAttribute('MPIAcceptMode', '')
n_cmd.setAttribute('MPIAcceptFilter', '')
# Find the config, and add it if it doesn't exist.
found = False
for config in self.n_configs.childNodes:
if config.getAttribute("Name") == config_name:
found = True
if not found:
self.AddConfig(config_name)
# Add the DebugSettings onto the appropriate config.
for config in self.n_configs.childNodes:
if config.getAttribute("Name") == config_name:
config.appendChild(n_cmd)
break
def Write(self, writer=common.WriteOnDiff):
"""Writes the user file."""
f = writer(self.user_file_path)
self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
f.close()
#------------------------------------------------------------------------------
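A hedged sketch of the user-file writer above (the MSVSUserFile module name, executable, and arguments are assumptions for illustration): arguments with spaces or quotes are escaped by _QuoteWin32CommandLineArgs, and the executable is resolved through PATH by _FindCommandInPath.

import sys
sys.path.insert(0, 'third_party/gyp/pylib/gyp')  # assumed checkout layout

import MSVSUserFile  # assumed module name for the writer shown above
import MSVSVersion

version = MSVSVersion._CreateVersion('2008')
user_file = MSVSUserFile.Writer('pdf.vcproj.user', version)  # illustrative name
user_file.Create('pdf')
user_file.AddDebugSettings('Debug|Win32',
                           ['tests.exe', '--match', 'PDF Primitives'],  # illustrative command
                           environment={'SKIA_PDF_DEBUG': '1'},         # illustrative env var
                           working_directory='out\\Debug')
user_file.Write()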

199
third_party/gyp/pylib/gyp/MSVSVersion.py vendored Executable file

@ -0,0 +1,199 @@
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import os
import re
import subprocess
import sys
class VisualStudioVersion:
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
self.project_version = project_version
self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj
def ShortName(self):
return self.short_name
def Description(self):
"""Get the full description of the version."""
return self.description
def SolutionVersion(self):
"""Get the version number of the sln files."""
return self.solution_version
def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version
def FlatSolution(self):
return self.flat_sln
def UsesVcxproj(self):
"""Returns true if this version uses a vcxproj file."""
return self.uses_vcxproj
def ProjectExtension(self):
"""Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj'
def _RegistryGetValue(key, value):
"""Use reg.exe to read a paricular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
key: The registry key to read from.
value: The particular value to read.
Return:
The contents there, or None for failure.
"""
# Skip if not on Windows.
if sys.platform not in ('win32', 'cygwin'):
return None
# Run reg.exe.
cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
'query', key, '/v', value]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
text = p.communicate()[0]
# Require a successful return value.
if p.returncode:
return None
# Extract value.
match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
if not match:
return None
return match.group(1)
def _RegistryKeyExists(key):
"""Use reg.exe to see if a key exists.
Args:
key: The registry key to check.
Return:
True if the key exists
"""
# Skip if not on Windows.
if sys.platform not in ('win32', 'cygwin'):
return None
# Run reg.exe.
cmd = [os.path.join(os.environ.get('WINDIR', ''), 'System32', 'reg.exe'),
'query', key]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p.returncode == 0
def _CreateVersion(name):
versions = {
'2010': VisualStudioVersion('2010',
'Visual Studio 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True),
'2008': VisualStudioVersion('2008',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=False,
uses_vcxproj=False),
'2008e': VisualStudioVersion('2008e',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=True,
uses_vcxproj=False),
'2005': VisualStudioVersion('2005',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=False,
uses_vcxproj=False),
'2005e': VisualStudioVersion('2005e',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=True,
uses_vcxproj=False),
}
return versions[str(name)]
def _DetectVisualStudioVersions():
"""Collect the list of installed visual studio versions.
Returns:
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
Only versions 8-10 are considered.
Possibilities are:
2005 - Visual Studio 2005 (8)
2008 - Visual Studio 2008 (9)
2010 - Visual Studio 2010 (10)
"""
version_to_year = {'8.0': '2005', '9.0': '2008', '10.0': '2010'}
versions = []
# For now, prefer versions before VS2010
for version in ('9.0', '8.0', '10.0'):
# Check if VS2010 and later is installed as specified by
# http://msdn.microsoft.com/en-us/library/bb164659.aspx
key32 = r'HKLM\SOFTWARE\Microsoft\DevDiv\VS\Servicing\%s' % version
key64 = r'HKLM\SOFTWARE\Wow6432Node\Microsoft\DevDiv\VS\Servicing\%sD' % (
version)
if _RegistryKeyExists(key32) or _RegistryKeyExists(key64):
# Add this one.
# TODO(jeanluc) This does not check for an express version.
versions.append(_CreateVersion(version_to_year[version]))
continue
# Get the install dir for this version.
key = r'HKLM\Software\Microsoft\VisualStudio\%s' % version
path = _RegistryGetValue(key, 'InstallDir')
if not path:
continue
# Check for full.
if os.path.exists(os.path.join(path, 'devenv.exe')):
# Add this one.
versions.append(_CreateVersion(version_to_year[version]))
# Check for express.
elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e'))
return versions
def SelectVisualStudioVersion(version='auto'):
"""Select which version of Visual Studio projects to generate.
Arguments:
version: Hook to allow caller to force a particular version (vs auto).
Returns:
An object representing a visual studio project format version.
"""
# In auto mode, check environment variable for override.
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
# In auto mode, pick the most preferred version present.
if version == 'auto':
versions = _DetectVisualStudioVersions()
if not versions:
# Default to 2005.
return _CreateVersion('2005')
return versions[0]
# Convert version string into a version object.
return _CreateVersion(version)
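A short usage sketch of the version selection above (same sys.path assumption as the earlier sketches): SelectVisualStudioVersion honors GYP_MSVS_VERSION, then probes the registry, and defaults to Visual Studio 2005 when nothing is found.

import sys
sys.path.insert(0, 'third_party/gyp/pylib/gyp')  # assumed checkout layout

import MSVSVersion

version = MSVSVersion.SelectVisualStudioVersion()
print(version.Description())       # e.g. 'Visual Studio 2008'
print(version.SolutionVersion())   # e.g. '10.00'
print(version.ProjectExtension())  # '.vcproj' or '.vcxproj'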

200
third_party/gyp/pylib/gyp/SCons.py vendored Normal file

@ -0,0 +1,200 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
SCons generator.
This contains class definitions and supporting functions for generating
pieces of SCons files for the different types of GYP targets.
"""
import os
def WriteList(fp, list, prefix='',
separator=',\n ',
preamble=None,
postamble=None):
fp.write(preamble or '')
fp.write((separator or ' ').join([prefix + l for l in list]))
fp.write(postamble or '')
class TargetBase(object):
"""
Base class for a SCons representation of a GYP target.
"""
is_ignored = False
target_prefix = ''
target_suffix = ''
def __init__(self, spec):
self.spec = spec
def full_product_name(self):
"""
Returns the full name of the product being built:
* Uses 'product_name' if it's set, else prefix + 'target_name'.
* Prepends 'product_dir' if set.
* Appends SCons suffix variables for the target type (or
product_extension).
"""
suffix = self.target_suffix
product_extension = self.spec.get('product_extension')
if product_extension:
suffix = '.' + product_extension
prefix = self.spec.get('product_prefix', self.target_prefix)
name = self.spec['target_name']
name = prefix + self.spec.get('product_name', name) + suffix
product_dir = self.spec.get('product_dir')
if product_dir:
name = os.path.join(product_dir, name)
else:
name = os.path.join(self.out_dir, name)
return name
def write_input_files(self, fp):
"""
Writes the definition of the input files (sources).
"""
sources = self.spec.get('sources')
if not sources:
fp.write('\ninput_files = []\n')
return
preamble = '\ninput_files = [\n '
postamble = ',\n]\n'
WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble)
def builder_call(self):
"""
Returns the actual SCons builder call to build this target.
"""
name = self.full_product_name()
return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)
def write_target(self, fp, src_dir='', pre=''):
"""
Writes the lines necessary to build this target.
"""
fp.write('\n' + pre)
fp.write('_outputs = %s\n' % self.builder_call())
fp.write('target_files.extend(_outputs)\n')
class NoneTarget(TargetBase):
"""
A GYP target type of 'none', implicitly or explicitly.
"""
def write_target(self, fp, pre=''):
fp.write('\ntarget_files.extend(input_files)\n')
class SettingsTarget(TargetBase):
"""
A GYP target type of 'settings'.
"""
is_ignored = True
compilable_sources_template = """
_result = []
for infile in input_files:
if env.compilable(infile):
if (type(infile) == type('')
and (infile.startswith(%(src_dir)r)
or not os.path.isabs(env.subst(infile)))):
# Force files below the build directory by replacing all '..'
# elements in the path with '__':
base, ext = os.path.splitext(os.path.normpath(infile))
base = [d == '..' and '__' or d for d in base.split('/')]
base = os.path.join(*base)
object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
if not infile.startswith(%(src_dir)r):
infile = %(src_dir)r + infile
infile = env.%(name)s(object, infile)[0]
else:
infile = env.%(name)s(infile)[0]
_result.append(infile)
input_files = _result
"""
class CompilableSourcesTargetBase(TargetBase):
"""
An abstract base class for targets that compile their source files.
We explicitly transform compilable files into object files,
even though SCons could infer that for us, because we want
to control where the object file ends up. (The implicit rules
in SCons always put the object file next to the source file.)
"""
intermediate_builder_name = None
def write_target(self, fp, src_dir='', pre=''):
if self.intermediate_builder_name is None:
raise NotImplementedError
if src_dir and not src_dir.endswith('/'):
src_dir += '/'
variables = {
'src_dir': src_dir,
'name': self.intermediate_builder_name,
}
fp.write(compilable_sources_template % variables)
super(CompilableSourcesTargetBase, self).write_target(fp)
class ProgramTarget(CompilableSourcesTargetBase):
"""
A GYP target type of 'executable'.
"""
builder_name = 'GypProgram'
intermediate_builder_name = 'StaticObject'
target_prefix = '${PROGPREFIX}'
target_suffix = '${PROGSUFFIX}'
out_dir = '${TOP_BUILDDIR}'
class StaticLibraryTarget(CompilableSourcesTargetBase):
"""
A GYP target type of 'static_library'.
"""
builder_name = 'GypStaticLibrary'
intermediate_builder_name = 'StaticObject'
target_prefix = '${LIBPREFIX}'
target_suffix = '${LIBSUFFIX}'
out_dir = '${LIB_DIR}'
class SharedLibraryTarget(CompilableSourcesTargetBase):
"""
A GYP target type of 'shared_library'.
"""
builder_name = 'GypSharedLibrary'
intermediate_builder_name = 'SharedObject'
target_prefix = '${SHLIBPREFIX}'
target_suffix = '${SHLIBSUFFIX}'
out_dir = '${LIB_DIR}'
class LoadableModuleTarget(CompilableSourcesTargetBase):
"""
A GYP target type of 'loadable_module'.
"""
builder_name = 'GypLoadableModule'
intermediate_builder_name = 'SharedObject'
target_prefix = '${SHLIBPREFIX}'
target_suffix = '${SHLIBSUFFIX}'
out_dir = '${TOP_BUILDDIR}'
TargetMap = {
None : NoneTarget,
'none' : NoneTarget,
'settings' : SettingsTarget,
'executable' : ProgramTarget,
'static_library' : StaticLibraryTarget,
'shared_library' : SharedLibraryTarget,
'loadable_module' : LoadableModuleTarget,
}
def Target(spec):
return TargetMap[spec.get('type')](spec)
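A hedged sketch of the Target factory above (the spec contents are illustrative; this assumes the pylib directory is on sys.path so the gyp package is importable): the 'type' field picks the wrapper class, and the wrapper knows how to emit the SCons input list and product name.

import sys
sys.path.insert(0, 'third_party/gyp/pylib')  # assumed checkout layout

import gyp.SCons as SCons

spec = {
    'type': 'static_library',
    'target_name': 'pdf',
    'sources': ['../src/pdf/SkPDFDocument.cpp'],
}
target = SCons.Target(spec)           # -> StaticLibraryTarget
target.write_input_files(sys.stdout)  # prints the input_files assignment
print(target.full_product_name())     # ${LIB_DIR}/${LIBPREFIX}pdf${LIBSUFFIX}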

461
third_party/gyp/pylib/gyp/__init__.py vendored Normal file

@ -0,0 +1,461 @@
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
# Default debug modes for GYP
debug = {}
# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message):
if mode in gyp.debug.keys():
print "%s: %s" % (mode.upper(), message)
def FindBuildFiles():
extension = '.gyp'
files = os.listdir(os.getcwd())
build_files = []
for file in files:
if file[-len(extension):] == extension:
build_files.append(file)
return build_files
def Load(build_files, format, default_variables={},
includes=[], depth='.', params={}, check=False, circular_check=True):
"""
Loads one or more specified build files.
default_variables and includes will be copied before use.
Returns the generator for the specified format and the
data returned by loading the specified build files.
"""
default_variables = copy.copy(default_variables)
# Default variables provided by this program and its modules should be
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
# avoiding collisions with user and automatic variables.
default_variables['GENERATOR'] = format
generator_name = 'gyp.generator.' + format
# These parameters are passed in order (as opposed to by key)
# because ActivePython cannot handle key parameters to __import__.
generator = __import__(generator_name, globals(), locals(), generator_name)
for (key, val) in generator.generator_default_variables.items():
default_variables.setdefault(key, val)
# Give the generator the opportunity to set additional variables based on
# the params it will receive in the output phase.
if getattr(generator, 'CalculateVariables', None):
generator.CalculateVariables(default_variables, params)
# Fetch the generator specific info that gets fed to input, we use getattr
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
'generator_wants_absolute_build_file_paths':
getattr(generator, 'generator_wants_absolute_build_file_paths', False),
'generator_handles_variants':
getattr(generator, 'generator_handles_variants', False),
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
getattr(generator, 'generator_additional_path_sections', []),
'extra_sources_for_rules':
getattr(generator, 'generator_extra_sources_for_rules', []),
'generator_supports_multiple_toolsets':
getattr(generator, 'generator_supports_multiple_toolsets', False),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check)
return [generator] + result
def NameValueListToDict(name_value_list):
"""
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
of the pairs. If a string is simply NAME, then the value in the dictionary
is set to True. If VALUE can be converted to an integer, it is.
"""
result = { }
for item in name_value_list:
tokens = item.split('=', 1)
if len(tokens) == 2:
# If we can make it an int, use that, otherwise, use the string.
try:
token_value = int(tokens[1])
except ValueError:
token_value = tokens[1]
# Set the variable to the supplied value.
result[tokens[0]] = token_value
else:
# No value supplied, treat it as a boolean and set it.
result[tokens[0]] = True
return result
def ShlexEnv(env_name):
flags = os.environ.get(env_name, [])
if flags:
flags = shlex.split(flags)
return flags
def FormatOpt(opt, value):
if opt.startswith('--'):
return '%s=%s' % (opt, value)
return opt + value
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
"""Regenerate a list of command line flags, for an option of action='append'.
The |env_name|, if given, is checked in the environment and used to generate
an initial list of options, then the options that were specified on the
command line (given in |values|) are appended. This matches the handling of
environment variables and command line flags where command line flags override
the environment, while not requiring the environment to be set when the flags
are used again.
"""
flags = []
if options.use_environment and env_name:
for flag_value in ShlexEnv(env_name):
flags.append(FormatOpt(flag, predicate(flag_value)))
if values:
for flag_value in values:
flags.append(FormatOpt(flag, predicate(flag_value)))
return flags
def RegenerateFlags(options):
"""Given a parsed options object, and taking the environment variables into
account, returns a list of flags that should regenerate an equivalent options
object (even in the absence of the environment variables.)
Any path options will be normalized relative to depth.
The format flag is not included, as it is assumed the calling generator will
set that as appropriate.
"""
def FixPath(path):
path = gyp.common.FixIfRelativePath(path, options.depth)
if not path:
return os.path.curdir
return path
def Noop(value):
return value
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems():
opt = metadata['opt']
value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop
action = metadata['action']
env_name = metadata['env_name']
if action == 'append':
flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
env_name, options))
elif action in ('store', None): # None is a synonym for 'store'.
if value:
flags.append(FormatOpt(opt, value_predicate(value)))
elif options.use_environment and env_name and os.environ.get(env_name):
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
elif action in ('store_true', 'store_false'):
if ((action == 'store_true' and value) or
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
env_name))
else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt))
return flags
class RegeneratableOptionParser(optparse.OptionParser):
def __init__(self):
self.__regeneratable_options = {}
optparse.OptionParser.__init__(self)
def add_option(self, *args, **kw):
"""Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
env_name: name of environment variable that additional values for this
option come from.
type: adds type='path', to tell the regenerator that the values of
this option need to be made relative to options.depth
"""
env_name = kw.pop('env_name', None)
if 'dest' in kw and kw.pop('regenerate', True):
dest = kw['dest']
# The path type is needed for regenerating, for optparse we can just treat
# it as a string.
type = kw.get('type')
if type == 'path':
kw['type'] = 'string'
self.__regeneratable_options[dest] = {
'action': kw.get('action'),
'type': type,
'env_name': env_name,
'opt': args[0],
}
optparse.OptionParser.add_option(self, *args, **kw)
def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args
def main(args):
my_name = os.path.basename(sys.argv[0])
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('--msvs-version', dest='msvs_version',
regenerate=False,
help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables" '
'and "general"')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
# --no-circular-check disables the check for circular relationships between
# .gyp files. These relationships should not exist, but they've only been
# observed to be harmful with the Xcode generator. Chromium's .gyp files
# currently have some circular relationships on non-Mac platforms, so this
# option allows the strict behavior to be used on Macs and the lenient
# behavior to be used elsewhere.
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
parser.add_option('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
# We read a few things from ~/.gyp, so set up a var for that.
home_vars = ['HOME']
if sys.platform in ('cygwin', 'win32'):
home_vars.append('USERPROFILE')
home = None
home_dot_gyp = None
for home_var in home_vars:
home = os.getenv(home_var)
if home != None:
home_dot_gyp = os.path.join(home, '.gyp')
if not os.path.exists(home_dot_gyp):
home_dot_gyp = None
else:
break
# TODO(thomasvl): add support for ~/.gyp/defaults
options, build_files_arg = parser.parse_args(args)
build_files = build_files_arg
if not options.formats:
# If no format was given on the command line, then check the env variable.
generate_formats = []
if options.use_environment:
generate_formats = os.environ.get('GYP_GENERATORS', [])
if generate_formats:
generate_formats = re.split('[\s,]', generate_formats)
if generate_formats:
options.formats = generate_formats
else:
# Nothing in the variable, default based on platform.
options.formats = [ {'darwin': 'xcode',
'win32': 'msvs',
'cygwin': 'msvs',
'freebsd7': 'make',
'freebsd8': 'make',
'linux2': 'make',
'openbsd4': 'make',
'sunos5': 'make',}[sys.platform] ]
if not options.generator_output and options.use_environment:
g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
if g_o:
options.generator_output = g_o
for mode in options.debug:
gyp.debug[mode] = 1
# Do an extra check to avoid work when we're not debugging.
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, 'running with these options:')
for option, value in sorted(options.__dict__.items()):
if option[0] == '_':
continue
if isinstance(value, basestring):
DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
else:
DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
if not build_files:
build_files = FindBuildFiles()
if not build_files:
print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
(my_name, my_name)
return 1
# TODO(mark): Chromium-specific hack!
# For Chromium, the gyp "depth" variable should always be a relative path
# to Chromium's top-level "src" directory. If no depth variable was set
# on the command line, try to find a "src" directory by looking at the
# absolute path to each build file's directory. The first "src" component
# found will be treated as though it were the path used for --depth.
if not options.depth:
for build_file in build_files:
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
del build_file_dir_components[index]
# If the inner loop found something, break without advancing to another
# build file.
if options.depth:
break
if not options.depth:
raise Exception, \
'Could not automatically locate src directory. This is a ' + \
'temporary Chromium feature that will be removed. Use ' + \
'--depth as a workaround.'
# If toplevel-dir is not set, we assume that depth is the root of our source
# tree.
if not options.toplevel_dir:
options.toplevel_dir = options.depth
# -D on the command line sets variable defaults - D isn't just for define,
# it's for default. Perhaps there should be a way to force (-F?) a
# variable's value so that it can't be overridden by anything else.
cmdline_default_variables = {}
defines = []
if options.use_environment:
defines += ShlexEnv('GYP_DEFINES')
if options.defines:
defines += options.defines
cmdline_default_variables = NameValueListToDict(defines)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL,
"cmdline_default_variables: %s" % cmdline_default_variables)
# Set up includes.
includes = []
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
# .gyp file that's loaded, before anything else is included.
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
includes.append(default_include)
# Command-line --include files come after the default include.
if options.includes:
includes.extend(options.includes)
# Generator flags should be prefixed with the target generator since they
# are global across all generator runs.
gen_flags = []
if options.use_environment:
gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
if options.generator_flags:
gen_flags += options.generator_flags
generator_flags = NameValueListToDict(gen_flags)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
# TODO: Remove this and the option after we've gotten folks to move to the
# generator flag.
if options.msvs_version:
print >>sys.stderr, \
'DEPRECATED: Use generator flag (-G msvs_version=' + \
options.msvs_version + ') instead of --msvs-version=' + \
options.msvs_version
generator_flags['msvs_version'] = options.msvs_version
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
params = {'options': options,
'build_files': build_files,
'generator_flags': generator_flags,
'cwd': os.getcwd(),
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format,
cmdline_default_variables,
includes, options.depth,
params, options.check,
options.circular_check)
# TODO(mark): Pass |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
# a list, and not the whole data dict.
# NOTE: flat_list is the flattened dependency graph specifying the order
# that targets may be built. Build systems that operate serially or that
# need to have dependencies defined before dependents reference them should
# generate targets in the order specified in flat_list.
generator.GenerateOutput(flat_list, targets, data, params)
# Done
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
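To make the define handling above concrete, here is a minimal sketch of how GYP_DEFINES from the environment and -D options from the command line end up in cmdline_default_variables. It is illustrative only: it assumes ShlexEnv is a thin wrapper around shlex.split of an environment variable and that NameValueListToDict splits each name=value pair on the first '=', with later entries winning.

import os
import shlex

# Environment defines are read first (assumed ShlexEnv behaviour)...
defines = shlex.split(os.environ.get('GYP_DEFINES', ''))   # e.g. 'OS=mac skia_scalar=float'
# ...then -D options from the command line are appended, so they take priority.
defines += ['OS=linux']
cmdline_default_variables = dict(d.split('=', 1) for d in defines)
# -> {'OS': 'linux', 'skia_scalar': 'float'}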

345
third_party/gyp/pylib/gyp/common.py vendored Normal file

@ -0,0 +1,345 @@
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
import filecmp
import os.path
import re
import tempfile
import sys
def ExceptionAppend(e, msg):
"""Append a message to the given exception's message."""
if not e.args:
e.args = (msg,)
elif len(e.args) == 1:
e.args = (str(e.args[0]) + ' ' + msg,)
else:
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
target_split = target.rsplit(':', 1)
if len(target_split) == 2:
[build_file, target] = target_split
else:
build_file = None
target_split = target.rsplit('#', 1)
if len(target_split) == 2:
[target, toolset] = target_split
else:
toolset = None
return [build_file, target, toolset]
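# A couple of worked examples of the parsing above (added for illustration; the
# results follow directly from the two rsplit calls):
#   ParseQualifiedTarget('chrome.gyp:base#host')    -> ['chrome.gyp', 'base', 'host']
#   ParseQualifiedTarget('base')                    -> [None, 'base', None]
#   ParseQualifiedTarget('C:\\skia\\skia.gyp:core') -> ['C:\\skia\\skia.gyp', 'core', None]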
def ResolveTarget(build_file, target, toolset):
# This function resolves a target into a canonical form:
# - a fully defined build file, either absolute or relative to the current
# directory
# - a target name
# - a toolset
#
# build_file is the file relative to which 'target' is defined.
# target is the qualified target.
# toolset is the default toolset for that target.
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
if parsed_build_file:
if build_file:
# If a relative path, parsed_build_file is relative to the directory
# containing build_file. If build_file is not in the current directory,
# parsed_build_file is not a usable path as-is. Resolve it by
# interpreting it as relative to build_file. If parsed_build_file is
# absolute, it is usable as a path regardless of the current directory,
# and os.path.join will return it as-is.
build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
parsed_build_file))
else:
build_file = parsed_build_file
if parsed_toolset:
toolset = parsed_toolset
return [build_file, target, toolset]
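# Illustrative example: a target referenced as '../other.gyp:lib' from within
# 'sub/dir/main.gyp' is resolved relative to the referencing file:
#   ResolveTarget('sub/dir/main.gyp', '../other.gyp:lib', 'target')
#     -> ['sub/other.gyp', 'lib', 'target']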
def BuildFile(fully_qualified_target):
# Extracts the build file from the fully qualified target.
return ParseQualifiedTarget(fully_qualified_target)[0]
def QualifiedTarget(build_file, target, toolset):
# "Qualified" means the file that a target was defined in and the target
# name, separated by a colon, suffixed by a # and the toolset name:
# /path/to/file.gyp:target_name#toolset
fully_qualified = build_file + ':' + target
if toolset:
fully_qualified = fully_qualified + '#' + toolset
return fully_qualified
def RelativePath(path, relative_to):
# Assuming both |path| and |relative_to| are relative to the current
# directory, returns a relative path that identifies path relative to
# relative_to.
# Convert to absolute (and therefore normalized paths).
path = os.path.abspath(path)
relative_to = os.path.abspath(relative_to)
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
# Determine how much of the prefix the two paths share.
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
# Put enough ".." components to back up out of relative_to to the common
# prefix, and then append the part of path_split after the common prefix.
relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
path_split[prefix_len:]
if len(relative_split) == 0:
# The paths were the same.
return ''
# Turn it back into a string and we're done.
return os.path.join(*relative_split)
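# Illustrative examples (both arguments are interpreted relative to the current
# directory, as noted above):
#   RelativePath('out/Debug/gen', 'out/Release') -> '../Debug/gen'
#   RelativePath('src/core', 'src/core')         -> ''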
def FixIfRelativePath(path, relative_to):
# Like RelativePath but returns |path| unchanged if it is absolute.
if os.path.isabs(path):
return path
return RelativePath(path, relative_to)
def UnrelativePath(path, relative_to):
# Assuming that |relative_to| is relative to the current directory, and |path|
# is a path relative to the dirname of |relative_to|, returns a path that
# identifies |path| relative to the current directory.
rel_dir = os.path.dirname(relative_to)
return os.path.normpath(os.path.join(rel_dir, path))
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
# \t, \n, space parameter separators
# # comments
# $ expansions (quoted to always expand within one argument)
# % called out by IEEE 1003.1 XCU.2.2
# & job control
# ' quoting
# (, ) subshell execution
# *, ?, [ pathname expansion
# ; command delimiter
# <, >, | redirection
# = assignment
# {, } brace expansion (bash)
# ~ tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern. _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
# " to prevent POSIX shells from interpreting this character for quoting
# \ to prevent POSIX shells from interpreting this character for escaping
# ` to prevent POSIX shells from interpreting this character for command
# substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled. bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding. Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable. Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')
def EncodePOSIXShellArgument(argument):
"""Encodes |argument| suitably for consumption by POSIX shells.
argument may be quoted and escaped as necessary to ensure that POSIX shells
treat the returned value as a literal representing the argument passed to
this function. Parameter (variable) expansions beginning with $ are allowed
to remain intact without escaping the $, to allow the argument to contain
references to variables to be expanded by the shell.
"""
if not isinstance(argument, str):
argument = str(argument)
if _quote.search(argument):
quote = '"'
else:
quote = ''
encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
return encoded
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
"""
encoded_arguments = []
for argument in list:
encoded_arguments.append(EncodePOSIXShellArgument(argument))
return ' '.join(encoded_arguments)
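# Illustrative examples of the quoting and escaping rules above:
#   EncodePOSIXShellArgument('core.cpp')     -> 'core.cpp'       (nothing special)
#   EncodePOSIXShellArgument('My File.txt')  -> '"My File.txt"'  (space forces quoting)
#   EncodePOSIXShellArgument('say "hi"')     -> "say \"hi\"" as seen by the shell
#     (the space forces quoting and the embedded double-quotes are backslash-escaped)
#   EncodePOSIXShellList(['cp', 'a b', 'c']) -> 'cp "a b" c'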
def DeepDependencyTargets(target_dicts, roots):
"""Returns the recursive list of target dependencies."""
dependencies = set()
pending = set(roots)
while pending:
# Pluck out one.
r = pending.pop()
# Skip if visited already.
if r in dependencies:
continue
# Add it.
dependencies.add(r)
# Add its children.
spec = target_dicts[r]
pending.update(set(spec.get('dependencies', [])))
pending.update(set(spec.get('dependencies_original', [])))
return list(dependencies - set(roots))
def BuildFileTargets(target_list, build_file):
"""From a target_list, returns the subset from the specified build_file.
"""
return [p for p in target_list if BuildFile(p) == build_file]
def AllTargets(target_list, target_dicts, build_file):
"""Returns all targets (direct and dependencies) for the specified build_file.
"""
bftargets = BuildFileTargets(target_list, build_file)
deptargets = DeepDependencyTargets(target_dicts, bftargets)
return bftargets + deptargets
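# Illustrative example: with
#   target_list  = ['a.gyp:app#target', 'b.gyp:lib#target']
#   target_dicts = {'a.gyp:app#target': {'dependencies': ['b.gyp:lib#target']},
#                   'b.gyp:lib#target': {}}
# AllTargets(target_list, target_dicts, 'a.gyp') returns the build file's own
# target plus its recursive dependencies: ['a.gyp:app#target', 'b.gyp:lib#target'].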
def WriteOnDiff(filename):
"""Write to a file only if the new contents differ.
Arguments:
filename: name of the file to potentially write to.
Returns:
A file like object which will write to temporary file and only overwrite
the target if it differs (on close).
"""
class Writer:
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
# Pick temporary file.
tmp_fd, self.tmp_path = tempfile.mkstemp(
suffix='.tmp',
prefix=os.path.split(filename)[1] + '.gyp.',
dir=os.path.split(filename)[0])
try:
self.tmp_file = os.fdopen(tmp_fd, 'wb')
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
def __getattr__(self, attrname):
# Delegate everything else to self.tmp_file
return getattr(self.tmp_file, attrname)
def close(self):
try:
# Close tmp file.
self.tmp_file.close()
# Determine if different.
same = False
try:
same = filecmp.cmp(self.tmp_path, filename, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(self.tmp_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(self.tmp_path, 0666 & ~umask)
if sys.platform == 'win32' and os.path.exists(filename):
# NOTE: on windows (but not cygwin) rename will not replace an
# existing file, so it must be preceded with a remove. Sadly there
# is no way to make the switch atomic.
os.remove(filename)
os.rename(self.tmp_path, filename)
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
return Writer()
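# Typical usage of WriteOnDiff (illustrative): the returned object is used like an
# ordinary writable file, and the destination is only replaced on close() if the
# new contents differ, so an unchanged file keeps its timestamp.
#   out = WriteOnDiff('Makefile')
#   out.write('all:\n')
#   out.close()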
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
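# Illustrative examples: order is preserved and later duplicates are dropped;
# idfun chooses the identity used for comparison.
#   uniquer([3, 1, 3, 2, 1])                   -> [3, 1, 2]
#   uniquer(['a.gyp:x', 'A.GYP:X'], str.lower) -> ['a.gyp:x']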

121
third_party/gyp/pylib/gyp/easy_xml.py vendored Normal file

@ -0,0 +1,121 @@
#!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import xml.dom
import xml_fix
import common
class EasyXml(object):
""" Class to easily create XML files with substantial pre-defined structures.
Visual Studio files have a lot of pre-defined structures. This class makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
For this class, an XML element is represented as a list composed of:
1. The name of the element, a string,
  2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
Example 1:
<test/>
becomes
['test']
Example 2:
<myelement a='value1' b='value2'>
<childtype>This is</childtype>
<childtype>it!</childtype>
</myelement>
becomes
['myelement', {'a':'value1', 'b':'value2'},
['childtype', 'This is'],
['childtype', 'it!'],
]
"""
def __init__(self, name, attributes=None):
""" Constructs an object representing an XML document.
Args:
name: A string, the name of the root element.
attributes: A dictionary, the attributes of the root.
"""
xml_impl = xml.dom.getDOMImplementation()
self.doc = xml_impl.createDocument(None, name, None)
if attributes:
self.SetAttributes(self.doc.documentElement, attributes)
def AppendChildren(self, parent, children_specifications):
""" Appends multiple children.
Args:
parent: The node to which the children will be added.
children_specifications: A list of node specifications.
"""
for specification in children_specifications:
# If it's a string, append a text node.
# Otherwise append an XML node.
if isinstance(specification, str):
parent.appendChild(self.doc.createTextNode(specification))
else:
self.AppendNode(parent, specification)
def AppendNode(self, parent, specification):
""" Appends multiple children.
Args:
parent: The node to which the child will be added.
      specification: A list, the node specification. The first
entry is the name of the element. If the second entry is a
dictionary, it is the attributes. The remaining entries of the
list are the sub-elements.
Returns:
The XML element created.
"""
name = specification[0]
if not isinstance(name, str):
raise Exception('The first item of an EasyXml specification should be '
'a string. Specification was ' + str(specification))
element = self.doc.createElement(name)
parent.appendChild(element)
rest = specification[1:]
# The second element is optionally a dictionary of the attributes.
if rest and isinstance(rest[0], dict):
self.SetAttributes(element, rest[0])
rest = rest[1:]
if rest:
self.AppendChildren(element, rest)
return element
def SetAttributes(self, element, attribute_description):
""" Sets the attributes of an element.
Args:
element: The node to which the child will be added.
attribute_description: A dictionary that maps attribute names to
attribute values.
"""
for attribute, value in attribute_description.iteritems():
element.setAttribute(attribute, value)
def Root(self):
""" Returns the root element. """
return self.doc.documentElement
def WriteIfChanged(self, path):
""" Writes the XML doc but don't touch the file if unchanged. """
f = common.WriteOnDiff(path)
fix = xml_fix.XmlFix()
self.doc.writexml(f, encoding='utf-8', addindent='', newl='')
fix.Cleanup()
f.close()
def __str__(self):
""" Converts the doc to a string. """
return self.doc.toxml()
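A minimal usage sketch of the class defined above, mirroring the nested-list format described in its docstring; the element names and output path are hypothetical:

doc = EasyXml('Project', {'DefaultTargets': 'Build'})
doc.AppendNode(doc.Root(),
               ['PropertyGroup', {'Label': 'Globals'},
                ['Keyword', 'Win32Proj']])
print str(doc)                         # '<?xml version="1.0" ?><Project ...>...</Project>'
doc.WriteIfChanged('example.vcxproj')  # only rewrites the file when the XML differs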


@ -0,0 +1,92 @@
#!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
xml = easy_xml.EasyXml('test')
self.assertEqual(str(xml), '<?xml version="1.0" ?><test/>')
def test_EasyXml_simple_with_attributes(self):
xml = easy_xml.EasyXml('test2', {'a': 'value1', 'b': 'value2'})
self.assertEqual(str(xml),
'<?xml version="1.0" ?><test2 a="value1" b="value2"/>')
def test_EasyXml_add_node(self):
# We want to create:
target = ('<?xml version="1.0" ?>'
'<test3>'
'<GrandParent>'
'<Parent1>'
'<Child/>'
'</Parent1>'
'<Parent2/>'
'</GrandParent>'
'</test3>')
# Do it the hard way first:
xml = easy_xml.EasyXml('test3')
grand_parent = xml.AppendNode(xml.Root(), ['GrandParent'])
parent1 = xml.AppendNode(grand_parent, ['Parent1'])
parent2 = xml.AppendNode(grand_parent, ['Parent2'])
xml.AppendNode(parent1, ['Child'])
self.assertEqual(str(xml), target)
# Do it the easier way:
xml = easy_xml.EasyXml('test3')
xml.AppendNode(xml.Root(),
['GrandParent',
['Parent1', ['Child']],
['Parent2']])
self.assertEqual(str(xml), target)
def test_EasyXml_complex(self):
# We want to create:
target = ('<?xml version="1.0" ?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\''
'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.EasyXml('Project')
xml.AppendChildren(xml.Root(), [
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(str(xml), target)
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,88 @@
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypd output module
This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.
This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.
It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as "included_files" and "*_excluded" will be present. Output will
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.
The specific formatting of the output generated by this module is subject
to change.
"""
import gyp.common
import errno
import os
import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}
# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True
# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
[input_file, target] = \
gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != '.gyp':
continue
input_file_stem = input_file[:-4]
output_file = input_file_stem + params['options'].suffix + '.gypd'
if not output_file in output_files:
output_files[output_file] = input_file
for output_file, input_file in output_files.iteritems():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()


@ -0,0 +1,57 @@
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

2250
third_party/gyp/pylib/gyp/input.py vendored Normal file

File diff suppressed because it is too large


@ -0,0 +1,70 @@
#!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tempfile
import shutil
import subprocess
def TestCommands(commands, files={}, env={}):
"""Run commands in a temporary directory, returning true if they all succeed.
Return false on failures or if any commands produce output.
Arguments:
commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd']
each will be expanded with Python %-expansion using env first.
files: a dictionary mapping filename to contents;
files will be created in the temporary directory before running
the command.
env: a dictionary of strings to expand commands with.
"""
tempdir = tempfile.mkdtemp()
try:
for name, contents in files.items():
f = open(os.path.join(tempdir, name), 'wb')
f.write(contents)
f.close()
for command in commands:
proc = subprocess.Popen(command % env, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=tempdir)
output = proc.communicate()[0]
if proc.returncode != 0 or output:
return False
return True
finally:
shutil.rmtree(tempdir)
return False
def TestArSupportsT(ar_command='ar', cc_command='cc'):
"""Test whether 'ar' supports the 'T' flag."""
return TestCommands(['%(cc)s -c test.c',
'%(ar)s crsT test.a test.o',
'%(cc)s test.a'],
files={'test.c': 'int main(){}'},
env={'ar': ar_command, 'cc': cc_command})
def TestLinkerSupportsThreads(cc_command='cc'):
"""Test whether the linker supports the --threads flag."""
return TestCommands(['%(cc)s -Wl,--threads test.c'],
files={'test.c': 'int main(){}'},
env={'cc': cc_command})
if __name__ == '__main__':
# Run the various test functions and print the results.
def RunTest(description, function, **kwargs):
print "Testing " + description + ':',
if function(**kwargs):
print 'ok'
else:
print 'fail'
RunTest("ar 'T' flag", TestArSupportsT)
RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc')
RunTest("ld --threads", TestLinkerSupportsThreads)

File diff suppressed because it is too large

70
third_party/gyp/pylib/gyp/xml_fix.py vendored Normal file

@ -0,0 +1,70 @@
#!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies a fix to CR LF TAB handling in xml.dom.
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
import xml.dom.minidom
def _Replacement_write_data(writer, data, is_attrib=False):
"""Writes datachars to writer."""
data = data.replace("&", "&amp;").replace("<", "&lt;")
data = data.replace("\"", "&quot;").replace(">", "&gt;")
if is_attrib:
data = data.replace(
"\r", "&#xD;").replace(
"\n", "&#xA;").replace(
"\t", "&#x9;")
writer.write(data)
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent+"<" + self.tagName)
attrs = self._get_attributes()
a_names = attrs.keys()
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
writer.write("\"")
if self.childNodes:
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
else:
writer.write("/>%s" % newl)
class XmlFix(object):
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):
# Preserve current xml.dom.minidom functions.
self.write_data = xml.dom.minidom._write_data
self.writexml = xml.dom.minidom.Element.writexml
# Inject replacement versions of a function and a method.
xml.dom.minidom._write_data = _Replacement_write_data
xml.dom.minidom.Element.writexml = _Replacement_writexml
def Cleanup(self):
if self.write_data:
xml.dom.minidom._write_data = self.write_data
xml.dom.minidom.Element.writexml = self.writexml
self.write_data = None
def __del__(self):
self.Cleanup()
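The intended usage pattern, as seen in easy_xml.EasyXml.WriteIfChanged above, is to install the patch for the duration of a writexml() call and then restore minidom. A rough sketch follows; the attribute value and output path are hypothetical, and the import assumes pylib/gyp is on sys.path, as easy_xml.py does.

import xml.dom
import xml_fix

impl = xml.dom.getDOMImplementation()
doc = impl.createDocument(None, 'Tool', None)
doc.documentElement.setAttribute('CommandLine', 'line1\nline2')

fix = xml_fix.XmlFix()             # patch minidom so \r, \n and \t in attributes are escaped
out = open('out.xml', 'w')
doc.documentElement.writexml(out)
out.close()
fix.Cleanup()                      # restore the stock minidom behaviour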

26
third_party/gyp/setup.py vendored Executable file

@ -0,0 +1,26 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from distutils.core import setup
from distutils.command.install import install
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
setup(
name='gyp',
version='0.1',
description='Generate Your Projects',
author='Chromium Authors',
author_email='chromium-dev@googlegroups.com',
url='http://code.google.com/p/gyp',
package_dir = {'': 'pylib'},
packages=['gyp', 'gyp.generator'],
scripts = ['gyp'],
cmdclass = {'install': install,
'install_lib': install_lib,
'install_scripts': install_scripts},
)