github.com/nodejs/node.git
author    Ryan Dahl <ry@tinyclouds.org>  2011-08-16 00:48:20 +0400
committer Ryan Dahl <ry@tinyclouds.org>  2011-08-16 00:49:15 +0400
commit    66f77963a368f8696c26db7b42e510fc0b2aebbf (patch)
tree      eeb2694f71a882bbf637214b7d44ad5cf4e77237 /tools
parent    25a5e90289965d60f8aad5138b1321ce267c2242 (diff)
Upgrade GYP to r995
Diffstat (limited to 'tools')
-rw-r--r--  tools/gyp/MANIFEST                      |  21
-rw-r--r--  tools/gyp/OWNERS                        |   1
-rwxr-xr-x  tools/gyp/PRESUBMIT.py                  |  41
-rwxr-xr-x  tools/gyp/buildbot/buildbot_run.py      |  79
-rwxr-xr-x  tools/gyp/gyp                           |  18
-rwxr-xr-x  tools/gyp/gyp.bat                       |   5
-rw-r--r--  tools/gyp/pylib/gyp/MSVSProject.py      | 149
-rw-r--r--  tools/gyp/pylib/gyp/MSVSToolFile.py     |  69
-rw-r--r--  tools/gyp/pylib/gyp/MSVSUserFile.py     | 140
-rw-r--r--  tools/gyp/pylib/gyp/easy_xml.py         | 197
-rw-r--r--  tools/gyp/pylib/gyp/easy_xml_test.py    | 124
-rw-r--r--  tools/gyp/pylib/gyp/generator/make.py   | 176
-rw-r--r--  tools/gyp/pylib/gyp/generator/msvs.py   | 187
-rw-r--r--  tools/gyp/pylib/gyp/input.py            |   2
-rwxr-xr-x  tools/gyp/samples/samples               |  81
-rw-r--r--  tools/gyp/samples/samples.bat           |   5
-rw-r--r--  tools/gyp/tools/README                  |  15
-rwxr-xr-x  tools/gyp/tools/graphviz.py             |  95
-rw-r--r--  tools/gyp/tools/pretty_gyp.py           | 142
-rwxr-xr-x  tools/gyp/tools/pretty_sln.py           | 167
-rwxr-xr-x  tools/gyp/tools/pretty_vcproj.py        | 316
21 files changed, 1467 insertions, 563 deletions
diff --git a/tools/gyp/MANIFEST b/tools/gyp/MANIFEST
new file mode 100644
index 00000000000..925ecc1842d
--- /dev/null
+++ b/tools/gyp/MANIFEST
@@ -0,0 +1,21 @@
+setup.py
+gyp
+LICENSE
+AUTHORS
+pylib/gyp/MSVSNew.py
+pylib/gyp/MSVSProject.py
+pylib/gyp/MSVSToolFile.py
+pylib/gyp/MSVSUserFile.py
+pylib/gyp/MSVSVersion.py
+pylib/gyp/SCons.py
+pylib/gyp/__init__.py
+pylib/gyp/common.py
+pylib/gyp/input.py
+pylib/gyp/xcodeproj_file.py
+pylib/gyp/generator/__init__.py
+pylib/gyp/generator/gypd.py
+pylib/gyp/generator/gypsh.py
+pylib/gyp/generator/make.py
+pylib/gyp/generator/msvs.py
+pylib/gyp/generator/scons.py
+pylib/gyp/generator/xcode.py
diff --git a/tools/gyp/OWNERS b/tools/gyp/OWNERS
new file mode 100644
index 00000000000..72e8ffc0db8
--- /dev/null
+++ b/tools/gyp/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py
new file mode 100755
index 00000000000..737584bc7e0
--- /dev/null
+++ b/tools/gyp/PRESUBMIT.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Top-level presubmit script for GYP.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into gcl.
+"""
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ report = []
+ report.extend(input_api.canned_checks.PanProjectChecks(
+ input_api, output_api))
+ return report
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ report = []
+ license = (
+ r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
+ r'.*? Use of this source code is governed by a BSD-style license that '
+ r'can be\n'
+ r'.*? found in the LICENSE file\.\n'
+ ) % {
+ 'year': input_api.time.strftime('%Y'),
+ }
+
+ report.extend(input_api.canned_checks.PanProjectChecks(
+ input_api, output_api, license_header=license))
+ report.extend(input_api.canned_checks.CheckTreeIsOpen(
+ input_api, output_api,
+ 'http://gyp-status.appspot.com/status',
+ 'http://gyp-status.appspot.com/current'))
+ return report
+
+
+def GetPreferredTrySlaves():
+ return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py
new file mode 100755
index 00000000000..adad7f9da00
--- /dev/null
+++ b/tools/gyp/buildbot/buildbot_run.py
@@ -0,0 +1,79 @@
+#!/usr/bin/python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Argument-less script to select what to run on the buildbots."""
+
+
+import os
+import shutil
+import subprocess
+import sys
+
+
+def GypTestFormat(title, format, msvs_version=None):
+ """Run the gyp tests for a given format, emitting annotator tags.
+
+ See annotator docs at:
+ https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
+ Args:
+ format: gyp format to test.
+ Returns:
+ 0 for sucesss, 1 for failure.
+ """
+ print '@@@BUILD_STEP ' + title + '@@@'
+ sys.stdout.flush()
+ buildbot_dir = os.path.dirname(os.path.abspath(__file__))
+ trunk_dir = os.path.dirname(buildbot_dir)
+ root_dir = os.path.dirname(trunk_dir)
+ env = os.environ.copy()
+ if msvs_version:
+ env['GYP_MSVS_VERSION'] = msvs_version
+ retcode = subprocess.call(' '.join(
+ [sys.executable, 'trunk/gyptest.py',
+ '--all',
+ '--passed',
+ '--format', format,
+ '--chdir', 'trunk',
+ '--path', '../scons']),
+ cwd=root_dir, env=env, shell=True)
+ if retcode:
+ # Emit failure tag, and keep going.
+ print '@@@STEP_FAILURE@@@'
+ return 1
+ return 0
+
+
+def GypBuild():
+ # Dump out/ directory.
+ print '@@@BUILD_STEP cleanup@@@'
+ print 'Removing out/ ...'
+ shutil.rmtree('out', ignore_errors=True)
+ print 'Done.'
+
+ retcode = 0
+ if sys.platform.startswith('linux'):
+ retcode += GypTestFormat('scons', format='scons')
+ retcode += GypTestFormat('make', format='make')
+ elif sys.platform == 'darwin':
+ retcode += GypTestFormat('xcode', format='xcode')
+ retcode += GypTestFormat('make', format='make')
+ elif sys.platform == 'win32':
+ retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008')
+ if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
+ retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
+ else:
+ raise Exception('Unknown platform')
+ if retcode:
+ # TODO(bradnelson): once the annotator supports a postscript (section for
+ # after the build proper that could be used for cumulative failures),
+ # use that instead of this. This isolates the final return value so
+ # that it isn't misattributed to the last stage.
+ print '@@@BUILD_STEP failures@@@'
+ sys.exit(retcode)
+
+
+if __name__ == '__main__':
+ GypBuild()
diff --git a/tools/gyp/gyp b/tools/gyp/gyp
new file mode 100755
index 00000000000..d52e7116f5a
--- /dev/null
+++ b/tools/gyp/gyp
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+# TODO(mark): sys.path manipulation is some temporary testing stuff.
+try:
+ import gyp
+except ImportError, e:
+ import os.path
+ sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
+ import gyp
+
+if __name__ == '__main__':
+ sys.exit(gyp.main(sys.argv[1:]))
diff --git a/tools/gyp/gyp.bat b/tools/gyp/gyp.bat
new file mode 100755
index 00000000000..90fbc6d30ee
--- /dev/null
+++ b/tools/gyp/gyp.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python "%~dp0/gyp" %*
diff --git a/tools/gyp/pylib/gyp/MSVSProject.py b/tools/gyp/pylib/gyp/MSVSProject.py
index 1246fdd9cda..fab11f9d0d5 100644
--- a/tools/gyp/pylib/gyp/MSVSProject.py
+++ b/tools/gyp/pylib/gyp/MSVSProject.py
@@ -7,8 +7,7 @@
"""Visual Studio project reader/writer."""
import common
-import xml.dom
-import xml_fix
+import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
@@ -23,24 +22,16 @@ class Tool(object):
name: Tool name.
attrs: Dict of tool attributes; may be None.
"""
- self.name = name
- self.attrs = attrs or {}
+ self._attrs = attrs or {}
+ self._attrs['Name'] = name
- def CreateElement(self, doc):
+ def _GetSpecification(self):
"""Creates an element for the tool.
- Args:
- doc: xml.dom.Document object to use for node creation.
-
Returns:
A new xml.dom.Element for the tool.
"""
- node = doc.createElement('Tool')
- node.setAttribute('Name', self.name)
- for k, v in self.attrs.items():
- node.setAttribute(k, v)
- return node
-
+ return ['Tool', self._attrs]
class Filter(object):
"""Visual Studio filter - that is, a virtual folder."""
@@ -62,24 +53,18 @@ class Filter(object):
class Writer(object):
"""Visual Studio XML project writer."""
- def __init__(self, project_path, version):
+ def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
- """
- self.project_path = project_path
- self.doc = None
- self.version = version
-
- def Create(self, name, guid=None, platforms=None):
- """Creates the project document.
-
- Args:
name: Name of the project.
guid: GUID to use for project, if not None.
+ platforms: Array of string, the supported platforms. If null, ['Win32']
"""
+ self.project_path = project_path
+ self.version = version
self.name = name
self.guid = guid
@@ -87,46 +72,16 @@ class Writer(object):
if not platforms:
platforms = ['Win32']
- # Create XML doc
- xml_impl = xml.dom.getDOMImplementation()
- self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None)
-
- # Add attributes to root element
- self.n_root = self.doc.documentElement
- self.n_root.setAttribute('ProjectType', 'Visual C++')
- self.n_root.setAttribute('Version', self.version.ProjectVersion())
- self.n_root.setAttribute('Name', self.name)
- self.n_root.setAttribute('ProjectGUID', self.guid)
- self.n_root.setAttribute('RootNamespace', self.name)
- self.n_root.setAttribute('Keyword', 'Win32Proj')
-
- # Add platform list
- n_platform = self.doc.createElement('Platforms')
- self.n_root.appendChild(n_platform)
+ # Initialize the specifications of the various sections.
+ self.platform_section = ['Platforms']
for platform in platforms:
- n = self.doc.createElement('Platform')
- n.setAttribute('Name', platform)
- n_platform.appendChild(n)
-
- # Add tool files section
- self.n_tool_files = self.doc.createElement('ToolFiles')
- self.n_root.appendChild(self.n_tool_files)
-
- # Add configurations section
- self.n_configs = self.doc.createElement('Configurations')
- self.n_root.appendChild(self.n_configs)
+ self.platform_section.append(['Platform', {'Name': platform}])
+ self.tool_files_section = ['ToolFiles']
+ self.configurations_section = ['Configurations']
+ self.files_section = ['Files']
- # Add empty References section
- self.n_root.appendChild(self.doc.createElement('References'))
-
- # Add files section
- self.n_files = self.doc.createElement('Files')
- self.n_root.appendChild(self.n_files)
# Keep a dict keyed on filename to speed up access.
- self.n_files_dict = dict()
-
- # Add empty Globals section
- self.n_root.appendChild(self.doc.createElement('Globals'))
+ self.files_dict = dict()
def AddToolFile(self, path):
"""Adds a tool file to the project.
@@ -134,20 +89,17 @@ class Writer(object):
Args:
path: Relative path from project to tool file.
"""
- n_tool = self.doc.createElement('ToolFile')
- n_tool.setAttribute('RelativePath', path)
- self.n_tool_files.appendChild(n_tool)
+ self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
- def _AddConfigToNode(self, parent, config_type, config_name, attrs=None,
- tools=None):
- """Adds a configuration to the parent node.
+ def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
+ """Returns the specification for a configuration.
Args:
- parent: Destination node.
config_type: Type of configuration node.
config_name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
+ Returns:
"""
# Handle defaults
if not attrs:
@@ -156,19 +108,19 @@ class Writer(object):
tools = []
# Add configuration node and its attributes
- n_config = self.doc.createElement(config_type)
- n_config.setAttribute('Name', config_name)
- for k, v in attrs.items():
- n_config.setAttribute(k, v)
- parent.appendChild(n_config)
+ node_attrs = attrs.copy()
+ node_attrs['Name'] = config_name
+ specification = [config_type, node_attrs]
# Add tool nodes and their attributes
if tools:
for t in tools:
if isinstance(t, Tool):
- n_config.appendChild(t.CreateElement(self.doc))
+ specification.append(t._GetSpecification())
else:
- n_config.appendChild(Tool(t).CreateElement(self.doc))
+ specification.append(Tool(t)._GetSpecification())
+ return specification
+
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
@@ -178,7 +130,8 @@ class Writer(object):
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
- self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools)
+ spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
+ self.configurations_section.append(spec)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
@@ -191,14 +144,12 @@ class Writer(object):
"""
for f in files:
if isinstance(f, Filter):
- node = self.doc.createElement('Filter')
- node.setAttribute('Name', f.name)
+ node = ['Filter', {'Name': f.name}]
self._AddFilesToNode(node, f.contents)
else:
- node = self.doc.createElement('File')
- node.setAttribute('RelativePath', f)
- self.n_files_dict[f] = node
- parent.appendChild(node)
+ node = ['File', {'RelativePath': f}]
+ self.files_dict[f] = node
+ parent.append(node)
def AddFiles(self, files):
"""Adds files to the project.
@@ -210,7 +161,7 @@ class Writer(object):
later add files to a Filter object which was passed into a previous call
to AddFiles(), it will not be reflected in this project.
"""
- self._AddFilesToNode(self.n_files, files)
+ self._AddFilesToNode(self.files_section, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
@@ -227,19 +178,35 @@ class Writer(object):
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
- parent = self.n_files_dict.get(path)
+ parent = self.files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
- self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools)
+ spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
+ tools)
+ parent.append(spec)
- def Write(self, writer=common.WriteOnDiff):
+ def WriteIfChanged(self):
"""Writes the project file."""
- f = writer(self.project_path)
- fix = xml_fix.XmlFix()
- self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
- fix.Cleanup()
- f.close()
+ # First create XML content definition
+ content = [
+ 'VisualStudioProject',
+ {'ProjectType': 'Visual C++',
+ 'Version': self.version.ProjectVersion(),
+ 'Name': self.name,
+ 'ProjectGUID': self.guid,
+ 'RootNamespace': self.name,
+ 'Keyword': 'Win32Proj'
+ },
+ self.platform_section,
+ self.tool_files_section,
+ self.configurations_section,
+ ['References'], # empty section
+ self.files_section,
+ ['Globals'] # empty section
+ ]
+ easy_xml.WriteXmlIfChanged(content, self.project_path,
+ encoding="Windows-1252")
#------------------------------------------------------------------------------
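
Note: the MSVSProject.Writer refactor above folds the old Create() step into the constructor and emits XML through gyp.easy_xml instead of xml.dom. A minimal sketch of the new call pattern, matching how generator/msvs.py drives it later in this change; the project path, GUID, tool settings, and the _FakeVersion stand-in are illustrative only, and the snippet assumes gyp's pylib directory is on sys.path:

    import gyp.MSVSProject as MSVSProject

    class _FakeVersion(object):
      # Stand-in for the MSVSVersion object normally passed in.
      def ProjectVersion(self):
        return '9.00'

    p = MSVSProject.Writer('out/foo.vcproj', _FakeVersion(), 'foo',
                           guid='{00000000-0000-0000-0000-000000000000}',
                           platforms=['Win32'])
    p.AddConfig('Debug|Win32',
                tools=[MSVSProject.Tool('VCCLCompilerTool',
                                        {'Optimization': '0'})])
    p.AddFiles(['main.cc'])
    p.WriteIfChanged()  # writes the .vcproj only if the generated XML changed
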
diff --git a/tools/gyp/pylib/gyp/MSVSToolFile.py b/tools/gyp/pylib/gyp/MSVSToolFile.py
index 493a9c42749..fcad90ad953 100644
--- a/tools/gyp/pylib/gyp/MSVSToolFile.py
+++ b/tools/gyp/pylib/gyp/MSVSToolFile.py
@@ -7,45 +7,22 @@
"""Visual Studio project reader/writer."""
import common
-import xml.dom
-import xml_fix
-
-
-#------------------------------------------------------------------------------
+import gyp.easy_xml as easy_xml
class Writer(object):
"""Visual Studio XML tool file writer."""
- def __init__(self, tool_file_path):
+ def __init__(self, tool_file_path, name):
"""Initializes the tool file.
Args:
tool_file_path: Path to the tool file.
- """
- self.tool_file_path = tool_file_path
- self.doc = None
-
- def Create(self, name):
- """Creates the tool file document.
-
- Args:
name: Name of the tool file.
"""
+ self.tool_file_path = tool_file_path
self.name = name
-
- # Create XML doc
- xml_impl = xml.dom.getDOMImplementation()
- self.doc = xml_impl.createDocument(None, 'VisualStudioToolFile', None)
-
- # Add attributes to root element
- self.n_root = self.doc.documentElement
- self.n_root.setAttribute('Version', '8.00')
- self.n_root.setAttribute('Name', self.name)
-
- # Add rules section
- self.n_rules = self.doc.createElement('Rules')
- self.n_root.appendChild(self.n_rules)
+ self.rules_section = ['Rules']
def AddCustomBuildRule(self, name, cmd, description,
additional_dependencies,
@@ -60,22 +37,24 @@ class Writer(object):
outputs: outputs of the rule.
extensions: extensions handled by the rule.
"""
- n_rule = self.doc.createElement('CustomBuildRule')
- n_rule.setAttribute('Name', name)
- n_rule.setAttribute('ExecutionDescription', description)
- n_rule.setAttribute('CommandLine', cmd)
- n_rule.setAttribute('Outputs', ';'.join(outputs))
- n_rule.setAttribute('FileExtensions', ';'.join(extensions))
- n_rule.setAttribute('AdditionalDependencies',
- ';'.join(additional_dependencies))
- self.n_rules.appendChild(n_rule)
-
- def Write(self, writer=common.WriteOnDiff):
+ rule = ['CustomBuildRule',
+ {'Name': name,
+ 'ExecutionDescription': description,
+ 'CommandLine': cmd,
+ 'Outputs': ';'.join(outputs),
+ 'FileExtensions': ';'.join(extensions),
+ 'AdditionalDependencies':
+ ';'.join(additional_dependencies)
+ }]
+ self.rules_section.append(rule)
+
+ def WriteIfChanged(self):
"""Writes the tool file."""
- f = writer(self.tool_file_path)
- fix = xml_fix.XmlFix()
- self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
- fix.Cleanup()
- f.close()
-
-#------------------------------------------------------------------------------
+ content = ['VisualStudioToolFile',
+ {'Version': '8.00',
+ 'Name': self.name
+ },
+ self.rules_section
+ ]
+ easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
+ encoding="Windows-1252")
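
Note: MSVSToolFile.Writer follows the same pattern, taking the name in the constructor and writing through easy_xml. A hedged sketch mirroring _GenerateNativeRulesForMSVS() in the msvs generator; the rules file path and the custom build rule are made-up examples:

    import gyp.MSVSToolFile as MSVSToolFile

    rules_file = MSVSToolFile.Writer('out/foo.rules', 'foo')
    rules_file.AddCustomBuildRule(
        name='assemble', cmd='ml.exe /c "$(InputPath)"',
        description='Assembling $(InputFileName)',
        additional_dependencies=[],
        outputs=['$(IntDir)\\$(InputName).obj'],
        extensions=['asm'])
    rules_file.WriteIfChanged()  # only touches the .rules file on change
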
diff --git a/tools/gyp/pylib/gyp/MSVSUserFile.py b/tools/gyp/pylib/gyp/MSVSUserFile.py
index ba166a92a2c..423649f6344 100644
--- a/tools/gyp/pylib/gyp/MSVSUserFile.py
+++ b/tools/gyp/pylib/gyp/MSVSUserFile.py
@@ -10,8 +10,7 @@ import common
import os
import re
import socket # for gethostname
-import xml.dom
-import xml_fix
+import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
@@ -56,49 +55,18 @@ def _QuoteWin32CommandLineArgs(args):
class Writer(object):
"""Visual Studio XML user user file writer."""
- def __init__(self, user_file_path, version):
+ def __init__(self, user_file_path, version, name):
"""Initializes the user file.
Args:
user_file_path: Path to the user file.
+ version: Version info.
+ name: Name of the user file.
"""
self.user_file_path = user_file_path
self.version = version
- self.doc = None
-
- def Create(self, name):
- """Creates the user file document.
-
- Args:
- name: Name of the user file.
- """
self.name = name
-
- # Create XML doc
- xml_impl = xml.dom.getDOMImplementation()
- self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)
-
- # Add attributes to root element
- self.n_root = self.doc.documentElement
- self.n_root.setAttribute('Version', self.version.ProjectVersion())
- self.n_root.setAttribute('Name', self.name)
-
- # Add configurations section
- self.n_configs = self.doc.createElement('Configurations')
- self.n_root.appendChild(self.n_configs)
-
- def _AddConfigToNode(self, parent, config_type, config_name):
- """Adds a configuration to the parent node.
-
- Args:
- parent: Destination node.
- config_type: Type of configuration node.
- config_name: Configuration name.
- """
- # Add configuration node and its attributes
- n_config = self.doc.createElement(config_type)
- n_config.setAttribute('Name', config_name)
- parent.appendChild(n_config)
+ self.configurations = {}
def AddConfig(self, name):
"""Adds a configuration to the project.
@@ -106,8 +74,7 @@ class Writer(object):
Args:
name: Configuration name.
"""
- self._AddConfigToNode(self.n_configs, 'Configuration', name)
-
+ self.configurations[name] = ['Configuration', {'Name': name}]
def AddDebugSettings(self, config_name, command, environment = {},
working_directory=""):
@@ -121,62 +88,61 @@ class Writer(object):
"""
command = _QuoteWin32CommandLineArgs(command)
- n_cmd = self.doc.createElement('DebugSettings')
abs_command = _FindCommandInPath(command[0])
- n_cmd.setAttribute('Command', abs_command)
- n_cmd.setAttribute('WorkingDirectory', working_directory)
- n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
- n_cmd.setAttribute('RemoteMachine', socket.gethostname())
if environment and isinstance(environment, dict):
- n_cmd.setAttribute('Environment',
- " ".join(['%s="%s"' % (key, val)
- for (key,val) in environment.iteritems()]))
+ env_list = ['%s="%s"' % (key, val)
+ for (key,val) in environment.iteritems()]
+ environment = ' '.join(env_list)
else:
- n_cmd.setAttribute('Environment', '')
-
- n_cmd.setAttribute('EnvironmentMerge', 'true')
-
- # Currently these are all "dummy" values that we're just setting
- # in the default manner that MSVS does it. We could use some of
- # these to add additional capabilities, I suppose, but they might
- # not have parity with other platforms then.
- n_cmd.setAttribute('Attach', 'false')
- n_cmd.setAttribute('DebuggerType', '3') # 'auto' debugger
- n_cmd.setAttribute('Remote', '1')
- n_cmd.setAttribute('RemoteCommand', '')
- n_cmd.setAttribute('HttpUrl', '')
- n_cmd.setAttribute('PDBPath', '')
- n_cmd.setAttribute('SQLDebugging', '')
- n_cmd.setAttribute('DebuggerFlavor', '0')
- n_cmd.setAttribute('MPIRunCommand', '')
- n_cmd.setAttribute('MPIRunArguments', '')
- n_cmd.setAttribute('MPIRunWorkingDirectory', '')
- n_cmd.setAttribute('ApplicationCommand', '')
- n_cmd.setAttribute('ApplicationArguments', '')
- n_cmd.setAttribute('ShimCommand', '')
- n_cmd.setAttribute('MPIAcceptMode', '')
- n_cmd.setAttribute('MPIAcceptFilter', '')
+ environment = ''
+
+ n_cmd = ['DebugSettings',
+ {'Command': abs_command,
+ 'WorkingDirectory': working_directory,
+ 'CommandArguments': " ".join(command[1:]),
+ 'RemoteMachine': socket.gethostname(),
+ 'Environment': environment,
+ 'EnvironmentMerge': 'true',
+ # Currently these are all "dummy" values that we're just setting
+ # in the default manner that MSVS does it. We could use some of
+ # these to add additional capabilities, I suppose, but they might
+ # not have parity with other platforms then.
+ 'Attach': 'false',
+ 'DebuggerType': '3', # 'auto' debugger
+ 'Remote': '1',
+ 'RemoteCommand': '',
+ 'HttpUrl': '',
+ 'PDBPath': '',
+ 'SQLDebugging': '',
+ 'DebuggerFlavor': '0',
+ 'MPIRunCommand': '',
+ 'MPIRunArguments': '',
+ 'MPIRunWorkingDirectory': '',
+ 'ApplicationCommand': '',
+ 'ApplicationArguments': '',
+ 'ShimCommand': '',
+ 'MPIAcceptMode': '',
+ 'MPIAcceptFilter': ''
+ }]
# Find the config, and add it if it doesn't exist.
- found = False
- for config in self.n_configs.childNodes:
- if config.getAttribute("Name") == config_name:
- found = True
-
- if not found:
+ if config_name not in self.configurations:
self.AddConfig(config_name)
# Add the DebugSettings onto the appropriate config.
- for config in self.n_configs.childNodes:
- if config.getAttribute("Name") == config_name:
- config.appendChild(n_cmd)
- break
+ self.configurations[config_name].append(n_cmd)
- def Write(self, writer=common.WriteOnDiff):
+ def WriteIfChanged(self):
"""Writes the user file."""
- f = writer(self.user_file_path)
- self.doc.writexml(f, encoding='Windows-1252', addindent=' ', newl='\r\n')
- f.close()
-
-#------------------------------------------------------------------------------
+ configs = ['Configurations']
+ for config, spec in sorted(self.configurations.iteritems()):
+ configs.append(spec)
+
+ content = ['VisualStudioUserFile',
+ {'Version': self.version.ProjectVersion(),
+ 'Name': self.name
+ },
+ configs]
+ easy_xml.WriteXmlIfChanged(content, self.user_file_path,
+ encoding="Windows-1252")
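
Note: MSVSUserFile.Writer gets the same constructor treatment, and debug settings now attach to configurations kept in a plain dict. A sketch of the usage, mirroring _CreateMSVSUserFile()/_WriteMSVSUserFile() in the msvs generator; the file name, command, and environment are made up, and _FakeVersion again stands in for the MSVSVersion object:

    import gyp.MSVSUserFile as MSVSUserFile

    class _FakeVersion(object):
      def ProjectVersion(self):
        return '9.00'

    user_file = MSVSUserFile.Writer('foo.vcproj.DOMAIN.user',
                                    _FakeVersion(), 'foo')
    user_file.AddDebugSettings('Debug|Win32',
                               ['python', 'run_tests.py', '--verbose'],
                               environment={'GYP_DEFINES': 'component=1'},
                               working_directory='$(ProjectDir)')
    user_file.WriteIfChanged()
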
diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/tools/gyp/pylib/gyp/easy_xml.py
index 98e2923a11a..66241758d07 100644
--- a/tools/gyp/pylib/gyp/easy_xml.py
+++ b/tools/gyp/pylib/gyp/easy_xml.py
@@ -4,20 +4,19 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import xml.dom
-import xml_fix
-import common
+import re
-class EasyXml(object):
- """ Class to easily create XML files with substantial pre-defined structures.
- Visual Studio files have a lot of pre-defined structures. This class makes
+def XmlToString(content, encoding='utf-8', pretty=False):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+
+ Visual Studio files have a lot of pre-defined structures. This function makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
- For this class, an XML element is represented as a list composed of:
+ Each XML element of the content is represented as a list composed of:
1. The name of the element, a string,
- 2. The attributes of the element, an dictionary (optional), and
+ 2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
@@ -37,85 +36,115 @@ class EasyXml(object):
['childtype', 'This is'],
['childtype', 'it!'],
]
+
+ Args:
+ content: The structured content to be converted.
+ encoding: The encoding to report on the first XML line.
+ pretty: True if we want pretty printing with indents and new lines.
+
+ Returns:
+ The XML content as a string.
"""
+ # We create a huge list of all the elements of the file.
+ xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
+ if pretty:
+ xml_parts.append('\n')
+ _ConstructContentList(xml_parts, content, pretty)
+
+ # Convert it to a string
+ return ''.join(xml_parts)
- def __init__(self, name, attributes=None):
- """ Constructs an object representing an XML document.
-
- Args:
- name: A string, the name of the root element.
- attributes: A dictionary, the attributes of the root.
- """
- xml_impl = xml.dom.getDOMImplementation()
- self.doc = xml_impl.createDocument(None, name, None)
- if attributes:
- self.SetAttributes(self.doc.documentElement, attributes)
-
- def AppendChildren(self, parent, children_specifications):
- """ Appends multiple children.
-
- Args:
- parent: The node to which the children will be added.
- children_specifications: A list of node specifications.
- """
- for specification in children_specifications:
+
+def _ConstructContentList(xml_parts, specification, pretty, level=0):
+ """ Appends the XML parts corresponding to the specification.
+
+ Args:
+ xml_parts: A list of XML parts to be appended to.
+ specification: The specification of the element. See EasyXml docs.
+ pretty: True if we want pretty printing with indents and new lines.
+ level: Indentation level.
+ """
+ # The first item in a specification is the name of the element.
+ if pretty:
+ indentation = ' ' * level
+ new_line = '\n'
+ else:
+ indentation = ''
+ new_line = ''
+ name = specification[0]
+ if not isinstance(name, str):
+ raise Exception('The first item of an EasyXml specification should be '
+ 'a string. Specification was ' + str(specification))
+ xml_parts.append(indentation + '<' + name)
+
+ # Optionally in second position is a dictionary of the attributes.
+ rest = specification[1:]
+ if rest and isinstance(rest[0], dict):
+ for at, val in sorted(rest[0].iteritems()):
+ xml_parts.append(' %s="%s"' % (at, _XmlEscape(val)))
+ rest = rest[1:]
+ if rest:
+ xml_parts.append('>')
+ all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
+ multi_line = not all_strings
+ if multi_line and new_line:
+ xml_parts.append(new_line)
+ for child_spec in rest:
# If it's a string, append a text node.
- # Otherwise append an XML node.
- if isinstance(specification, str):
- parent.appendChild(self.doc.createTextNode(specification))
+ # Otherwise recurse over that child definition
+ if isinstance(child_spec, str):
+ xml_parts.append(_XmlEscape(child_spec))
else:
- self.AppendNode(parent, specification)
-
- def AppendNode(self, parent, specification):
- """ Appends multiple children.
-
- Args:
- parent: The node to which the child will be added.
- children_specifications: A list, the node specification. The first
- entry is the name of the element. If the second entry is a
- dictionary, it is the attributes. The remaining entries of the
- list are the sub-elements.
- Returns:
- The XML element created.
- """
- name = specification[0]
- if not isinstance(name, str):
- raise Exception('The first item of an EasyXml specification should be '
- 'a string. Specification was ' + str(specification))
- element = self.doc.createElement(name)
- parent.appendChild(element)
- rest = specification[1:]
- # The second element is optionally a dictionary of the attributes.
- if rest and isinstance(rest[0], dict):
- self.SetAttributes(element, rest[0])
- rest = rest[1:]
- if rest:
- self.AppendChildren(element, rest)
- return element
-
- def SetAttributes(self, element, attribute_description):
- """ Sets the attributes of an element.
-
- Args:
- element: The node to which the child will be added.
- attribute_description: A dictionary that maps attribute names to
- attribute values.
- """
- for attribute, value in attribute_description.iteritems():
- element.setAttribute(attribute, value)
-
- def Root(self):
- """ Returns the root element. """
- return self.doc.documentElement
-
- def WriteIfChanged(self, path):
- """ Writes the XML doc but don't touch the file if unchanged. """
- f = common.WriteOnDiff(path)
- fix = xml_fix.XmlFix()
- self.doc.writexml(f, encoding='utf-8', addindent='', newl='')
- fix.Cleanup()
+ _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
+ if multi_line and indentation:
+ xml_parts.append(indentation)
+ xml_parts.append('</%s>%s' % (name, new_line))
+ else:
+ xml_parts.append('/>%s' % new_line)
+
+
+def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False):
+ """ Writes the XML content to disk, touching the file only if it has changed.
+
+ Args:
+ content: The structured content to be written.
+ path: Location of the file.
+ encoding: The encoding to report on the first line of the XML file.
+ pretty: True if we want pretty printing with indents and new lines.
+ """
+ xml_string = XmlToString(content, encoding, pretty)
+
+ # Get the old content
+ try:
+ f = open(path, 'r')
+ existing = f.read()
+ f.close()
+ except:
+ existing = None
+
+ # It has changed, write it
+ if existing != xml_string:
+ f = open(path, 'w')
+ f.write(xml_string)
f.close()
- def __str__(self):
- """ Converts the doc to a string. """
- return self.doc.toxml()
+
+_xml_escape_map = {
+ '"': '&quot;',
+ "'": '&apos;',
+ '<': '&lt;',
+ '>': '&gt;',
+ '&': '&amp;',
+ '\n': '&#xA;',
+ '\r': '&#xD;',
+}
+
+
+_xml_escape_re = re.compile(
+ "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
+
+
+def _XmlEscape(value):
+ """ Escape a string for inclusion in XML."""
+ replace = lambda m: _xml_escape_map[m.string[m.start() : m.end()]]
+ return _xml_escape_re.sub(replace, value)
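
Note: the EasyXml class is replaced by module-level functions that take a nested-list specification: [element name, optional attribute dict, children...], where plain strings become text nodes. A small sketch of the new interface (the spec and output path are examples; easy_xml_test.py below shows the exact strings XmlToString produces):

    import gyp.easy_xml as easy_xml

    spec = ['Project', {'ToolsVersion': '4.0'},
            ['ItemGroup',
             ['ClCompile', {'Include': 'main.cc'}]]]

    # Render to a string, or write to disk only when the content differs
    # from what the file already holds.
    print easy_xml.XmlToString(spec, pretty=True)
    easy_xml.WriteXmlIfChanged(spec, 'example.vcxproj', pretty=True)
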
diff --git a/tools/gyp/pylib/gyp/easy_xml_test.py b/tools/gyp/pylib/gyp/easy_xml_test.py
index e34821f0698..a8f32a0cd5d 100644
--- a/tools/gyp/pylib/gyp/easy_xml_test.py
+++ b/tools/gyp/pylib/gyp/easy_xml_test.py
@@ -17,75 +17,85 @@ class TestSequenceFunctions(unittest.TestCase):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
- xml = easy_xml.EasyXml('test')
- self.assertEqual(str(xml), '<?xml version="1.0" ?><test/>')
+ self.assertEqual(
+ easy_xml.XmlToString(['test']),
+ '<?xml version="1.0" encoding="utf-8"?><test/>')
+
+ self.assertEqual(
+ easy_xml.XmlToString(['test'], encoding='Windows-1252'),
+ '<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
- xml = easy_xml.EasyXml('test2', {'a': 'value1', 'b': 'value2'})
- self.assertEqual(str(xml),
- '<?xml version="1.0" ?><test2 a="value1" b="value2"/>')
+ self.assertEqual(
+ easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
+ '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
+
+ def test_EasyXml_escaping(self):
+ original = '<test>\'"\r&\nfoo'
+ converted = '&lt;test&gt;&apos;&quot;&#xD;&amp;&#xA;foo'
+ self.assertEqual(
+ easy_xml.XmlToString(['test3', {'a': original}, original]),
+ '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
+ (converted, converted))
+
+ def test_EasyXml_pretty(self):
+ self.assertEqual(
+ easy_xml.XmlToString(
+ ['test3',
+ ['GrandParent',
+ ['Parent1',
+ ['Child']
+ ],
+ ['Parent2']
+ ]
+ ],
+ pretty=True),
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<test3>\n'
+ ' <GrandParent>\n'
+ ' <Parent1>\n'
+ ' <Child/>\n'
+ ' </Parent1>\n'
+ ' <Parent2/>\n'
+ ' </GrandParent>\n'
+ '</test3>\n')
- def test_EasyXml_add_node(self):
- # We want to create:
- target = ('<?xml version="1.0" ?>'
- '<test3>'
- '<GrandParent>'
- '<Parent1>'
- '<Child/>'
- '</Parent1>'
- '<Parent2/>'
- '</GrandParent>'
- '</test3>')
-
- # Do it the hard way first:
- xml = easy_xml.EasyXml('test3')
- grand_parent = xml.AppendNode(xml.Root(), ['GrandParent'])
- parent1 = xml.AppendNode(grand_parent, ['Parent1'])
- parent2 = xml.AppendNode(grand_parent, ['Parent2'])
- xml.AppendNode(parent1, ['Child'])
- self.assertEqual(str(xml), target)
-
- # Do it the easier way:
- xml = easy_xml.EasyXml('test3')
- xml.AppendNode(xml.Root(),
- ['GrandParent',
- ['Parent1', ['Child']],
- ['Parent2']])
- self.assertEqual(str(xml), target)
def test_EasyXml_complex(self):
# We want to create:
- target = ('<?xml version="1.0" ?>'
- '<Project>'
- '<PropertyGroup Label="Globals">'
- '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
- '<Keyword>Win32Proj</Keyword>'
- '<RootNamespace>automated_ui_tests</RootNamespace>'
- '</PropertyGroup>'
- '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
- '<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\''
- 'Debug|Win32\'" Label="Configuration">'
- '<ConfigurationType>Application</ConfigurationType>'
- '<CharacterSet>Unicode</CharacterSet>'
- '</PropertyGroup>'
- '</Project>')
-
- xml = easy_xml.EasyXml('Project')
- xml.AppendChildren(xml.Root(), [
- ['PropertyGroup', {'Label': 'Globals'},
+ target = (
+ '<?xml version="1.0" encoding="utf-8"?>'
+ '<Project>'
+ '<PropertyGroup Label="Globals">'
+ '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
+ '<Keyword>Win32Proj</Keyword>'
+ '<RootNamespace>automated_ui_tests</RootNamespace>'
+ '</PropertyGroup>'
+ '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
+ '<PropertyGroup '
+ 'Condition="&apos;$(Configuration)|$(Platform)&apos;=='
+ '&apos;Debug|Win32&apos;" Label="Configuration">'
+ '<ConfigurationType>Application</ConfigurationType>'
+ '<CharacterSet>Unicode</CharacterSet>'
+ '</PropertyGroup>'
+ '</Project>')
+
+ xml = easy_xml.XmlToString(
+ ['Project',
+ ['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
- ],
- ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
- ['PropertyGroup',
+ ],
+ ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
+ ['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
- 'Label': 'Configuration'},
+ 'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
- ]
- ])
- self.assertEqual(str(xml), target)
+ ]
+ ])
+ self.assertEqual(xml, target)
if __name__ == '__main__':
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
index 7109cb0980d..4ed6b6eaa9b 100644
--- a/tools/gyp/pylib/gyp/generator/make.py
+++ b/tools/gyp/pylib/gyp/generator/make.py
@@ -368,7 +368,7 @@ command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
# so we can check their command lines.
# $? -- new prerequisites
# $| -- order-only dependencies
-prereq_changed = $(filter-out $|,$?)
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
@@ -454,90 +454,11 @@ SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
# Suffix rules, putting all outputs into $(obj).
""")
-SHARED_HEADER_SUFFIX_RULES_SRCDIR = {
- '.c': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-"""),
- '.s': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-"""),
- '.S': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-"""),
- '.cpp': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.cc': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.cxx': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.m': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-"""),
- '.mm': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-"""),
-}
SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
# Try building from generated source, too.
""")
-SHARED_HEADER_SUFFIX_RULES_OBJDIR1 = {
- '.c': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-"""),
- '.cc': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.cpp': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.m': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-"""),
- '.mm': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-"""),
-}
-
-SHARED_HEADER_SUFFIX_RULES_OBJDIR2 = {
- '.c': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.c FORCE_DO_CMD
- @$(call do_cmd,cc,1)
-"""),
- '.cc': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.cpp': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
- @$(call do_cmd,cxx,1)
-"""),
- '.m': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.m FORCE_DO_CMD
- @$(call do_cmd,objc,1)
-"""),
- '.mm': ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.mm FORCE_DO_CMD
- @$(call do_cmd,objcxx,1)
-"""),
-}
SHARED_FOOTER = """\
# "all" is a concatenation of the "all" targets from all the included
@@ -700,7 +621,7 @@ class XcodeSettings(object):
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
- def _GetBundleExtension(self):
+ def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
@@ -714,12 +635,15 @@ class XcodeSettings(object):
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
- def GetBundleName(self):
+ def GetProductName(self):
+ """Returns PRODUCT_NAME."""
+ return self.spec.get('product_name', self.spec['target_name'])
+
+ def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
- return self.spec.get('product_name',
- self.spec['target_name']) + self._GetBundleExtension()
+ return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
@@ -727,10 +651,10 @@ class XcodeSettings(object):
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
- self.GetBundleName(), 'Versions', self.GetFrameworkVersion())
+ self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
- return os.path.join(self.GetBundleName(), 'Contents')
+ return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
@@ -826,10 +750,8 @@ class XcodeSettings(object):
self._WarnUnimplemented('ARCHS')
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('DEPLOYMENT_POSTPROCESSING')
- self._WarnUnimplemented('DYLIB_INSTALL_NAME_BASE')
self._WarnUnimplemented('INFOPLIST_PREPROCESS')
self._WarnUnimplemented('INFOPLIST_PREPROCESSOR_DEFINITIONS')
- self._WarnUnimplemented('LD_DYLIB_INSTALL_NAME')
self._WarnUnimplemented('STRIPFLAGS')
self._WarnUnimplemented('STRIP_INSTALLED_PRODUCT')
@@ -931,6 +853,39 @@ class XcodeSettings(object):
ldflags.append('-L' + generator_default_variables['LIB_DIR'])
ldflags.append('-L' + generator_default_variables['PRODUCT_DIR'])
+ install_name = self.GetPerTargetSetting('LD_DYLIB_INSTALL_NAME')
+ if install_name:
+ # Hardcode support for the variables used in chromium for now, to unblock
+ # people using the make build.
+ if '$' in install_name:
+ assert install_name == ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
+ '$(WRAPPER_NAME)/$(PRODUCT_NAME)'), (
+ 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported yet'
+ ' in target \'%s\' (got \'%s\')' %
+ (self.spec['target_name'], install_name))
+ install_base = self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE')
+ # I'm not quite sure what :standardizepath does. Just call normpath(),
+ # but don't let @executable_path/../foo collapse to foo
+ prefix, rest = '', install_base
+ if install_base.startswith('@'):
+ prefix, rest = install_base.split('/', 1)
+ rest = os.path.normpath(rest) # :standardizepath
+ install_base = os.path.join(prefix, rest)
+
+ install_name = install_name.replace(
+ '$(DYLIB_INSTALL_NAME_BASE:standardizepath)', install_base)
+ install_name = install_name.replace(
+ '$(WRAPPER_NAME)', self.GetWrapperName())
+ install_name = install_name.replace(
+ '$(PRODUCT_NAME)', self.GetProductName())
+
+ install_name = QuoteSpaces(install_name)
+ ldflags.append('-install_name ' + install_name)
+ elif self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE'):
+ # LD_DYLIB_INSTALL_NAME defaults to
+ # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH).
+ print 'Warning: DYLIB_INSTALL_NAME_BASE is not fully implemented.'
+
self.configname = None
return ldflags
@@ -946,7 +901,7 @@ class XcodeSettings(object):
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
- "(target %s" % (setting, spec['target_name']))
+ "(target %s)" % (setting, spec['target_name']))
if result is None:
return default
return result
@@ -1057,6 +1012,28 @@ class MakefileWriter:
# Keep track of the total number of outputs for this makefile.
self._num_outputs = 0
+ self.suffix_rules_srcdir = {}
+ self.suffix_rules_objdir1 = {}
+ self.suffix_rules_objdir2 = {}
+
+ # Generate suffix rules for all compilable extensions.
+ for ext in COMPILABLE_EXTENSIONS.keys():
+ # Suffix rules for source folder.
+ self.suffix_rules_srcdir.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
+ # Suffix rules for generated source files.
+ self.suffix_rules_objdir1.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+ self.suffix_rules_objdir2.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
def NumOutputs(self):
return self._num_outputs
@@ -1165,15 +1142,15 @@ class MakefileWriter:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
extensions = set([os.path.splitext(s)[1] for s in sources])
for ext in extensions:
- if ext in SHARED_HEADER_SUFFIX_RULES_SRCDIR:
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_SRCDIR[ext])
+ if ext in self.suffix_rules_srcdir:
+ self.WriteLn(self.suffix_rules_srcdir[ext])
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
for ext in extensions:
- if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR1:
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR1[ext])
+ if ext in self.suffix_rules_objdir1:
+ self.WriteLn(self.suffix_rules_objdir1[ext])
for ext in extensions:
- if ext in SHARED_HEADER_SUFFIX_RULES_OBJDIR2:
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_OBJDIR2[ext])
+ if ext in self.suffix_rules_objdir2:
+ self.WriteLn(self.suffix_rules_objdir2[ext])
self.WriteLn('# End of this set of suffix rules')
# Add dependency from bundle to bundle binary.
@@ -1672,7 +1649,7 @@ class MakefileWriter:
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetBundleName())
+ return os.path.join(path, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self, spec):
@@ -1735,7 +1712,12 @@ class MakefileWriter:
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(self, configname)
else:
- ldflags = config.get('ldflags')
+ ldflags = config.get('ldflags', [])
+ # Compute an rpath for this output if needed.
+ if any(dep.endswith('.so') for dep in deps):
+ # We want to get the literal string "$ORIGIN" into the link command,
+ # so we need lots of escaping.
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
libraries = spec.get('libraries')
if libraries:
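
Note: the hard-coded SHARED_HEADER_SUFFIX_RULES_{SRCDIR,OBJDIR1,OBJDIR2} tables are replaced by suffix rules generated in MakefileWriter.__init__ from COMPILABLE_EXTENSIONS. A rough sketch of the expansion for one extension; the '.cc' to 'cxx' mapping matches the deleted tables, but COMPILABLE_EXTENSIONS itself is defined elsewhere in make.py and is only stubbed here:

    # Subset of the real mapping, for illustration only.
    COMPILABLE_EXTENSIONS = {'.c': 'cc', '.cc': 'cxx'}

    ext = '.cc'
    rule = ('$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n'
            '\t@$(call do_cmd,%s,1)\n' % (ext, COMPILABLE_EXTENSIONS[ext]))
    print rule
    # Expands to:
    #   $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
    #   <tab>@$(call do_cmd,cxx,1)
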
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
index d93a1d861e0..c4893b47587 100644
--- a/tools/gyp/pylib/gyp/generator/msvs.py
+++ b/tools/gyp/pylib/gyp/generator/msvs.py
@@ -56,7 +56,7 @@ generator_default_variables = {
# of the warnings.
# TODO(jeanluc) I had: 'LIB_DIR': '$(OutDir)lib',
- #'LIB_DIR': '$(OutDir)/lib',
+ 'LIB_DIR': '$(OutDir)/lib',
'RULE_INPUT_ROOT': '$(InputName)',
'RULE_INPUT_EXT': '$(InputExt)',
'RULE_INPUT_NAME': '$(InputFileName)',
@@ -480,8 +480,8 @@ def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
"""
rules_filename = '%s%s.rules' % (spec['target_name'],
options.suffix)
- rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename))
- rules_file.Create(spec['target_name'])
+ rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
+ spec['target_name'])
# Add each rule.
for r in rules:
rule_name = r['rule_name']
@@ -496,7 +496,7 @@ def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
outputs=outputs,
cmd=cmd)
# Write out rules file.
- rules_file.Write()
+ rules_file.WriteIfChanged()
# Add rules file to project.
p.AddToolFile(rules_filename)
@@ -575,18 +575,7 @@ def _GenerateExternalRules(rules, output_dir, spec,
'IntDir=$(IntDir)',
'-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
'-f', filename]
-
- # Currently this weird argument munging is used to duplicate the way a
- # python script would need to be run as part of the chrome tree.
- # Eventually we should add some sort of rule_default option to set this
- # per project. For now the behavior chrome needs is the default.
- mcs = rule.get('msvs_cygwin_shell')
- if mcs is None:
- mcs = int(spec.get('msvs_cygwin_shell', 1))
- elif isinstance(mcs, str):
- mcs = int(mcs)
- quote_cmd = int(rule.get('msvs_quote_cmd', 1))
- cmd = _BuildCommandLineForRuleRaw(spec, cmd, mcs, False, quote_cmd)
+ cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True)
# Insert makefile as 0'th input, so it gets the action attached there,
# as this is easier to understand from in the IDE.
all_inputs = list(all_inputs)
@@ -862,8 +851,8 @@ def _GenerateMSVSProject(project, options, version):
os.makedirs(vcproj_dir)
platforms = _GetUniquePlatforms(spec)
- p = MSVSProject.Writer(project.path, version=version)
- p.Create(spec['target_name'], guid=project.guid, platforms=platforms)
+ p = MSVSProject.Writer(project.path, version, spec['target_name'],
+ project.guid, platforms)
# Get directory project file is in.
gyp_dir = os.path.split(project.path)[0]
@@ -889,6 +878,7 @@ def _GenerateMSVSProject(project, options, version):
spec, options, gyp_dir, sources, excluded_sources))
# Add in files.
+ # _VerifySourcesExist(sources, gyp_dir)
p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec)
@@ -905,7 +895,7 @@ def _GenerateMSVSProject(project, options, version):
_AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
# Write it out.
- p.Write()
+ p.WriteIfChanged()
def _GetUniquePlatforms(spec):
@@ -937,8 +927,8 @@ def _CreateMSVSUserFile(proj_path, version, spec):
"""
(domain, username) = _GetDomainAndUserName()
vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
- user_file = MSVSUserFile.Writer(vcuser_filename, version=version)
- user_file.Create(spec['target_name'])
+ user_file = MSVSUserFile.Writer(vcuser_filename, version,
+ spec['target_name'])
return user_file
@@ -1106,7 +1096,7 @@ def _GetOutputFilePathAndTool(spec):
# TODO(jeanluc) If we want to avoid the MSB8012 warnings in
# VisualStudio 2010, we will have to change the value of $(OutDir)
# to contain the \lib suffix, rather than doing it as below.
- 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)\\', '.lib'),
+ 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)\\lib\\', '.lib'),
'dummy_executable': ('VCLinkerTool', 'Link', '$(IntDir)\\', '.junk'),
}
output_file_props = output_file_map.get(spec['type'])
@@ -1429,7 +1419,7 @@ def _WriteMSVSUserFile(project_path, version, spec):
for config_name, c_data in spec['configurations'].iteritems():
user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
action, environment, working_directory)
- user_file.Write()
+ user_file.WriteIfChanged()
def _AddCopies(actions_to_add, spec):
@@ -1538,11 +1528,8 @@ def _GetPathOfProject(qualified_target, spec, options, msvs_version):
if options.generator_output:
project_dir_path = os.path.dirname(os.path.abspath(proj_path))
proj_path = os.path.join(options.generator_output, proj_path)
- if options.msvs_abspath_output:
- fix_prefix = project_dir_path
- else:
- fix_prefix = gyp.common.RelativePath(project_dir_path,
- os.path.dirname(proj_path))
+ fix_prefix = gyp.common.RelativePath(project_dir_path,
+ os.path.dirname(proj_path))
return proj_path, fix_prefix
@@ -1613,14 +1600,6 @@ def CalculateVariables(default_variables, params):
# Stash msvs_version for later (so we don't have to probe the system twice).
params['msvs_version'] = msvs_version
- # The generation of Visual Studio vcproj files currently calculates the
- # relative path of some files more than once, which can cause errors depending
- # on the directory within which gyp is run. With this option, we output
- # these as absolute paths instead and are thus immune from that problem.
- # See http://code.google.com/p/gyp/issues/detail?id=201
- params['msvs_abspath_output'] = generator_flags.get(
- 'msvs_abspath_output', False)
-
# Set a variable so conditions can be based on msvs_version.
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
@@ -1720,7 +1699,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Get the project file format version back out of where we stashed it in
# GeneratorCalculatedVariables.
msvs_version = params['msvs_version']
- options.msvs_abspath_output = params['msvs_abspath_output']
# Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
(target_list, target_dicts) = _ShardTargets(target_list, target_dicts)
@@ -1782,15 +1760,14 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
_AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
filter_group, source_group)
if filter_group:
- doc = easy_xml.EasyXml(
- 'Project',
- {'ToolsVersion': '4.0',
- 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
- root = doc.Root()
- doc.AppendChildren(root, [
- ['ItemGroup'] + filter_group,
- ['ItemGroup'] + source_group])
- doc.WriteIfChanged(filters_path)
+ content = ['Project',
+ {'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ },
+ ['ItemGroup'] + filter_group,
+ ['ItemGroup'] + source_group
+ ]
+ easy_xml.WriteXmlIfChanged(content, filters_path)
elif os.path.exists(filters_path):
# We don't need this filter anymore. Delete the old filter file.
os.unlink(filters_path)
@@ -1958,12 +1935,10 @@ class MSBuildRule(object):
def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
"""Generate the .props file."""
- doc = easy_xml.EasyXml(
- 'Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
- root = doc.Root()
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
for rule in msbuild_rules:
- doc.AppendChildren(root, [
+ content.extend([
['PropertyGroup',
{'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
"'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
@@ -1987,32 +1962,31 @@ def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
],
]
])
- doc.WriteIfChanged(props_path)
+ easy_xml.WriteXmlIfChanged(content, props_path)
def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
"""Generate the .targets file."""
- doc = easy_xml.EasyXml(
- 'Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
- root = doc.Root()
- item_group = doc.AppendNode(
- root,
- ['ItemGroup',
- ['PropertyPageSchema',
- {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
- ],
- ])
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ }
+ ]
+ item_group = [
+ 'ItemGroup',
+ ['PropertyPageSchema',
+ {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
+ ]
+ ]
for rule in msbuild_rules:
- doc.AppendNode(
- item_group,
+ item_group.append(
['AvailableItemName',
{'Include': rule.rule_name},
['Targets', rule.target_name],
])
+ content.append(item_group)
+
for rule in msbuild_rules:
- doc.AppendNode(
- root,
+ content.append(
['UsingTask',
{'TaskName': rule.rule_name,
'TaskFactory': 'XamlTaskFactory',
@@ -2074,7 +2048,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
'Inputs': rule_inputs
}
]
- doc.AppendChildren(root, [
+ content.extend([
['Target',
{'Name': rule.target_name,
'BeforeTargets': '$(%s)' % rule.before_targets,
@@ -2135,22 +2109,23 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
]
],
])
- doc.WriteIfChanged(targets_path)
+ easy_xml.WriteXmlIfChanged(content, targets_path)
def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
# Generate the .xml file
- doc = easy_xml.EasyXml(
+ content = [
'ProjectSchemaDefinitions',
{'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
'assembly=Microsoft.Build.Framework'),
'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
'xmlns:transformCallback':
- 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'})
- root = doc.Root()
+ 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
+ }
+ ]
for rule in msbuild_rules:
- doc.AppendChildren(root, [
+ content.extend([
['Rule',
{'Name': rule.rule_name,
'PageTemplate': 'tool',
@@ -2312,7 +2287,7 @@ def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
}
]
])
- doc.WriteIfChanged(xml_path)
+ easy_xml.WriteXmlIfChanged(content, xml_path)
def _GetConfigurationAndPlatform(name, settings):
@@ -2597,9 +2572,6 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
'DisableSpecificWarnings',
'PreprocessorDefinitions']:
value.append('%%(%s)' % name)
- # TODO(jeanluc) Not all of them need to be fixed, why?
- if name in ['AdditionalIncludeDirectories', 'AdditionalLibraryDirectories']:
- value = _FixPaths(value)
# For most tools, entries in a list should be separated with ';' but some
# settings use a space. Check for those first.
exceptions = {
@@ -2617,15 +2589,35 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
return formatted_value
-def _GetMSBuildSources(spec, root_dir, sources, exclusions,
- extension_to_rule_name, actions_spec,
- sources_handled_by_action):
+def _VerifySourcesExist(sources, root_dir):
+ """Verifies that all source files exist on disk.
+
+ Checks that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation but no otherwise
+ visible errors.
+
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _VerifySourcesExist(source.contents, root_dir)
+ else:
+ if '$' not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ print 'Error: Missing input file ' + full_path
+
+
+def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
+ actions_spec, sources_handled_by_action):
groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
grouped_sources = {}
for g in groups:
grouped_sources[g] = []
- _AddSources2(spec, root_dir, sources, exclusions, grouped_sources,
+ _AddSources2(spec, sources, exclusions, grouped_sources,
extension_to_rule_name, sources_handled_by_action)
sources = []
for g in groups:
@@ -2636,20 +2628,13 @@ def _GetMSBuildSources(spec, root_dir, sources, exclusions,
return sources
-def _AddSources2(spec, root_dir, sources, exclusions, grouped_sources,
+def _AddSources2(spec, sources, exclusions, grouped_sources,
extension_to_rule_name, sources_handled_by_action):
for source in sources:
if isinstance(source, MSVSProject.Filter):
- _AddSources2(spec, root_dir, source.contents, exclusions, grouped_sources,
+ _AddSources2(spec, source.contents, exclusions, grouped_sources,
extension_to_rule_name, sources_handled_by_action)
else:
- # If it is a regular source file, i.e. not created at run time,
- # warn if it does not exists. Missing header files will cause needless
- # recompilation but no otherwise visible errors.
- if '$' not in source:
- full_path = os.path.join(root_dir, source)
- if not os.path.exists(full_path):
- print 'Warning: Missing input file ' + full_path
if not source in sources_handled_by_action:
detail = []
excluded_configurations = exclusions.get(source, [])
@@ -2736,19 +2721,13 @@ def _GenerateMSBuildProject(project, options, version):
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
extension_to_rule_name)
+ # _VerifySourcesExist(sources, gyp_dir)
for (_, configuration) in configurations.iteritems():
_FinalizeMSBuildSettings(spec, configuration)
# Add attributes to root element
- doc = easy_xml.EasyXml(
- 'Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
- 'ToolsVersion': version.ProjectVersion(),
- 'DefaultTargets': 'Build'
- })
-
import_default_section = [
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
import_cpp_props_section = [
@@ -2757,7 +2736,14 @@ def _GenerateMSBuildProject(project, options, version):
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
- content = _GetMSBuildProjectConfigurations(configurations)
+ content = [
+ 'Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
+ 'ToolsVersion': version.ProjectVersion(),
+ 'DefaultTargets': 'Build'
+ }]
+
+ content += _GetMSBuildProjectConfigurations(configurations)
content += _GetMSBuildGlobalProperties(spec, project.guid, gyp_file_name)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
@@ -2769,7 +2755,7 @@ def _GenerateMSBuildProject(project, options, version):
project.build_file)
content += _GetMSBuildToolSettingsSections(spec, configurations)
content += _GetMSBuildSources(
- spec, gyp_dir, sources, exclusions, extension_to_rule_name, actions_spec,
+ spec, sources, exclusions, extension_to_rule_name, actions_spec,
sources_handled_by_action)
content += _GetMSBuildProjectReferences(project)
content += import_cpp_targets_section
@@ -2778,8 +2764,7 @@ def _GenerateMSBuildProject(project, options, version):
# TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
# has_run_as = _WriteMSVSUserFile(project.path, version, spec)
- doc.AppendChildren(doc.Root(), content)
- doc.WriteIfChanged(project.path)
+ easy_xml.WriteXmlIfChanged(content, project.path)
def _GetMSBuildExtensions(props_files_of_rules):
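
For orientation, the refactor above drops the EasyXml document object in favor
of plain nested lists of the form ['Tag', {attributes}, child, ...], which
easy_xml.WriteXmlIfChanged serializes and, as the name suggests, rewrites only
when the output changes. A minimal sketch, with a hypothetical file name and
content (the import path is assumed):

  import gyp.easy_xml as easy_xml

  content = ['Project',
             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'},
             ['ItemGroup',
              ['ClCompile', {'Include': 'hello.cc'}],
             ],
            ]
  easy_xml.WriteXmlIfChanged(content, 'hello.vcxproj')
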
diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py
index 8a44d4e99d4..314b5c66de3 100644
--- a/tools/gyp/pylib/gyp/input.py
+++ b/tools/gyp/pylib/gyp/input.py
@@ -413,7 +413,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
index = index + 1
else:
raise Exception, \
- "Unable to find targets in build file %s" % os.path.abspath(build_file_path)
+ "Unable to find targets in build file %s" % build_file_path
# No longer needed.
del build_file_data['target_defaults']
diff --git a/tools/gyp/samples/samples b/tools/gyp/samples/samples
new file mode 100755
index 00000000000..804b6189987
--- /dev/null
+++ b/tools/gyp/samples/samples
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os.path
+import shutil
+import sys
+
+
+gyps = [
+ 'app/app.gyp',
+ 'base/base.gyp',
+ 'build/temp_gyp/googleurl.gyp',
+ 'build/all.gyp',
+ 'build/common.gypi',
+ 'build/external_code.gypi',
+ 'chrome/test/security_tests/security_tests.gyp',
+ 'chrome/third_party/hunspell/hunspell.gyp',
+ 'chrome/chrome.gyp',
+ 'media/media.gyp',
+ 'net/net.gyp',
+ 'printing/printing.gyp',
+ 'sdch/sdch.gyp',
+ 'skia/skia.gyp',
+ 'testing/gmock.gyp',
+ 'testing/gtest.gyp',
+ 'third_party/bzip2/bzip2.gyp',
+ 'third_party/icu38/icu38.gyp',
+ 'third_party/libevent/libevent.gyp',
+ 'third_party/libjpeg/libjpeg.gyp',
+ 'third_party/libpng/libpng.gyp',
+ 'third_party/libxml/libxml.gyp',
+ 'third_party/libxslt/libxslt.gyp',
+ 'third_party/lzma_sdk/lzma_sdk.gyp',
+ 'third_party/modp_b64/modp_b64.gyp',
+ 'third_party/npapi/npapi.gyp',
+ 'third_party/sqlite/sqlite.gyp',
+ 'third_party/zlib/zlib.gyp',
+ 'v8/tools/gyp/v8.gyp',
+ 'webkit/activex_shim/activex_shim.gyp',
+ 'webkit/activex_shim_dll/activex_shim_dll.gyp',
+ 'webkit/build/action_csspropertynames.py',
+ 'webkit/build/action_cssvaluekeywords.py',
+ 'webkit/build/action_jsconfig.py',
+ 'webkit/build/action_makenames.py',
+ 'webkit/build/action_maketokenizer.py',
+ 'webkit/build/action_useragentstylesheets.py',
+ 'webkit/build/rule_binding.py',
+ 'webkit/build/rule_bison.py',
+ 'webkit/build/rule_gperf.py',
+ 'webkit/tools/test_shell/test_shell.gyp',
+ 'webkit/webkit.gyp',
+]
+
+
+def Main(argv):
+ if len(argv) != 3 or argv[1] not in ['push', 'pull']:
+ print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
+ return 1
+
+ path_to_chrome = argv[2]
+
+ for g in gyps:
+ chrome_file = os.path.join(path_to_chrome, g)
+ local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
+ if argv[1] == 'push':
+ print 'Copying %s to %s' % (local_file, chrome_file)
+ shutil.copyfile(local_file, chrome_file)
+ elif argv[1] == 'pull':
+ print 'Copying %s to %s' % (chrome_file, local_file)
+ shutil.copyfile(chrome_file, local_file)
+ else:
+ assert False
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
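
For reference, the helper above takes a mode ('push' or 'pull') and the path
to a Chromium checkout; 'pull' copies the listed gyp files into this directory
and 'push' copies them back. The checkout path below is hypothetical:

  python tools/gyp/samples/samples pull /path/to/chromium/src
  python tools/gyp/samples/samples push /path/to/chromium/src
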
diff --git a/tools/gyp/samples/samples.bat b/tools/gyp/samples/samples.bat
new file mode 100644
index 00000000000..778d9c90f06
--- /dev/null
+++ b/tools/gyp/samples/samples.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python %~dp0/samples %*
diff --git a/tools/gyp/tools/README b/tools/gyp/tools/README
new file mode 100644
index 00000000000..712e4efbb7a
--- /dev/null
+++ b/tools/gyp/tools/README
@@ -0,0 +1,15 @@
+pretty_vcproj:
+ Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
+
+  The key/value pairs are used to resolve vsprops names.
+
+ For example, if I want to diff the base.vcproj project:
+
+  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
+ pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
+
+ And you can use your favorite diff tool to see the changes.
+
+  Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
+  I suggest you search for '"..\' and replace it with '"' in original.txt
+  before you perform the diff. \ No newline at end of file
diff --git a/tools/gyp/tools/graphviz.py b/tools/gyp/tools/graphviz.py
new file mode 100755
index 00000000000..7f7166802b7
--- /dev/null
+++ b/tools/gyp/tools/graphviz.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Using the JSON dumped by the dump-dependency-json generator,
+generate input suitable for graphviz to render a dependency graph of
+targets."""
+
+import collections
+import json
+import sys
+
+
+def ParseTarget(target):
+ target, _, suffix = target.partition('#')
+ filename, _, target = target.partition(':')
+ return filename, target, suffix
+
+
+def LoadEdges(filename, targets):
+ """Load the edges map from the dump file, and filter it to only
+  show targets in |targets| and their dependents."""
+
+  file = open(filename)
+ edges = json.load(file)
+ file.close()
+
+ # Copy out only the edges we're interested in from the full edge list.
+ target_edges = {}
+ to_visit = targets[:]
+ while to_visit:
+ src = to_visit.pop()
+ if src in target_edges:
+ continue
+ target_edges[src] = edges[src]
+ to_visit.extend(edges[src])
+
+ return target_edges
+
+
+def WriteGraph(edges):
+ """Print a graphviz graph to stdout.
+ |edges| is a map of target to a list of other targets it depends on."""
+
+ # Bucket targets by file.
+ files = collections.defaultdict(list)
+ for src, dst in edges.items():
+ build_file, target_name, toolset = ParseTarget(src)
+ files[build_file].append(src)
+
+ print 'digraph D {'
+ print ' fontsize=8' # Used by subgraphs.
+ print ' node [fontsize=8]'
+
+ # Output nodes by file. We must first write out each node within
+ # its file grouping before writing out any edges that may refer
+ # to those nodes.
+ for filename, targets in files.items():
+ if len(targets) == 1:
+ # If there's only one node for this file, simplify
+ # the display by making it a box without an internal node.
+ target = targets[0]
+ build_file, target_name, toolset = ParseTarget(target)
+ print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
+ target_name)
+ else:
+ # Group multiple nodes together in a subgraph.
+ print ' subgraph "cluster_%s" {' % filename
+ print ' label = "%s"' % filename
+ for target in targets:
+ build_file, target_name, toolset = ParseTarget(target)
+ print ' "%s" [label="%s"]' % (target, target_name)
+ print ' }'
+
+ # Now that we've placed all the nodes within subgraphs, output all
+ # the edges between nodes.
+ for src, dsts in edges.items():
+ for dst in dsts:
+ print ' "%s" -> "%s"' % (src, dst)
+
+ print '}'
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ print >>sys.stderr, __doc__
+ print >>sys.stderr
+ print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
+ sys.exit(1)
+
+ edges = LoadEdges('dump.json', sys.argv[1:])
+
+ WriteGraph(edges)
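
A minimal sketch (not part of the patch) of driving the helpers above from
Python; it assumes a dump.json produced by the dump_dependency_json generator
is present in the current directory, and the target name is hypothetical:

  import graphviz

  edges = graphviz.LoadEdges('dump.json', ['base/base.gyp:base#target'])
  graphviz.WriteGraph(edges)  # Prints "dot" input on stdout; pipe into graphviz.
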
diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py
new file mode 100644
index 00000000000..04c79012ee1
--- /dev/null
+++ b/tools/gyp/tools/pretty_gyp.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file pretty-prints the contents of a GYP file.
+
+import sys
+import re
+
+input = []
+if len(sys.argv) > 1:
+ input_file = open(sys.argv[1])
+ input = input_file.read().splitlines()
+ input_file.close()
+else:
+ input = sys.stdin.read().splitlines()
+
+# This is used to remove comments when we're counting braces.
+comment_re = re.compile(r'\s*#.*')
+
+# This is used to remove quoted strings when we're counting braces.
+# It takes into account quoted quotes, and makes sure that the quotes
+# match.
+# NOTE: It does not handle quotes that span more than one line, or
+# cases where an escaped quote is preceded by an escaped backslash.
+quote_re_str = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
+quote_re = re.compile(quote_re_str)
+
+def comment_replace(matchobj):
+ return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
+
+def mask_comments(input):
+  # This is used to mask the comments so we skip braces inside
+  # comments.
+ search_re = re.compile(r'(.*?)(#)(.*)')
+ return [search_re.sub(comment_replace, line) for line in input]
+
+def quote_replace(matchobj):
+ return "%s%s%s%s" % (matchobj.group(1),
+ matchobj.group(2),
+ 'x'*len(matchobj.group(3)),
+ matchobj.group(2))
+
+def mask_quotes(input):
+ # This is used to mask the quoted strings so we skip braces inside
+ # quoted strings.
+ search_re = re.compile(r'(.*?)' + quote_re_str)
+ return [search_re.sub(quote_replace, line) for line in input]
+
+def do_split(input, masked_input, search_re):
+ output = []
+ mask_output = []
+ for (line, masked_line) in zip(input, masked_input):
+ m = search_re.match(masked_line)
+ while m:
+ split = len(m.group(1))
+ line = line[:split] + r'\n' + line[split:]
+ masked_line = masked_line[:split] + r'\n' + masked_line[split:]
+ m = search_re.match(masked_line)
+ output.extend(line.split(r'\n'))
+ mask_output.extend(masked_line.split(r'\n'))
+ return (output, mask_output)
+
+# This masks out the quotes and comments, and then splits appropriate
+# lines (lines that match the double_*_brace regexes below) before
+# indenting them below.
+def split_double_braces(input):
+ # These are used to split lines which have multiple braces on them, so
+ # that the indentation looks prettier when all laid out (e.g. closing
+ # braces make a nice diagonal line).
+ double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
+ double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
+
+ masked_input = mask_quotes(input)
+ masked_input = mask_comments(masked_input)
+
+ (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
+ (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
+
+ return output
+
+# This keeps track of the number of braces on a given line and returns
+# the result. It starts at zero and subtracts for closed braces, and
+# adds for open braces.
+def count_braces(line):
+ open_braces = ['[', '(', '{']
+ close_braces = [']', ')', '}']
+ closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
+ cnt = 0
+ stripline = comment_re.sub(r'', line)
+ stripline = quote_re.sub(r"''", stripline)
+ for char in stripline:
+ for brace in open_braces:
+ if char == brace:
+ cnt += 1
+ for brace in close_braces:
+ if char == brace:
+ cnt -= 1
+
+ after = False
+ if cnt > 0:
+ after = True
+
+ # This catches the special case of a closing brace having something
+ # other than just whitespace ahead of it -- we don't want to
+ # unindent that until after this line is printed so it stays with
+ # the previous indentation level.
+ if cnt < 0 and closing_prefix_re.match(stripline):
+ after = True
+ return (cnt, after)
+
+# This does the main work of indenting the input based on the brace counts.
+def prettyprint_input(lines):
+ indent = 0
+ basic_offset = 2
+ last_line = ""
+ for line in lines:
+ if comment_re.match(line):
+ print line
+ else:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ (brace_diff, after) = count_braces(line)
+ if brace_diff != 0:
+ if after:
+ print " " * (basic_offset * indent) + line
+ indent += brace_diff
+ else:
+ indent += brace_diff
+ print " " * (basic_offset * indent) + line
+ else:
+ print " " * (basic_offset * indent) + line
+ else:
+ print ""
+ last_line = line
+
+# Split up the double braces.
+lines = split_double_braces(input)
+
+# Indent and print the output.
+prettyprint_input(lines)
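
A standalone sketch (not part of the tool) of the quote-masking idea used
above: quoted text is replaced with 'x' characters of the same length, so
braces inside strings never affect the brace count.

  import re

  quote_re_str = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
  search_re = re.compile(r'(.*?)' + quote_re_str)

  def quote_replace(m):
    return m.group(1) + m.group(2) + 'x' * len(m.group(3)) + m.group(2)

  line = "'defines': ['FOO={bar}'],"
  print search_re.sub(quote_replace, line)
  # Prints: 'xxxxxxx': ['xxxxxxxxx'],
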
diff --git a/tools/gyp/tools/pretty_sln.py b/tools/gyp/tools/pretty_sln.py
new file mode 100755
index 00000000000..0741fff1776
--- /dev/null
+++ b/tools/gyp/tools/pretty_sln.py
@@ -0,0 +1,167 @@
+#!/usr/bin/python2.5
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the information in a sln file in a diffable way.
+
+  It first outputs each project in alphabetical order with its
+ dependencies.
+
+ Then it outputs a possible build order.
+"""
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import re
+import sys
+import pretty_vcproj
+
+def BuildProject(project, built, projects, deps):
+  # If all dependencies are built, we can build this project; otherwise we
+  # first build each missing dependency.
+  # This is not infinite-recursion proof.
+ for dep in deps[project]:
+ if dep not in built:
+ BuildProject(dep, built, projects, deps)
+ print project
+ built.append(project)
+
+def ParseSolution(solution_file):
+ # All projects, their clsid and paths.
+ projects = dict()
+
+ # A list of dependencies associated with a project.
+ dependencies = dict()
+
+  # Regular expressions that match the SLN format.
+ # The first line of a project definition.
+ begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
+ '}"\) = "(.*)", "(.*)", "(.*)"$'))
+ # The last line of a project definition.
+ end_project = re.compile('^EndProject$')
+ # The first line of a dependency list.
+ begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
+ # The last line of a dependency list.
+ end_dep = re.compile('EndProjectSection$')
+ # A line describing a dependency.
+ dep_line = re.compile(' *({.*}) = ({.*})$')
+
+ in_deps = False
+ solution = open(solution_file)
+ for line in solution:
+ results = begin_project.search(line)
+ if results:
+ # Hack to remove icu because the diff is too different.
+ if results.group(1).find('icu') != -1:
+ continue
+ # We remove "_gyp" from the names because it helps to diff them.
+ current_project = results.group(1).replace('_gyp', '')
+ projects[current_project] = [results.group(2).replace('_gyp', ''),
+ results.group(3),
+ results.group(2)]
+ dependencies[current_project] = []
+ continue
+
+ results = end_project.search(line)
+ if results:
+ current_project = None
+ continue
+
+ results = begin_dep.search(line)
+ if results:
+ in_deps = True
+ continue
+
+ results = end_dep.search(line)
+ if results:
+ in_deps = False
+ continue
+
+ results = dep_line.search(line)
+ if results and in_deps and current_project:
+ dependencies[current_project].append(results.group(1))
+ continue
+
+ # Change all dependencies clsid to name instead.
+ for project in dependencies:
+    # For each dependency in this project
+ new_dep_array = []
+ for dep in dependencies[project]:
+      # Look for the project name matching this clsid
+ for project_info in projects:
+ if projects[project_info][1] == dep:
+ new_dep_array.append(project_info)
+ dependencies[project] = sorted(new_dep_array)
+
+ return (projects, dependencies)
+
+def PrintDependencies(projects, deps):
+ print "---------------------------------------"
+ print "Dependencies for all projects"
+ print "---------------------------------------"
+ print "-- --"
+
+ for (project, dep_list) in sorted(deps.items()):
+ print "Project : %s" % project
+ print "Path : %s" % projects[project][0]
+ if dep_list:
+ for dep in dep_list:
+ print " - %s" % dep
+ print ""
+
+ print "-- --"
+
+def PrintBuildOrder(projects, deps):
+ print "---------------------------------------"
+ print "Build order "
+ print "---------------------------------------"
+ print "-- --"
+
+ built = []
+ for (project, dep_list) in sorted(deps.items()):
+ if project not in built:
+ BuildProject(project, built, projects, deps)
+
+ print "-- --"
+
+def PrintVCProj(projects):
+
+ for project in projects:
+ print "-------------------------------------"
+ print "-------------------------------------"
+ print project
+ print project
+ print project
+ print "-------------------------------------"
+ print "-------------------------------------"
+
+ project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
+ projects[project][2]))
+
+ pretty = pretty_vcproj
+ argv = [ '',
+ project_path,
+ '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
+ ]
+ argv.extend(sys.argv[3:])
+ pretty.main(argv)
+
+def main():
+  # Check that we have at least one parameter (the solution file).
+ if len(sys.argv) < 2:
+ print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
+ return
+
+ (projects, deps) = ParseSolution(sys.argv[1])
+ PrintDependencies(projects, deps)
+ PrintBuildOrder(projects, deps)
+
+ if '--recursive' in sys.argv:
+ PrintVCProj(projects)
+
+if __name__ == '__main__':
+ main()
+
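
A minimal sketch (the solution path is hypothetical) of driving the helpers
above from Python rather than from the command line:

  import pretty_sln

  (projects, deps) = pretty_sln.ParseSolution(r'c:\src\chrome\chrome.sln')
  pretty_sln.PrintDependencies(projects, deps)
  pretty_sln.PrintBuildOrder(projects, deps)
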
diff --git a/tools/gyp/tools/pretty_vcproj.py b/tools/gyp/tools/pretty_vcproj.py
new file mode 100755
index 00000000000..292a39f7cf3
--- /dev/null
+++ b/tools/gyp/tools/pretty_vcproj.py
@@ -0,0 +1,316 @@
+#!/usr/bin/python2.5
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make the format of a vcproj really pretty.
+
+  This script normalizes and sorts an xml file. It also fetches all the
+  properties inside linked vsprops and includes them explicitly in the vcproj.
+
+ It outputs the resulting xml to stdout.
+"""
+
+__author__ = 'nsylvain (Nicolas Sylvain)'
+
+import os
+import sys
+
+from xml.dom.minidom import parse
+from xml.dom.minidom import Node
+
+REPLACEMENTS = dict()
+ARGUMENTS = None
+
+class CmpTuple:
+ """Compare function between 2 tuple."""
+ def __call__(self, x, y):
+ (key1, value1) = x
+ (key2, value2) = y
+ return cmp(key1, key2)
+
+class CmpNode:
+ """Compare function between 2 xml nodes."""
+
+ def get_string(self, node):
+ node_string = "node"
+ node_string += node.nodeName
+ if node.nodeValue:
+ node_string += node.nodeValue
+
+ if node.attributes:
+ # We first sort by name, if present.
+ node_string += node.getAttribute("Name")
+
+ all_nodes = []
+ for (name, value) in node.attributes.items():
+ all_nodes.append((name, value))
+
+ all_nodes.sort(CmpTuple())
+ for (name, value) in all_nodes:
+ node_string += name
+ node_string += value
+
+ return node_string
+
+ def __call__(self, x, y):
+ return cmp(self.get_string(x), self.get_string(y))
+
+def PrettyPrintNode(node, indent=0):
+ if node.nodeType == Node.TEXT_NODE:
+ if node.data.strip():
+ print '%s%s' % (' '*indent, node.data.strip())
+ return
+
+ if node.childNodes:
+ node.normalize()
+ # Get the number of attributes
+ attr_count = 0
+ if node.attributes:
+ attr_count = node.attributes.length
+
+ # Print the main tag
+ if attr_count == 0:
+ print '%s<%s>' % (' '*indent, node.nodeName)
+ else:
+ print '%s<%s' % (' '*indent, node.nodeName)
+
+ all_attributes = []
+ for (name, value) in node.attributes.items():
+ all_attributes.append((name, value))
+ all_attributes.sort(CmpTuple())
+ for (name, value) in all_attributes:
+ print '%s %s="%s"' % (' '*indent, name, value)
+ print '%s>' % (' '*indent)
+ if node.nodeValue:
+ print '%s %s' % (' '*indent, node.nodeValue)
+
+ for sub_node in node.childNodes:
+ PrettyPrintNode(sub_node, indent=indent+2)
+ print '%s</%s>' % (' '*indent, node.nodeName)
+
+def FlattenFilter(node):
+ """Returns a list of all the node and sub nodes."""
+ node_list = []
+
+ if (node.attributes and
+ node.getAttribute('Name') == '_excluded_files'):
+ # We don't add the "_excluded_files" filter.
+ return []
+
+ for current in node.childNodes:
+ if current.nodeName == 'Filter':
+ node_list.extend(FlattenFilter(current))
+ else:
+ node_list.append(current)
+
+ return node_list
+
+def FixFilenames(filenames, current_directory):
+ new_list = []
+ for filename in filenames:
+ if filename:
+ for key in REPLACEMENTS:
+ filename = filename.replace(key, REPLACEMENTS[key])
+ os.chdir(current_directory)
+ filename = filename.strip('"\' ')
+ if filename.startswith('$'):
+ new_list.append(filename)
+ else:
+ new_list.append(os.path.abspath(filename))
+ return new_list
+
+def AbsoluteNode(node):
+ # Make all the properties we know about in this node absolute.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ if name in ['InheritedPropertySheets', 'RelativePath',
+ 'AdditionalIncludeDirectories',
+ 'IntermediateDirectory', 'OutputDirectory',
+ 'AdditionalLibraryDirectories']:
+ # We want to fix up these paths
+ path_list = value.split(';')
+ new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
+ node.setAttribute(name, ';'.join(new_list))
+ if not value:
+ node.removeAttribute(name)
+
+def CleanupVcproj(node):
+  # For each sub node, we call this function recursively.
+ for sub_node in node.childNodes:
+ AbsoluteNode(sub_node)
+ CleanupVcproj(sub_node)
+
+  # Normalize the node, and remove all extraneous whitespace.
+ for sub_node in node.childNodes:
+ if sub_node.nodeType == Node.TEXT_NODE:
+ sub_node.data = sub_node.data.replace("\r", "")
+ sub_node.data = sub_node.data.replace("\n", "")
+ sub_node.data = sub_node.data.rstrip()
+
+  # Sort all the semicolon-separated attributes, and also remove the
+  # duplicates.
+ if node.attributes:
+ for (name, value) in node.attributes.items():
+ sorted_list = sorted(value.split(';'))
+ unique_list = []
+ [unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
+ node.setAttribute(name, ';'.join(unique_list))
+ if not value:
+ node.removeAttribute(name)
+
+ if node.childNodes:
+ node.normalize()
+
+ # For each node, take a copy, and remove it from the list.
+ node_array = []
+ while node.childNodes and node.childNodes[0]:
+ # Take a copy of the node and remove it from the list.
+ current = node.childNodes[0]
+ node.removeChild(current)
+
+ # If the child is a filter, we want to append all its children
+ # to this same list.
+ if current.nodeName == 'Filter':
+ node_array.extend(FlattenFilter(current))
+ else:
+ node_array.append(current)
+
+
+ # Sort the list.
+ node_array.sort(CmpNode())
+
+ # Insert the nodes in the correct order.
+ for new_node in node_array:
+ # But don't append empty tool node.
+ if new_node.nodeName == 'Tool':
+ if new_node.attributes and new_node.attributes.length == 1:
+ # This one was empty.
+ continue
+ if new_node.nodeName == 'UserMacro':
+ continue
+ node.appendChild(new_node)
+
+def GetConfiguationNodes(vcproj):
+ #TODO(nsylvain): Find a better way to navigate the xml.
+ nodes = []
+ for node in vcproj.childNodes:
+ if node.nodeName == "Configurations":
+ for sub_node in node.childNodes:
+ if sub_node.nodeName == "Configuration":
+ nodes.append(sub_node)
+
+ return nodes
+
+def GetChildrenVsprops(filename):
+ dom = parse(filename)
+ if dom.documentElement.attributes:
+ vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
+ return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
+ return []
+
+def SeekToNode(node1, child2):
+ # A text node does not have properties.
+ if child2.nodeType == Node.TEXT_NODE:
+ return None
+
+ # Get the name of the current node.
+ current_name = child2.getAttribute("Name")
+ if not current_name:
+ # There is no name. We don't know how to merge.
+ return None
+
+ # Look through all the nodes to find a match.
+ for sub_node in node1.childNodes:
+ if sub_node.nodeName == child2.nodeName:
+ name = sub_node.getAttribute("Name")
+ if name == current_name:
+ return sub_node
+
+ # No match. We give up.
+ return None
+
+def MergeAttributes(node1, node2):
+ # No attributes to merge?
+ if not node2.attributes:
+ return
+
+ for (name, value2) in node2.attributes.items():
+ # Don't merge the 'Name' attribute.
+ if name == 'Name':
+ continue
+ value1 = node1.getAttribute(name)
+ if value1:
+      # The attribute exists in the main node. If it's equal, we leave it
+ # untouched, otherwise we concatenate it.
+ if value1 != value2:
+ node1.setAttribute(name, ';'.join([value1, value2]))
+ else:
+      # The attribute does not exist in the main node. We append this one.
+ node1.setAttribute(name, value2)
+
+    # If the attribute was a property sheet attribute, we remove it, since
+    # it is useless.
+ if name == 'InheritedPropertySheets':
+ node1.removeAttribute(name)
+
+def MergeProperties(node1, node2):
+ MergeAttributes(node1, node2)
+ for child2 in node2.childNodes:
+ child1 = SeekToNode(node1, child2)
+ if child1:
+ MergeProperties(child1, child2)
+ else:
+ node1.appendChild(child2.cloneNode(True))
+
+def main(argv):
+  """Main function of this vcproj prettifier."""
+  global REPLACEMENTS
+  global ARGUMENTS
+  ARGUMENTS = argv
+
+  # Check that we have at least one parameter (the vcproj file).
+ if len(argv) < 2:
+ print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+ '[key2=value2]' % argv[0])
+ return
+
+ # Parse the keys
+ for i in range(2, len(argv)):
+ (key, value) = argv[i].split('=')
+ REPLACEMENTS[key] = value
+
+ # Open the vcproj and parse the xml.
+ dom = parse(argv[1])
+
+  # The first thing we need to do is find the Configuration nodes and merge
+  # them with the vsprops they include.
+ for configuration_node in GetConfiguationNodes(dom.documentElement):
+ # Get the property sheets associated with this configuration.
+ vsprops = configuration_node.getAttribute('InheritedPropertySheets')
+
+ # Fix the filenames to be absolute.
+ vsprops_list = FixFilenames(vsprops.strip().split(';'),
+ os.path.dirname(argv[1]))
+
+ # Extend the list of vsprops with all vsprops contained in the current
+ # vsprops.
+ for current_vsprops in vsprops_list:
+ vsprops_list.extend(GetChildrenVsprops(current_vsprops))
+
+ # Now that we have all the vsprops, we need to merge them.
+ for current_vsprops in vsprops_list:
+ MergeProperties(configuration_node,
+ parse(current_vsprops).documentElement)
+
+ # Now that everything is merged, we need to cleanup the xml.
+ CleanupVcproj(dom.documentElement)
+
+  # Finally, we use the pretty-print function to print the vcproj back to
+  # the user.
+ #print dom.toprettyxml(newl="\n")
+ PrettyPrintNode(dom.documentElement)
+
+if __name__ == '__main__':
+ main(sys.argv)