author | isaacs <i@izs.me> | 2014-03-19 03:06:47 +0400
---|---|---
committer | isaacs <i@izs.me> | 2014-03-19 03:06:47 +0400
commit | 3fe6931037fa49e37a4ade857b857ea3e636dac7 | (patch)
tree | b98b6c9caae22c0050e7a1f44944a12226b8bfc9 | /node_modules
parent | 7b941497631ca2338fa477507386a8e694fd3216 | (diff)
node-gyp@0.13
Diffstat (limited to 'node_modules')
24 files changed, 2479 insertions, 253 deletions
diff --git a/node_modules/node-gyp/addon.gypi b/node_modules/node-gyp/addon.gypi
index 6b9e20e20..0b81fab20 100644
--- a/node_modules/node-gyp/addon.gypi
+++ b/node_modules/node-gyp/addon.gypi
@@ -42,7 +42,7 @@
         '-luuid.lib',
         '-lodbc32.lib',
         '-lDelayImp.lib',
-        '-l<(node_root_dir)/$(Configuration)/node.lib'
+        '-l"<(node_root_dir)/$(ConfigurationName)/node.lib"'
       ],
       # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent<T>'
       # needs to have dll-interface to be used by clients of class 'node::ObjectWrap'
diff --git a/node_modules/node-gyp/gyp/AUTHORS b/node_modules/node-gyp/gyp/AUTHORS
index 234e1483f..9389ca0a2 100644
--- a/node_modules/node-gyp/gyp/AUTHORS
+++ b/node_modules/node-gyp/gyp/AUTHORS
@@ -7,4 +7,5 @@ Yandex LLC
 
 Steven Knight <knight@baldmt.com>
 Ryan Norton <rnorton10@gmail.com>
+David J. Sankel <david@sankelsoftware.com>
 Eric N. Vander Weele <ericvw@gmail.com>
diff --git a/node_modules/node-gyp/gyp/PRESUBMIT.py b/node_modules/node-gyp/gyp/PRESUBMIT.py
index 5567b88bd..9c474eb2b 100644
--- a/node_modules/node-gyp/gyp/PRESUBMIT.py
+++ b/node_modules/node-gyp/gyp/PRESUBMIT.py
@@ -97,14 +97,19 @@ def CheckChangeOnCommit(input_api, output_api):
       'http://gyp-status.appspot.com/status',
       'http://gyp-status.appspot.com/current'))
 
+  import os
   import sys
   old_sys_path = sys.path
   try:
     sys.path = ['pylib', 'test/lib'] + sys.path
+    blacklist = PYLINT_BLACKLIST
+    if sys.platform == 'win32':
+      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
+                   for x in PYLINT_BLACKLIST]
     report.extend(input_api.canned_checks.RunPylint(
         input_api,
         output_api,
-        black_list=PYLINT_BLACKLIST,
+        black_list=blacklist,
        disabled_warnings=PYLINT_DISABLED_WARNINGS))
   finally:
     sys.path = old_sys_path
diff --git a/node_modules/node-gyp/gyp/buildbot/buildbot_run.py b/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
index 398eb87a8..979073c77 100755
--- a/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
+++ b/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
@@ -23,6 +23,8 @@ BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
 TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
 ROOT_DIR = os.path.dirname(TRUNK_DIR)
 ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
+CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
+CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
 OUT_DIR = os.path.join(TRUNK_DIR, 'out')
 
 
@@ -34,6 +36,43 @@ def CallSubProcess(*args, **kwargs):
     sys.exit(1)
 
 
+def PrepareCmake():
+  """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
+  if os.environ['BUILDBOT_CLOBBER'] == '1':
+    print '@@@BUILD_STEP Clobber CMake checkout@@@'
+    shutil.rmtree(CMAKE_DIR)
+
+  # We always build CMake 2.8.8, so no need to do anything
+  # if the directory already exists.
+  if os.path.isdir(CMAKE_DIR):
+    return
+
+  print '@@@BUILD_STEP Initialize CMake checkout@@@'
+  os.mkdir(CMAKE_DIR)
+  CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
+  CallSubProcess(['git', 'config', '--global',
+                  'user.email', 'chrome-bot@google.com'])
+  CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
+
+  print '@@@BUILD_STEP Sync CMake@@@'
+  CallSubProcess(
+      ['git', 'clone',
+       '--depth', '1',
+       '--single-branch',
+       '--branch', 'v2.8.8',
+       '--',
+       'git://cmake.org/cmake.git',
+       CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  print '@@@BUILD_STEP Build CMake@@@'
+  CallSubProcess(
+      ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
+
+
 def PrepareAndroidTree():
   """Prepare an Android tree to run 'android' format tests."""
   if os.environ['BUILDBOT_CLOBBER'] == '1':
@@ -91,6 +130,7 @@ def GypTestFormat(title, format=None, msvs_version=None):
       '--all',
       '--passed',
       '--format', format,
+      '--path', CMAKE_BIN_DIR,
       '--chdir', 'trunk'])
   if format == 'android':
     # gyptest needs the environment setup from envsetup/lunch in order to build
@@ -124,6 +164,8 @@ def GypBuild():
   elif sys.platform.startswith('linux'):
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('make')
+    PrepareCmake()
+    retcode += GypTestFormat('cmake')
   elif sys.platform == 'darwin':
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('xcode')
diff --git a/node_modules/node-gyp/gyp/gyp b/node_modules/node-gyp/gyp/gyp
index a157f3495..b53a6dde8 100755
--- a/node_modules/node-gyp/gyp/gyp
+++ b/node_modules/node-gyp/gyp/gyp
@@ -3,5 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-BASE=`dirname $0`
-python $BASE/gyp_main.py "$@"
+set -e
+base=$(dirname "$0")
+exec python "${base}/gyp_main.py" "$@"
diff --git a/node_modules/node-gyp/gyp/gyptest.py b/node_modules/node-gyp/gyp/gyptest.py
index a80dfbf2d..8f3ee0ffb 100755
--- a/node_modules/node-gyp/gyp/gyptest.py
+++ b/node_modules/node-gyp/gyp/gyptest.py
@@ -176,7 +176,7 @@ def main(argv=None):
   if opts.path:
     extra_path = [os.path.abspath(p) for p in opts.path]
     extra_path = os.pathsep.join(extra_path)
-    os.environ['PATH'] += os.pathsep + extra_path
+    os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
 
   if not args:
     if not opts.all:
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index e8be38628..0c9532d8e 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -812,6 +812,8 @@ _Same(_link, 'UACExecutionLevel',
       _Enumeration(['AsInvoker',  # /level='asInvoker'
                     'HighestAvailable',  # /level='highestAvailable'
                     'RequireAdministrator']))  # /level='requireAdministrator'
+_Same(_link, 'MinimumRequiredVersion', _string)
+_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
 
 
 # Options found in MSVS that have been renamed in MSBuild.
@@ -850,8 +852,6 @@ _MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
 _MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
 _MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
 _MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
-_MSBuildOnly(_link, 'MinimumRequiredVersion', _string)
 _MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
 _MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
 _MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
@@ -985,6 +985,7 @@ _Same(_lib, 'OutputFile', _file_name)  # /OUT
 _Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
 _Same(_lib, 'UseUnicodeResponseFiles', _boolean)
 _Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
+_Same(_lib, 'TargetMachine', _target_machine_enumeration)
 
 # TODO(jeanluc) _link defines the same value that gets moved to
 #     ProjectReference.  We may want to validate that they are consistent.
@@ -1003,7 +1004,6 @@ _MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
 _MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
 _MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
 _MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration)
 _MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
 _MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
 _MSBuildOnly(_lib, 'Verbose', _boolean)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index bb30a7ba0..03b6d8ad4 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -96,9 +96,11 @@ class VisualStudioVersion(object):
       else:
         assert target_arch == 'x64'
         arg = 'x86_amd64'
-        if (os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+        # Use the 64-on-64 compiler if we're not using an express
+        # edition and we're running on a 64bit OS.
+        if self.short_name[-1] != 'e' and (
+            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
             os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
-          # Use the 64-on-64 compiler if we can.
           arg = 'amd64'
       return [os.path.normpath(
          os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/node-gyp/gyp/pylib/gyp/common.py
index b9d2abef0..f9c6c6f3a 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -391,6 +391,14 @@ def WriteOnDiff(filename):
   return Writer()
 
 
+def EnsureDirExists(path):
+  """Make sure the directory for |path| exists."""
+  try:
+    os.makedirs(os.path.dirname(path))
+  except OSError:
+    pass
+
+
 def GetFlavor(params):
   """Returns |params.flavor| if it's set, the system's default flavor else."""
   flavors = {
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 9476a1df6..41346e2b1 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -145,7 +145,7 @@ class AndroidMkWriter(object):
       spec, configs: gyp info
       part_of_all: flag indicating this target is part of 'all'
     """
-    make.ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
     self.fp = open(output_filename, 'w')
 
@@ -452,7 +452,7 @@ class AndroidMkWriter(object):
                    (output, path))
       self.WriteLn('\t@echo Copying: $@')
       self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
-      self.WriteLn('\t$(hide) $(ACP) -r $< $@')
+      self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
       self.WriteLn()
       outputs.append(output)
     self.WriteLn('%s = %s' % (variable,
@@ -983,7 +983,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
   makefile_path = os.path.join(options.toplevel_dir, makefile_name)
   assert not options.generator_output, (
       'The Android backend does not support options.generator_output.')
-  make.ensure_directory_exists(makefile_path)
+  gyp.common.EnsureDirExists(makefile_path)
   root_makefile = open(makefile_path, 'w')
   root_makefile.write(header)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
new file mode 100644
index 000000000..10d015ee8
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -0,0 +1,1143 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
+will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
+but build using CMake. As a result QtCreator editor is unaware of compiler
+defines. The generated CMakeLists.txt can also be used to build on Linux. There
+is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
+not be able to find the header file directories described in the generated
+CMakeLists.txt file.
+"""
+
+import multiprocessing
+import os
+import signal
+import string
+import subprocess
+import gyp.common
+
+generator_default_variables = {
+  'EXECUTABLE_PREFIX': '',
+  'EXECUTABLE_SUFFIX': '',
+  'STATIC_LIB_PREFIX': 'lib',
+  'STATIC_LIB_SUFFIX': '.a',
+  'SHARED_LIB_PREFIX': 'lib',
+  'SHARED_LIB_SUFFIX': '.so',
+  'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
+  'LIB_DIR': '${obj}.${TOOLSET}',
+  'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
+  'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
+  'PRODUCT_DIR': '${builddir}',
+  'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
+  'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
+  'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
+  'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
+  'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
+  'CONFIGURATION_NAME': '${configuration}',
+}
+
+FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+  '.c': 'cc',
+  '.cc': 'cxx',
+  '.cpp': 'cxx',
+  '.cxx': 'cxx',
+  '.s': 's',  # cc
+  '.S': 's',  # cc
+}
+
+
+def RemovePrefix(a, prefix):
+  """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+  return a[len(prefix):] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+  """Calculate additional variables for use in the build (called by gyp)."""
+  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+  """Return true if the file is compilable (should be in OBJS)."""
+  return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+  """Return true if the file is linkable (should be on the link line)."""
+  return filename.endswith('.o')
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+
+  If rel_path is an absolute path it is returned unchanged.
+  Otherwise it is resolved against base_path and normalized.
+  If the result is a relative path, it is forced to be relative to the
+  CMakeLists.txt.
+  """
+  if os.path.isabs(rel_path):
+    return rel_path
+  if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
+    return rel_path
+  # TODO: do we need to check base_path for absolute variables as well?
+  return os.path.join('${CMAKE_SOURCE_DIR}',
+                      os.path.normpath(os.path.join(base_path, rel_path)))
+
+
+def NormjoinPath(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+  TODO: what is this really used for?
+  If rel_path begins with '$' it is returned unchanged.
+  Otherwise it is resolved against base_path if relative, then normalized.
+  """
+  if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
+    return rel_path
+  return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def CMakeStringEscape(a):
+  """Escapes the string 'a' for use inside a CMake string.
+
+  This means escaping
+  '\' otherwise it may be seen as modifying the next character
+  '"' otherwise it will end the string
+  ';' otherwise the string becomes a list
+
+  The following do not need to be escaped
+  '#' when the lexer is in string state, this does not start a comment
+
+  The following are yet unknown
+  '$' generator variables (like ${obj}) must not be escaped,
+      but text $ should be escaped
+      what is wanted is to know which $ come from generator variables
+  """
+  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+  """Given a set of source file, sets the given property on them."""
+  output.write('set_source_files_properties(')
+  output.write(source_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetFilesProperty(output, source_names, property_name, values, sep):
+  """Given a set of source files, sets the given property on them."""
+  output.write('set_source_files_properties(\n')
+  for source_name in source_names:
+    output.write('  ')
+    output.write(source_name)
+    output.write('\n')
+  output.write('  PROPERTIES\n  ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('"\n)\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=''):
+  """Given a target, sets the given property."""
+  output.write('set_target_properties(')
+  output.write(target_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+  """Sets a CMake variable."""
+  output.write('set(')
+  output.write(variable_name)
+  output.write(' "')
+  output.write(CMakeStringEscape(value))
+  output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+  """Sets a CMake variable to a list."""
+  if not values:
+    return SetVariable(output, variable_name, "")
+  if len(values) == 1:
+    return SetVariable(output, variable_name, values[0])
+  output.write('list(APPEND ')
+  output.write(variable_name)
+  output.write('\n  "')
+  output.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
+  output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+  """Unsets a CMake variable."""
+  output.write('unset(')
+  output.write(variable_name)
+  output.write(')\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+  if prepend:
+    output.write(prepend)
+  output.write('${')
+  output.write(variable_name)
+  output.write('}')
+
+
+class CMakeTargetType:
+  def __init__(self, command, modifier, property_modifier):
+    self.command = command
+    self.modifier = modifier
+    self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+  'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
+  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
+  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
+  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
+  'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
+}
+
+
+def StringToCMakeTargetName(a):
+  """Converts the given string 'a' to a valid CMake target name.
+
+  All invalid characters are replaced by '_'.
+  Invalid for cmake: ' ', '/', '(', ')'
+  Invalid for make: ':'
+  Invalid for unknown reasons but cause failures: '.'
+  """
+  return a.translate(string.maketrans(' /():.', '______'))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps,
+                 path_to_gyp, output):
+  """Write CMake for the 'actions' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    actions: the Gyp 'actions' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_taget>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  for action in actions:
+    action_name = StringToCMakeTargetName(action['action_name'])
+    action_target_name = '%s__%s' % (target_name, action_name)
+
+    inputs = action['inputs']
+    inputs_name = action_target_name + '__input'
+    SetVariableList(output, inputs_name,
+        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+
+    outputs = action['outputs']
+    cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
+                     for out in outputs]
+    outputs_name = action_target_name + '__output'
+    SetVariableList(output, outputs_name, cmake_outputs)
+
+    # Build up a list of outputs.
+    # Collect the output dirs we'll need.
+    dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+    if int(action.get('process_outputs_as_sources', False)):
+      extra_sources.extend(zip(cmake_outputs, outputs))
+
+    # add_custom_command
+    output.write('add_custom_command(OUTPUT ')
+    WriteVariable(output, outputs_name)
+    output.write('\n')
+
+    if len(dirs) > 0:
+      for directory in dirs:
+        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
+        output.write(directory)
+        output.write('\n')
+
+    output.write('  COMMAND ')
+    output.write(gyp.common.EncodePOSIXShellList(action['action']))
+    output.write('\n')
+
+    output.write('  DEPENDS ')
+    WriteVariable(output, inputs_name)
+    output.write('\n')
+
+    output.write('  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write(path_to_gyp)
+    output.write('\n')
+
+    output.write('  COMMENT ')
+    if 'message' in action:
+      output.write(action['message'])
+    else:
+      output.write(action_target_name)
+    output.write('\n')
+
+    output.write('  VERBATIM\n')
+    output.write(')\n')
+
+    # add_custom_target
+    output.write('add_custom_target(')
+    output.write(action_target_name)
+    output.write('\n  DEPENDS ')
+    WriteVariable(output, outputs_name)
+    output.write('\n  SOURCES ')
+    WriteVariable(output, inputs_name)
+    output.write('\n)\n')
+
+    extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+  if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
+    if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+      return rel_path
+  return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps,
+               path_to_gyp, output):
+  """Write CMake for the 'rules' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    actions: the Gyp 'actions' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_taget>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+ """ + for rule in rules: + rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name']) + + inputs = rule.get('inputs', []) + inputs_name = rule_name + '__input' + SetVariableList(output, inputs_name, + [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs]) + outputs = rule['outputs'] + var_outputs = [] + + for count, rule_source in enumerate(rule.get('rule_sources', [])): + action_name = rule_name + '_' + str(count) + + rule_source_dirname, rule_source_basename = os.path.split(rule_source) + rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename) + + SetVariable(output, 'RULE_INPUT_PATH', rule_source) + SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname) + SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename) + SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root) + SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext) + + # Build up a list of outputs. + # Collect the output dirs we'll need. + dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir) + + # Create variables for the output, as 'local' variable will be unset. + these_outputs = [] + for output_index, out in enumerate(outputs): + output_name = action_name + '_' + str(output_index) + SetVariable(output, output_name, + NormjoinRulePathForceCMakeSource(path_to_gyp, out, + rule_source)) + if int(rule.get('process_outputs_as_sources', False)): + extra_sources.append(('${' + output_name + '}', out)) + these_outputs.append('${' + output_name + '}') + var_outputs.append('${' + output_name + '}') + + # add_custom_command + output.write('add_custom_command(OUTPUT\n') + for out in these_outputs: + output.write(' ') + output.write(out) + output.write('\n') + + for directory in dirs: + output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ') + output.write(directory) + output.write('\n') + + output.write(' COMMAND ') + output.write(gyp.common.EncodePOSIXShellList(rule['action'])) + output.write('\n') + + output.write(' DEPENDS ') + WriteVariable(output, inputs_name) + output.write(' ') + output.write(NormjoinPath(path_to_gyp, rule_source)) + output.write('\n') + + # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives. + # The cwd is the current build directory. + output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/') + output.write(path_to_gyp) + output.write('\n') + + output.write(' COMMENT ') + if 'message' in rule: + output.write(rule['message']) + else: + output.write(action_name) + output.write('\n') + + output.write(' VERBATIM\n') + output.write(')\n') + + UnsetVariable(output, 'RULE_INPUT_PATH') + UnsetVariable(output, 'RULE_INPUT_DIRNAME') + UnsetVariable(output, 'RULE_INPUT_NAME') + UnsetVariable(output, 'RULE_INPUT_ROOT') + UnsetVariable(output, 'RULE_INPUT_EXT') + + # add_custom_target + output.write('add_custom_target(') + output.write(rule_name) + output.write(' DEPENDS\n') + for out in var_outputs: + output.write(' ') + output.write(out) + output.write('\n') + output.write('SOURCES ') + WriteVariable(output, inputs_name) + output.write('\n') + for rule_source in rule.get('rule_sources', []): + output.write(' ') + output.write(NormjoinPath(path_to_gyp, rule_source)) + output.write('\n') + output.write(')\n') + + extra_deps.append(rule_name) + + +def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output): + """Write CMake for the 'copies' in the target. + + Args: + target_name: the name of the CMake target being generated. + actions: the Gyp 'actions' dict for this target. 
+    extra_deps: [<cmake_taget>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  copy_name = target_name + '__copies'
+
+  # CMake gets upset with custom targets with OUTPUT which specify no output.
+  have_copies = any(copy['files'] for copy in copies)
+  if not have_copies:
+    output.write('add_custom_target(')
+    output.write(copy_name)
+    output.write(')\n')
+    extra_deps.append(copy_name)
+    return
+
+  class Copy:
+    def __init__(self, ext, command):
+      self.cmake_inputs = []
+      self.cmake_outputs = []
+      self.gyp_inputs = []
+      self.gyp_outputs = []
+      self.ext = ext
+      self.inputs_name = None
+      self.outputs_name = None
+      self.command = command
+
+  file_copy = Copy('', 'copy')
+  dir_copy = Copy('_dirs', 'copy_directory')
+
+  for copy in copies:
+    files = copy['files']
+    destination = copy['destination']
+    for src in files:
+      path = os.path.normpath(src)
+      basename = os.path.split(path)[1]
+      dst = os.path.join(destination, basename)
+
+      copy = file_copy if os.path.basename(src) else dir_copy
+
+      copy.cmake_inputs.append(NormjoinPath(path_to_gyp, src))
+      copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+      copy.gyp_inputs.append(src)
+      copy.gyp_outputs.append(dst)
+
+  for copy in (file_copy, dir_copy):
+    if copy.cmake_inputs:
+      copy.inputs_name = copy_name + '__input' + copy.ext
+      SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+      copy.outputs_name = copy_name + '__output' + copy.ext
+      SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+  # add_custom_command
+  output.write('add_custom_command(\n')
+
+  output.write('OUTPUT')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n')
+
+  for copy in (file_copy, dir_copy):
+    for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+      # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+      output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
+      output.write(src)
+      output.write(' ')
+      output.write(dst)
+      output.write("\n")
+
+  output.write('DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.inputs_name:
+      WriteVariable(output, copy.inputs_name, ' ')
+  output.write('\n')
+
+  output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+  output.write(path_to_gyp)
+  output.write('\n')
+
+  output.write('COMMENT Copying for ')
+  output.write(target_name)
+  output.write('\n')
+
+  output.write('VERBATIM\n')
+  output.write(')\n')
+
+  # add_custom_target
+  output.write('add_custom_target(')
+  output.write(copy_name)
+  output.write('\n  DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n  SOURCES')
+  if file_copy.inputs_name:
+    WriteVariable(output, file_copy.inputs_name, ' ')
+  output.write('\n)\n')
+
+  extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+  """This is the name we would like the target to have."""
+  _, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_base_name = gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_base_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+  """An unambiguous name for the target."""
+  gyp_file, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_full_name = gyp_file + ':' + gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_full_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_full_name)
+
+
+class CMakeNamer(object):
+  """Converts Gyp target names into CMake target names.
+
+  CMake requires that target names be globally unique. One way to ensure
+  this is to fully qualify the names of the targets. Unfortunatly, this
+  ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+  of just "chrome". If this generator were only interested in building, it
+  would be possible to fully qualify all target names, then create
+  unqualified target names which depend on all qualified targets which
+  should have had that name. This is more or less what the 'make' generator
+  does with aliases. However, one goal of this generator is to create CMake
+  files for use with IDEs, and fully qualified names are not as user
+  friendly.
+
+  Since target name collision is rare, we do the above only when required.
+
+  Toolset variants are always qualified from the base, as this is required for
+  building. However, it also makes sense for an IDE, as it is possible for
+  defines to be different.
+ """ + def __init__(self, target_list): + self.cmake_target_base_names_conficting = set() + + cmake_target_base_names_seen = set() + for qualified_target in target_list: + cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target) + + if cmake_target_base_name not in cmake_target_base_names_seen: + cmake_target_base_names_seen.add(cmake_target_base_name) + else: + self.cmake_target_base_names_conficting.add(cmake_target_base_name) + + def CreateCMakeTargetName(self, qualified_target): + base_name = CreateCMakeTargetBaseName(qualified_target) + if base_name in self.cmake_target_base_names_conficting: + return CreateCMakeTargetFullName(qualified_target) + return base_name + + +def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, + options, generator_flags, all_qualified_targets, output): + + # The make generator does this always. + # TODO: It would be nice to be able to tell CMake all dependencies. + circular_libs = generator_flags.get('circular', True) + + if not generator_flags.get('standalone', False): + output.write('\n#') + output.write(qualified_target) + output.write('\n') + + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir) + rel_gyp_dir = os.path.dirname(rel_gyp_file) + + # Relative path from build dir to top dir. + build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) + # Relative path from build dir to gyp dir. + build_to_gyp = os.path.join(build_to_top, rel_gyp_dir) + + path_from_cmakelists_to_gyp = build_to_gyp + + spec = target_dicts.get(qualified_target, {}) + config = spec.get('configurations', {}).get(config_to_use, {}) + + target_name = spec.get('target_name', '<missing target name>') + target_type = spec.get('type', '<missing target type>') + target_toolset = spec.get('toolset') + + SetVariable(output, 'TARGET', target_name) + SetVariable(output, 'TOOLSET', target_toolset) + + cmake_target_name = namer.CreateCMakeTargetName(qualified_target) + + extra_sources = [] + extra_deps = [] + + # Actions must come first, since they can generate more OBJs for use below. + if 'actions' in spec: + WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps, + path_from_cmakelists_to_gyp, output) + + # Rules must be early like actions. + if 'rules' in spec: + WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps, + path_from_cmakelists_to_gyp, output) + + # Copies + if 'copies' in spec: + WriteCopies(cmake_target_name, spec['copies'], extra_deps, + path_from_cmakelists_to_gyp, output) + + # Target and sources + srcs = spec.get('sources', []) + + # Gyp separates the sheep from the goats based on file extensions. + def partition(l, p): + return reduce(lambda x, e: x[not p(e)].append(e) or x, l, ([], [])) + compilable_srcs, other_srcs = partition(srcs, Compilable) + + # CMake gets upset when executable targets provide no sources. + if target_type == 'executable' and not compilable_srcs and not extra_sources: + print ('Executable %s has no complilable sources, treating as "none".' % + target_name ) + target_type = 'none' + + cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) + if cmake_target_type is None: + print ('Target %s has unknown target type %s, skipping.' 
+           ( target_name,    target_type ) )
+    return
+
+  other_srcs_name = None
+  if other_srcs:
+    other_srcs_name = cmake_target_name + '__other_srcs'
+    SetVariableList(output, other_srcs_name,
+        [NormjoinPath(path_from_cmakelists_to_gyp, src) for src in other_srcs])
+
+  # CMake is opposed to setting linker directories and considers the practice
+  # of setting linker directories dangerous. Instead, it favors the use of
+  # find_library and passing absolute paths to target_link_libraries.
+  # However, CMake does provide the command link_directories, which adds
+  # link directories to targets defined after it is called.
+  # As a result, link_directories must come before the target definition.
+  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+  library_dirs = config.get('library_dirs')
+  if library_dirs is not None:
+    output.write('link_directories(')
+    for library_dir in library_dirs:
+      output.write(' ')
+      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+      output.write('\n')
+    output.write(')\n')
+
+  output.write(cmake_target_type.command)
+  output.write('(')
+  output.write(cmake_target_name)
+
+  if cmake_target_type.modifier is not None:
+    output.write(' ')
+    output.write(cmake_target_type.modifier)
+
+  if other_srcs_name:
+    WriteVariable(output, other_srcs_name, ' ')
+
+  output.write('\n')
+
+  for src in compilable_srcs:
+    output.write('  ')
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+  for extra_source in extra_sources:
+    output.write('  ')
+    src, _ = extra_source
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+
+  output.write(')\n')
+
+  # Output name and location.
+  if target_type != 'none':
+    # Mark uncompiled sources as uncompiled.
+    if other_srcs_name:
+      output.write('set_source_files_properties(')
+      WriteVariable(output, other_srcs_name, '')
+      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+    # Output directory
+    target_output_directory = spec.get('product_dir')
+    if target_output_directory is None:
+      if target_type in ('executable', 'loadable_module'):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      elif target_type in ('shared_library'):
+        target_output_directory = '${builddir}/lib.${TOOLSET}'
+      elif spec.get('standalone_static_library', False):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      else:
+        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
+                                            options.toplevel_dir)
+        target_output_directory = '${obj}.${TOOLSET}'
+        target_output_directory = (
+            os.path.join(target_output_directory, base_path))
+
+    cmake_target_output_directory = NormjoinPathForceCMakeSource(
+                                        path_from_cmakelists_to_gyp,
+                                        target_output_directory)
+    SetTargetProperty(output,
+                      cmake_target_name,
+                      cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
+                      cmake_target_output_directory)
+
+    # Output name
+    default_product_prefix = ''
+    default_product_name = target_name
+    default_product_ext = ''
+    if target_type == 'static_library':
+      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          static_library_prefix)
+      default_product_prefix = static_library_prefix
+      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
+
+    elif target_type in ('loadable_module', 'shared_library'):
+      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          shared_library_prefix)
+      default_product_prefix = shared_library_prefix
+      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+    elif target_type != 'executable':
+      print ('ERROR: What output file should be generated?',
+             'type', target_type, 'target', target_name)
+
+    product_prefix = spec.get('product_prefix', default_product_prefix)
+    product_name = spec.get('product_name', default_product_name)
+    product_ext = spec.get('product_extension')
+    if product_ext:
+      product_ext = '.' + product_ext
+    else:
+      product_ext = default_product_ext
+
+    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
+    SetTargetProperty(output, cmake_target_name,
+                      cmake_target_type.property_modifier + '_OUTPUT_NAME',
+                      product_name)
+    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
+
+    # Make the output of this target referenceable as a source.
+    cmake_target_output_basename = product_prefix + product_name + product_ext
+    cmake_target_output = os.path.join(cmake_target_output_directory,
+                                       cmake_target_output_basename)
+    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
+
+  # Let CMake know if the 'all' target should depend on this target.
+  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+                              else 'FALSE')
+  SetTargetProperty(output, cmake_target_name,
+                    'EXCLUDE_FROM_ALL', exclude_from_all)
+  for extra_target_name in extra_deps:
+    SetTargetProperty(output, extra_target_name,
+                      'EXCLUDE_FROM_ALL', exclude_from_all)
+
+  # Includes
+  includes = config.get('include_dirs')
+  if includes:
+    # This (target include directories) is what requires CMake 2.8.8
+    includes_name = cmake_target_name + '__include_dirs'
+    SetVariableList(output, includes_name,
+        [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+         for include in includes])
+    output.write('set_property(TARGET ')
+    output.write(cmake_target_name)
+    output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+    WriteVariable(output, includes_name, '')
+    output.write(')\n')
+
+  # Defines
+  defines = config.get('defines')
+  if defines is not None:
+    SetTargetProperty(output,
+                      cmake_target_name,
+                      'COMPILE_DEFINITIONS',
+                      defines,
+                      ';')
+
+  # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+  # CMake currently does not have target C and CXX flags.
+  # So, instead of doing...
+
+  # cflags_c = config.get('cflags_c')
+  # if cflags_c is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                     'C_COMPILE_FLAGS', cflags_c, ' ')
+
+  # cflags_cc = config.get('cflags_cc')
+  # if cflags_cc is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                     'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+  # Instead we must...
+  s_sources = []
+  c_sources = []
+  cxx_sources = []
+  for src in srcs:
+    _, ext = os.path.splitext(src)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  for extra_source in extra_sources:
+    src, real_source = extra_source
+    _, ext = os.path.splitext(real_source)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  cflags = config.get('cflags', [])
+  cflags_c = config.get('cflags_c', [])
+  cflags_cxx = config.get('cflags_cc', [])
+  if c_sources and not (s_sources or cxx_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_c)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  elif cxx_sources and not (s_sources or c_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_cxx)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  else:
+    if s_sources and cflags:
+      SetFilesProperty(output, s_sources, 'COMPILE_FLAGS', cflags, ' ')
+
+    if c_sources and (cflags or cflags_c):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_c)
+      SetFilesProperty(output, c_sources, 'COMPILE_FLAGS', flags, ' ')
+
+    if cxx_sources and (cflags or cflags_cxx):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_cxx)
+      SetFilesProperty(output, cxx_sources, 'COMPILE_FLAGS', flags, ' ')
+
+  # Have assembly link as c if there are no other files
+  if not c_sources and not cxx_sources and s_sources:
+    SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
+  # Linker flags
+  ldflags = config.get('ldflags')
+  if ldflags is not None:
+    SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+  # Note on Dependencies and Libraries:
+  # CMake wants to handle link order, resolving the link line up front.
+  # Gyp does not retain or enforce specifying enough information to do so.
+  # So do as other gyp generators and use --start-group and --end-group.
+  # Give CMake as little information as possible so that it doesn't mess it up.
+
+  # Dependencies
+  rawDeps = spec.get('dependencies', [])
+
+  static_deps = []
+  shared_deps = []
+  other_deps = []
+  for rawDep in rawDeps:
+    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+    dep_spec = target_dicts.get(rawDep, {})
+    dep_target_type = dep_spec.get('type', None)
+
+    if dep_target_type == 'static_library':
+      static_deps.append(dep_cmake_name)
+    elif dep_target_type == 'shared_library':
+      shared_deps.append(dep_cmake_name)
+    else:
+      other_deps.append(dep_cmake_name)
+
+  # ensure all external dependencies are complete before internal dependencies
+  # extra_deps currently only depend on their own deps, so otherwise run early
+  if static_deps or shared_deps or other_deps:
+    for extra_dep in extra_deps:
+      output.write('add_dependencies(')
+      output.write(extra_dep)
+      output.write('\n')
+      for deps in (static_deps, shared_deps, other_deps):
+        for dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      output.write(')\n')
+
+  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
+  other_deps.extend(extra_deps)
+  if other_deps or (not linkable and (static_deps or shared_deps)):
+    output.write('add_dependencies(')
+    output.write(cmake_target_name)
+    output.write('\n')
+    for dep in gyp.common.uniquer(other_deps):
+      output.write('  ')
+      output.write(dep)
+      output.write('\n')
+    if not linkable:
+      for deps in (static_deps, shared_deps):
+        for lib_dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(lib_dep)
+          output.write('\n')
+    output.write(')\n')
+
+  # Libraries
+  if linkable:
+    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
+    if external_libs or static_deps or shared_deps:
+      output.write('target_link_libraries(')
+      output.write(cmake_target_name)
+      output.write('\n')
+      if static_deps:
+        write_group = circular_libs and len(static_deps) > 1
+        if write_group:
+          output.write('-Wl,--start-group\n')
+        for dep in gyp.common.uniquer(static_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+        if write_group:
+          output.write('-Wl,--end-group\n')
+      if shared_deps:
+        for dep in gyp.common.uniquer(shared_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      if external_libs:
+        for lib in gyp.common.uniquer(external_libs):
+          output.write('  ')
+          output.write(lib)
+          output.write('\n')
+
+      output.write(')\n')
+
+  UnsetVariable(output, 'TOOLSET')
+  UnsetVariable(output, 'TARGET')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, config_to_use):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
+  # Each Gyp configuration creates a different CMakeLists.txt file
+  # to avoid incompatibilities between Gyp and CMake configurations.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  # build_dir: relative path from source root to our output files.
+  # e.g. "out/Debug"
+  build_dir = os.path.normpath(os.path.join(generator_dir,
+                                            output_dir,
+                                            config_to_use))
+
+  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+  output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
+  gyp.common.EnsureDirExists(output_file)
+
+  output = open(output_file, 'w')
+  output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  output.write('cmake_policy(VERSION 2.8.8)\n')
+
+  _, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+  output.write('project(')
+  output.write(project_target)
+  output.write(')\n')
+
+  SetVariable(output, 'configuration', config_to_use)
+
+  # The following appears to be as-yet undocumented.
+  # http://public.kitware.com/Bug/view.php?id=8392
+  output.write('enable_language(ASM)\n')
+  # ASM-ATT does not support .S files.
+  # output.write('enable_language(ASM-ATT)\n')
+
+  SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
+  SetVariable(output, 'obj', '${builddir}/obj')
+  output.write('\n')
+
+  # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+  # CMake by default names the object resulting from foo.c to be foo.c.o.
+  # Gyp traditionally names the object resulting from foo.c foo.o.
+  # This should be irrelevant, but some targets extract .o files from .a
+  # and depend on the name of the extracted .o files.
+  output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('\n')
+
+  namer = CMakeNamer(target_list)
+
+  # The list of targets upon which the 'all' target should depend.
+  # CMake has it's own implicit 'all' target, one is not created explicitly.
+  all_qualified_targets = set()
+  for build_file in params['build_files']:
+    for qualified_target in gyp.common.AllTargets(target_list,
+                                                  target_dicts,
+                                                  os.path.normpath(build_file)):
+      all_qualified_targets.add(qualified_target)
+
+  for qualified_target in target_list:
+    WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+                options, generator_flags, all_qualified_targets, output)
+
+  output.close()
+
+
+def PerformBuild(data, configurations, params):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  for config_name in configurations:
+    # build_dir: relative path from source root to our output files.
+    # e.g. "out/Debug"
+    build_dir = os.path.normpath(os.path.join(generator_dir,
+                                              output_dir,
+                                              config_name))
+    arguments = ['cmake', '-G', 'Ninja']
+    print 'Generating [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments, cwd=build_dir)
+
+    arguments = ['ninja', '-C', build_dir]
+    print 'Building [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+  # Ignore the interrupt signal so that the parent process catches it and
+  # kills all multiprocessing children.
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+  target_list, target_dicts, data, params, config_name = arglist
+  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+  user_config = params.get('generator_flags', {}).get('config', None)
+  if user_config:
+    GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, user_config)
+  else:
+    config_names = target_dicts[target_list[0]]['configurations'].keys()
+    if params['parallel']:
+      try:
+        pool = multiprocessing.Pool(len(config_names))
+        arglists = []
+        for config_name in config_names:
+          arglists.append((target_list, target_dicts, data,
+                           params, config_name))
+          pool.map(CallGenerateOutputForConfig, arglists)
+      except KeyboardInterrupt, e:
+        pool.terminate()
+        raise e
+    else:
+      for config_name in config_names:
+        GenerateOutputForConfig(target_list, target_dicts, data,
+                                params, config_name)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index a80edc892..84380b04d 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -270,9 +270,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
   shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                               os.path.join(toplevel_build, 'gen')]
 
-  if not os.path.exists(toplevel_build):
-    os.makedirs(toplevel_build)
-  out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w')
+  out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
+  gyp.common.EnsureDirExists(out_name)
+  out = open(out_name, 'w')
 
   out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
   out.write('<cdtprojectproperties>\n')
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index efd59bc4a..b3f8a2b77 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -57,6 +57,7 @@ generator_wants_sorted_dependencies = False
 generator_additional_non_configuration_keys = []
 generator_additional_path_sections = []
 generator_extra_sources_for_rules = []
+generator_filelist_paths = None
 
 
 def CalculateVariables(default_variables, params):
@@ -103,11 +104,17 @@ def CalculateGeneratorInputInfo(params):
     global generator_wants_sorted_dependencies
     generator_wants_sorted_dependencies = True
 
+  output_dir = params['options'].generator_output or \
+               params['options'].toplevel_dir
+  builddir_name = generator_flags.get('output_dir', 'out')
+  qualified_out_dir = os.path.normpath(os.path.join(
+      output_dir, builddir_name, 'gypfiles'))
 
-def ensure_directory_exists(path):
-  dir = os.path.dirname(path)
-  if dir and not os.path.exists(dir):
-    os.makedirs(dir)
+  global generator_filelist_paths
+  generator_filelist_paths = {
+      'toplevel': params['options'].toplevel_dir,
+      'qualified_out_dir': qualified_out_dir,
+  }
 
 
 # The .d checking code below uses these functions:
@@ -678,7 +685,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
       spec, configs: gyp info
       part_of_all: flag indicating this target is part of 'all'
     """
-    ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
     self.fp = open(output_filename, 'w')
@@ -807,7 +814,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
       targets: list of "all" targets for this sub-project
       build_dir: build output directory, relative to the sub-project
    """
-    ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
     self.fp = open(output_filename, 'w')
     self.fp.write(header)
     # For consistency with other builders, put sub-project build output in the
@@ -1946,7 +1953,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
     # We write the file in the base_path directory.
     output_file = os.path.join(options.depth, base_path, base_name)
     if options.generator_output:
-      output_file = os.path.join(options.generator_output, output_file)
+      output_file = os.path.join(
+          options.depth, options.generator_output, base_path, base_name)
     base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                         options.toplevel_dir)
     return base_path, output_file
@@ -1969,7 +1977,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
   makefile_path = os.path.join(options.toplevel_dir, makefile_name)
   if options.generator_output:
     global srcdir_prefix
-    makefile_path = os.path.join(options.generator_output, makefile_path)
+    makefile_path = os.path.join(
+        options.toplevel_dir, options.generator_output, makefile_name)
     srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
     srcdir_prefix = '$(srcdir)/'
@@ -2045,8 +2054,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
         make_global_settings += (
             'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
         # Let gyp-time envvars win over global settings.
-        if key in os.environ:
-          value = os.environ[key]
+        env_key = key.replace('.', '_')  # CC.host -> CC_host
+        if env_key in os.environ:
+          value = os.environ[env_key]
         make_global_settings += '  %s = %s\n' % (key, value)
         make_global_settings += 'endif\n'
       else:
@@ -2056,7 +2066,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
 
   header_params['make_global_settings'] = make_global_settings
 
-  ensure_directory_exists(makefile_path)
+  gyp.common.EnsureDirExists(makefile_path)
   root_makefile = open(makefile_path, 'w')
   root_makefile.write(SHARED_HEADER % header_params)
   # Currently any versions have the same effect, but in future the behavior
@@ -2088,7 +2098,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
     this_make_global_settings = data[build_file].get('make_global_settings', [])
     assert make_global_settings_array == this_make_global_settings, (
-        "make_global_settings needs to be the same for all targets.")
+        "make_global_settings needs to be the same for all targets. %s vs. %s" %
+        (this_make_global_settings, make_global_settings))
 
     build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
     included_files = data[build_file]['included_files']
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index 63afda914..c59aea106 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import collections
 import copy
 import ntpath
 import os
@@ -21,6 +22,16 @@ import gyp.MSVSUtil as MSVSUtil
 import gyp.MSVSVersion as MSVSVersion
 from gyp.common import GypError
 
+# TODO: Remove once bots are on 2.7, http://crbug.com/241769
+def _import_OrderedDict():
+  import collections
+  try:
+    return collections.OrderedDict
+  except AttributeError:
+    import gyp.ordered_dict
+    return gyp.ordered_dict.OrderedDict
+OrderedDict = _import_OrderedDict()
+
 
 # Regular expression for validating Visual Studio GUIDs. If the GUID
 # contains lowercase hex letters, MSVS will be fine. However,
@@ -86,6 +97,46 @@
 cached_username = None
 cached_domain = None
 
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+  def __init__(self, iterable=None):
+    self.end = end = []
+    end += [None, end, end]         # sentinel node for doubly linked list
+    self.map = {}                   # key --> [key, prev, next]
+    if iterable is not None:
+      self |= iterable
+
+  def __len__(self):
+    return len(self.map)
+
+  def discard(self, key):
+    if key in self.map:
+      key, prev, next = self.map.pop(key)
+      prev[2] = next
+      next[1] = prev
+
+  def __contains__(self, key):
+    return key in self.map
+
+  def add(self, key):
+    if key not in self.map:
+      end = self.end
+      curr = end[1]
+      curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+  def update(self, iterable):
+    for i in iterable:
+      if i not in self:
+        self.add(i)
+
+  def __iter__(self):
+    end = self.end
+    curr = end[2]
+    while curr is not end:
+      yield curr[0]
+      curr = curr[2]
+
+
 # TODO(gspencer): Switch the os.environ calls to be
 # win32api.GetDomainName() and win32api.GetUserName() once the
 # python version in depot_tools has been updated to work on Vista
@@ -158,13 +209,14 @@ def _FixPaths(paths):
 
 def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
-                                     list_excluded=True):
+                                     list_excluded=True, msvs_version=None):
   """Converts a list split source file paths into a vcproj folder hierarchy.
 
   Arguments:
     sources: A list of source file paths split.
    prefix: A list of source file path layers meant to apply to each of sources.
    excluded: A set of excluded files.
+    msvs_version: A MSVSVersion object.
 
   Returns:
     A hierarchy of filenames and MSVSProject.Filter objects that matches the
@@ -179,7 +231,7 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
   if not prefix: prefix = []
   result = []
   excluded_result = []
-  folders = dict()
+  folders = OrderedDict()
   # Gather files into the final result, excluded, or folders.
   for s in sources:
     if len(s) == 1:
@@ -188,15 +240,28 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
         excluded_result.append(filename)
       else:
         result.append(filename)
-    else:
+    elif msvs_version and not msvs_version.UsesVcxproj():
+      # For MSVS 2008 and earlier, we need to process all files before walking
+      # the sub folders.
       if not folders.get(s[0]):
         folders[s[0]] = []
       folders[s[0]].append(s[1:])
+    else:
+      contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
+                                                  excluded=excluded,
+                                                  list_excluded=list_excluded,
+                                                  msvs_version=msvs_version)
+      contents = MSVSProject.Filter(s[0], contents=contents)
+      result.append(contents)
 
   # Add a folder for excluded files.
   if excluded_result and list_excluded:
     excluded_folder = MSVSProject.Filter('_excluded_files',
                                          contents=excluded_result)
     result.append(excluded_folder)
+
+  if msvs_version and msvs_version.UsesVcxproj():
+    return result
+
   # Populate all the folders.
   for f in folders:
     contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
@@ -204,7 +269,6 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
                                                 list_excluded=list_excluded)
     contents = MSVSProject.Filter(f, contents=contents)
     result.append(contents)
-
   return result
 
 
@@ -415,13 +479,13 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
         dicts describing the actions attached to that input file.
""" for primary_input in actions_dict: - inputs = set() - outputs = set() + inputs = OrderedSet() + outputs = OrderedSet() descriptions = [] commands = [] for action in actions_dict[primary_input]: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) + inputs.update(OrderedSet(action['inputs'])) + outputs.update(OrderedSet(action['outputs'])) descriptions.append(action['description']) commands.append(action['command']) # Add the custom build step for one input file. @@ -477,8 +541,8 @@ def _RuleInputsAndOutputs(rule, trigger_file): """ raw_inputs = _FixPaths(rule.get('inputs', [])) raw_outputs = _FixPaths(rule.get('outputs', [])) - inputs = set() - outputs = set() + inputs = OrderedSet() + outputs = OrderedSet() inputs.add(trigger_file) for i in raw_inputs: inputs.add(_RuleExpandPath(i, trigger_file)) @@ -549,16 +613,16 @@ def _GenerateExternalRules(rules, output_dir, spec, mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') # Gather stuff needed to emit all: target. - all_inputs = set() - all_outputs = set() - all_output_dirs = set() + all_inputs = OrderedSet() + all_outputs = OrderedSet() + all_output_dirs = OrderedSet() first_outputs = [] for rule in rules: trigger_files = _FindRuleTriggerFiles(rule, sources) for tf in trigger_files: inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(set(inputs)) - all_outputs.update(set(outputs)) + all_inputs.update(OrderedSet(inputs)) + all_outputs.update(OrderedSet(outputs)) # Only use one target from each rule as the dependency for # 'all' so we don't try to build each rule multiple times. first_outputs.append(list(outputs)[0]) @@ -799,8 +863,8 @@ def _AdjustSourcesForRules(spec, rules, sources, excluded_sources): trigger_files = _FindRuleTriggerFiles(rule, sources) for trigger_file in trigger_files: inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) - inputs = set(_FixPaths(inputs)) - outputs = set(_FixPaths(outputs)) + inputs = OrderedSet(_FixPaths(inputs)) + outputs = OrderedSet(_FixPaths(outputs)) inputs.remove(_FixPath(trigger_file)) sources.update(inputs) if not spec.get('msvs_external_builder'): @@ -817,7 +881,7 @@ def _FilterActionsFromExcluded(excluded_sources, actions_to_add): Returns: excluded_sources with files that have actions attached removed. """ - must_keep = set(_FixPaths(actions_to_add.keys())) + must_keep = OrderedSet(_FixPaths(actions_to_add.keys())) return [s for s in excluded_sources if s not in must_keep] @@ -900,9 +964,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags): generator_flags: dict of generator-specific flags. """ spec = project.spec - vcproj_dir = os.path.dirname(project.path) - if vcproj_dir and not os.path.exists(vcproj_dir): - os.makedirs(vcproj_dir) + gyp.common.EnsureDirExists(project.path) platforms = _GetUniquePlatforms(spec) p = MSVSProject.Writer(project.path, version, spec['target_name'], @@ -929,8 +991,9 @@ def _GenerateMSVSProject(project, options, version, generator_flags): actions_to_add) list_excluded = generator_flags.get('msvs_list_excluded_files', True) sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded)) + _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir, + sources, excluded_sources, + list_excluded, version)) # Add in files. 
missing_sources = _VerifySourcesExist(sources, project_dir) @@ -965,7 +1028,7 @@ def _GetUniquePlatforms(spec): The MSVSUserFile object created. """ # Gather list of unique platforms. - platforms = set() + platforms = OrderedSet() for configuration in spec['configurations']: platforms.add(_ConfigPlatform(spec['configurations'][configuration])) platforms = list(platforms) @@ -1152,7 +1215,7 @@ def _GetLibraries(spec): # in libraries that are assumed to be in the default library path). # Also remove duplicate entries, leaving only the last duplicate, while # preserving order. - found = set() + found = OrderedSet() unique_libraries_list = [] for entry in reversed(libraries): library = re.sub('^\-l', '', entry) @@ -1331,8 +1394,7 @@ def _GetMSVSAttributes(spec, config, config_type): def _AddNormalizedSources(sources_set, sources_array): - sources = [_NormalizedSource(s) for s in sources_array] - sources_set.update(set(sources)) + sources_set.update(_NormalizedSource(s) for s in sources_array) def _PrepareListOfSources(spec, generator_flags, gyp_file): @@ -1350,9 +1412,9 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file): A pair of (list of sources, list of excluded sources). The sources will be relative to the gyp file. """ - sources = set() + sources = OrderedSet() _AddNormalizedSources(sources, spec.get('sources', [])) - excluded_sources = set() + excluded_sources = OrderedSet() # Add in the gyp file. if not generator_flags.get('standalone'): sources.add(gyp_file) @@ -1362,7 +1424,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file): inputs = a['inputs'] inputs = [_NormalizedSource(i) for i in inputs] # Add all inputs to sources and excluded sources. - inputs = set(inputs) + inputs = OrderedSet(inputs) sources.update(inputs) if not spec.get('msvs_external_builder'): excluded_sources.update(inputs) @@ -1375,7 +1437,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file): def _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, gyp_dir, sources, excluded_sources, list_excluded): + spec, options, gyp_dir, sources, excluded_sources, list_excluded, version): """Adjusts the list of sources and excluded sources. Also converts the sets to lists. @@ -1386,12 +1448,13 @@ def _AdjustSourcesAndConvertToFilterHierarchy( gyp_dir: The path to the gyp file being processed. sources: A set of sources to be included for this project. excluded_sources: A set of sources to be excluded for this project. + version: A MSVSVersion object. Returns: A trio of (list of sources, list of excluded sources, path of excluded IDL file) """ # Exclude excluded sources coming into the generator. - excluded_sources.update(set(spec.get('sources_excluded', []))) + excluded_sources.update(OrderedSet(spec.get('sources_excluded', []))) # Add excluded sources into sources for good measure. sources.update(excluded_sources) # Convert to proper windows form. @@ -1410,7 +1473,13 @@ def _AdjustSourcesAndConvertToFilterHierarchy( # Convert to folders and the right slashes. sources = [i.split('\\') for i in sources] sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, - list_excluded=list_excluded) + list_excluded=list_excluded, + msvs_version=version) + + # Prune filters with a single child to flatten ugly directory structures + # such as ../../src/modules/module1 etc. 
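
# Effect of the pruning loop that follows, sketched with a stand-in for
# MSVSProject.Filter (the stand-in is hypothetical; the real class carries a
# name and a contents list):
class Filter(object):
  def __init__(self, name, contents):
    self.name, self.contents = name, contents

# A path like '..\\..\\src\\modules\\module1\\foo.cc' yields a chain of
# single-child filters; hoisting the lone child repeatedly flattens the chain.
sources = [Filter('..', [Filter('..', [Filter('src', ['foo.cc'])])])]
while len(sources) == 1 and isinstance(sources[0], Filter):
  sources = sources[0].contents
print(sources)  # ['foo.cc']
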
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): + sources = sources[0].contents return sources, excluded_sources, excluded_idl @@ -1479,7 +1548,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): def _AddToolFilesToMSVS(p, spec): # Add in tool files (rules). - tool_files = set() + tool_files = OrderedSet() for _, config in spec['configurations'].iteritems(): for f in config.get('msvs_tool_files', []): tool_files.add(f) @@ -3057,9 +3126,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): spec = project.spec configurations = spec['configurations'] project_dir, project_file_name = os.path.split(project.path) - msbuildproj_dir = os.path.dirname(project.path) - if msbuildproj_dir and not os.path.exists(msbuildproj_dir): - os.makedirs(msbuildproj_dir) + gyp.common.EnsureDirExists(project.path) # Prepare list of sources and excluded sources. gyp_path = _NormalizedSource(project.build_file) relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) @@ -3088,7 +3155,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags): _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir, sources, excluded_sources, - list_excluded)) + list_excluded, version)) # Don't add actions if we are using an external builder like ninja. if not spec.get('msvs_external_builder'): @@ -3208,16 +3275,16 @@ def _GenerateActionsForMSBuild(spec, actions_to_add): Returns: A pair of (action specification, the sources handled by this action). """ - sources_handled_by_action = set() + sources_handled_by_action = OrderedSet() actions_spec = [] for primary_input, actions in actions_to_add.iteritems(): - inputs = set() - outputs = set() + inputs = OrderedSet() + outputs = OrderedSet() descriptions = [] commands = [] for action in actions: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) + inputs.update(OrderedSet(action['inputs'])) + outputs.update(OrderedSet(action['outputs'])) descriptions.append(action['description']) cmd = action['command'] # For most actions, add 'call' so that actions that invoke batch files diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py index 85d459e6a..c2951a4c5 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py @@ -814,15 +814,18 @@ class NinjaWriter: cflags_c = self.msvs_settings.GetCflagsC(config_name) cflags_cc = self.msvs_settings.GetCflagsCC(config_name) extra_defines = self.msvs_settings.GetComputedDefines(config_name) - pdbpath = self.msvs_settings.GetCompilerPdbName( + # See comment at cc_command for why there's two .pdb files. + pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( config_name, self.ExpandSpecial) - if not pdbpath: + if not pdbpath_c: obj = 'obj' if self.toolset != 'target': obj += '.' 
+ self.toolset - pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, - self.name + '.pdb')) - self.WriteVariableList(ninja_file, 'pdbname', [pdbpath]) + pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) + pdbpath_c = pdbpath + '.c.pdb' + pdbpath_cc = pdbpath + '.cc.pdb' + self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c]) + self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc]) self.WriteVariableList(ninja_file, 'pchprefix', [self.name]) else: cflags = config.get('cflags', []) @@ -1034,15 +1037,21 @@ class NinjaWriter: self.GypPathToNinja, arch) ldflags = env_ldflags + ldflags elif self.flavor == 'win': - manifest_name = self.GypPathToUniqueOutput( + manifest_base_name = self.GypPathToUniqueOutput( self.ComputeOutputFileName(spec)) - ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name, - self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable) + ldflags, intermediate_manifest, manifest_files = \ + self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja, + self.ExpandSpecial, manifest_base_name, + output, is_executable, + self.toplevel_build) ldflags = env_ldflags + ldflags self.WriteVariableList(ninja_file, 'manifests', manifest_files) + implicit_deps = implicit_deps.union(manifest_files) + if intermediate_manifest: + self.WriteVariableList( + ninja_file, 'intermediatemanifest', [intermediate_manifest]) command_suffix = _GetWinLinkRuleNameSuffix( - self.msvs_settings.IsEmbedManifest(config_name), - self.msvs_settings.IsLinkIncremental(config_name)) + self.msvs_settings.IsEmbedManifest(config_name)) def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) if def_file: implicit_deps.add(def_file) @@ -1087,16 +1096,27 @@ class NinjaWriter: extra_bindings.append(('lib', gyp.common.EncodePOSIXShellArgument(output))) if self.flavor == 'win': - extra_bindings.append(('dll', output)) + extra_bindings.append(('binary', output)) if '/NOENTRY' not in ldflags: self.target.import_lib = output + '.lib' extra_bindings.append(('implibflag', '/IMPLIB:%s' % self.target.import_lib)) + pdbname = self.msvs_settings.GetPDBName( + config_name, self.ExpandSpecial, output + '.pdb') output = [output, self.target.import_lib] + if pdbname: + output.append(pdbname) elif not self.is_mac_bundle: output = [output, output + '.TOC'] else: command = command + '_notoc' + elif self.flavor == 'win': + extra_bindings.append(('binary', output)) + pdbname = self.msvs_settings.GetPDBName( + config_name, self.ExpandSpecial, output + '.pdb') + if pdbname: + output = [output, pdbname] + if len(solibs): extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) @@ -1502,10 +1522,7 @@ def CalculateGeneratorInputInfo(params): def OpenOutput(path, mode='w'): """Open |path| for writing, creating directories if necessary.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass + gyp.common.EnsureDirExists(path) return open(path, mode) @@ -1540,7 +1557,10 @@ def GetDefaultConcurrentLinks(): mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32))) - return min(mem_limit, hard_cap) + # return min(mem_limit, hard_cap) + # TODO(scottmg): Temporary speculative fix for OOM on builders + # See http://crbug.com/333000. 
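
# For reference, the heuristic that the early return below bypasses: one
# concurrent link per 4 GB of physical RAM, capped by the
# GYP_LINK_CONCURRENCY_MAX environment variable. A standalone sketch:
import os

def ConcurrentLinksForMemory(total_physical_bytes):
  mem_limit = max(1, total_physical_bytes // (4 * (2 ** 30)))  # per 4 GB
  hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2 ** 32)))
  return min(mem_limit, hard_cap)

print(ConcurrentLinksForMemory(16 * 2 ** 30))  # 4 on a 16 GB machine
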
+ return 2 elif sys.platform.startswith('linux'): with open("/proc/meminfo") as meminfo: memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') @@ -1564,81 +1584,57 @@ def GetDefaultConcurrentLinks(): return 1 -def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental): +def _GetWinLinkRuleNameSuffix(embed_manifest): """Returns the suffix used to select an appropriate linking rule depending on - whether the manifest embedding and/or incremental linking is enabled.""" - suffix = '' - if embed_manifest: - suffix += '_embed' - if link_incremental: - suffix += '_inc' - return suffix + whether the manifest embedding is enabled.""" + return '_embed' if embed_manifest else '' -def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental): +def _AddWinLinkRules(master_ninja, embed_manifest): """Adds link rules for Windows platform to |master_ninja|.""" def FullLinkCommand(ldcmd, out, binary_type): - cmd = ('cmd /c %(ldcmd)s' - ' && %(python)s gyp-win-tool manifest-wrapper $arch' - ' cmd /c if exist %(out)s.manifest del %(out)s.manifest' - ' && %(python)s gyp-win-tool manifest-wrapper $arch' - ' $mt -nologo -manifest $manifests') - if embed_manifest and not link_incremental: - # Embed manifest into a binary. If incremental linking is enabled, - # embedding is postponed to the re-linking stage (see below). - cmd += ' -outputresource:%(out)s;%(resname)s' - else: - # Save manifest as an external file. - cmd += ' -out:%(out)s.manifest' - if link_incremental: - # There is no point in generating separate rule for the case when - # incremental linking is enabled, but manifest embedding is disabled. - # In that case the basic rule should be used (e.g. 'link'). - # See also implementation of _GetWinLinkRuleNameSuffix(). - assert embed_manifest - # Make .rc file out of manifest, compile it to .res file and re-link. 
- cmd += (' && %(python)s gyp-win-tool manifest-to-rc $arch' - ' %(out)s.manifest %(out)s.manifest.rc %(resname)s' - ' && %(python)s gyp-win-tool rc-wrapper $arch $rc' - ' %(out)s.manifest.rc' - ' && %(ldcmd)s %(out)s.manifest.res') resource_name = { 'exe': '1', 'dll': '2', }[binary_type] - return cmd % {'python': sys.executable, - 'out': out, - 'ldcmd': ldcmd, - 'resname': resource_name} - - rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental) - dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper() - dllcmd = ('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo $implibflag /DLL /OUT:$dll ' - '/PDB:$dll.pdb @$dll.rsp' % sys.executable) - dllcmd = FullLinkCommand(dllcmd, '$dll', 'dll') + return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \ + '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \ + '$manifests' % { + 'python': sys.executable, + 'out': out, + 'ldcmd': ldcmd, + 'resname': resource_name, + 'embed': embed_manifest } + rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) + use_separate_mspdbsrv = ( + int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0) + dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper() + dllcmd = ('%s gyp-win-tool link-wrapper $arch %s ' + '$ld /nologo $implibflag /DLL /OUT:$binary ' + '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv)) + dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll') master_ninja.rule('solink' + rule_name_suffix, description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', + rspfile='$binary.rsp', rspfile_content='$libs $in_newline $ldflags', restat=True, pool='link_pool') master_ninja.rule('solink_module' + rule_name_suffix, description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', + rspfile='$binary.rsp', rspfile_content='$libs $in_newline $ldflags', restat=True, pool='link_pool') # Note that ldflags goes at the end so that it has the option of # overriding default settings earlier in the command line. - exe_cmd = ('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp' % - sys.executable) - exe_cmd = FullLinkCommand(exe_cmd, '$out', 'exe') + exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s ' + '$ld /nologo /OUT:$binary @$binary.rsp' % + (sys.executable, use_separate_mspdbsrv)) + exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe') master_ninja.rule('link' + rule_name_suffix, - description='LINK%s $out' % rule_name_suffix.upper(), + description='LINK%s $binary' % rule_name_suffix.upper(), command=exe_cmd, - rspfile='$out.rsp', + rspfile='$binary.rsp', rspfile_content='$in_newline $libs $ldflags', pool='link_pool') @@ -1656,9 +1652,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, toplevel_build = os.path.join(options.toplevel_dir, build_dir) - master_ninja = ninja_syntax.Writer( - OpenOutput(os.path.join(toplevel_build, 'build.ninja')), - width=120) + master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja')) + master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) # Put build-time support tools in out/{config_name}. 
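
# The link rules above funnel the long library and flag lists through
# response files: everything but the command itself lands in $binary.rsp and
# the linker reads it back via '@', sidestepping the Windows command-length
# limit. The same pattern in miniature (file names hypothetical):
objs, libs, ldflags = ['a.obj', 'b.obj'], ['user32.lib'], ['/DEBUG']
with open('app.exe.rsp', 'w') as rsp:
  rsp.write(' '.join(objs + libs + ldflags))
cmd = 'link.exe /nologo /OUT:app.exe @app.exe.rsp'
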
gyp.common.CopyTool(flavor, toplevel_build) @@ -1679,8 +1674,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, ld = 'link.exe' ld_host = '$ld' else: - cc = 'gcc' - cxx = 'g++' + cc = 'cc' + cxx = 'c++' ld = '$cc' ldxx = '$cxx' ld_host = '$cc_host' @@ -1798,14 +1793,20 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, depfile='$out.d', deps=deps) else: + # TODO(scottmg) Separate pdb names is a test to see if it works around + # http://crbug.com/142362. It seems there's a race between the creation of + # the .pdb by the precompiled header step for .cc and the compilation of + # .c files. This should be handled by mspdbsrv, but rarely errors out with + # c1xx : fatal error C1033: cannot open program database + # By making the rules target separate pdb files this might be avoided. cc_command = ('ninja -t msvc -e $arch ' + '-- ' '$cc /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') + '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ') cxx_command = ('ninja -t msvc -e $arch ' + '-- ' '$cxx /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') + '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ') master_ninja.rule( 'cc', description='CC $out', @@ -1893,17 +1894,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.rule( 'alink', description='LIB $out', - command=('%s gyp-win-tool link-wrapper $arch ' + command=('%s gyp-win-tool link-wrapper $arch False ' '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % sys.executable), rspfile='$out.rsp', rspfile_content='$in_newline $libflags') - _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=True) - _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=False) - _AddWinLinkRules(master_ninja, embed_manifest=False, link_incremental=False) - # Do not generate rules for embed_manifest=False and link_incremental=True - # because in that case rules for (False, False) should be used (see - # implementation of _GetWinLinkRuleNameSuffix()). + _AddWinLinkRules(master_ninja, embed_manifest=True) + _AddWinLinkRules(master_ninja, embed_manifest=False) else: master_ninja.rule( 'objc', @@ -2047,7 +2044,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, this_make_global_settings = data[build_file].get('make_global_settings', []) assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets.") + "make_global_settings needs to be the same for all targets. %s vs. 
%s" % + (this_make_global_settings, make_global_settings)) spec = target_dicts[qualified_target] if flavor == 'mac': @@ -2098,6 +2096,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.build('all', 'phony', list(all_outputs)) master_ninja.default(generator_flags.get('default_target', 'all')) + master_ninja_file.close() + def PerformBuild(data, configurations, params): options = params['options'] diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py index 45e791d14..6472912db 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/input.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py @@ -822,8 +822,7 @@ def ExpandVariables(input, phase, variables, build_file): rel_build_file_dir = build_file_dir qualified_out_dir = generator_filelist_paths['qualified_out_dir'] path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) - if not os.path.isdir(os.path.dirname(path)): - os.makedirs(os.path.dirname(path)) + gyp.common.EnsureDirExists(path) replacement = gyp.common.RelativePath(path, build_file_dir) f = gyp.common.WriteOnDiff(path) @@ -2278,6 +2277,7 @@ def ProcessListFiltersInDict(name, the_dict): continue if not isinstance(the_dict[list_key], list): + value = the_dict[list_key] raise ValueError, name + ' key ' + list_key + \ ' must be list, not ' + \ value.__class__.__name__ + ' when applying ' + \ diff --git a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py index 20b3a4865..ac19b6dc8 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py @@ -9,6 +9,8 @@ These functions are executed via gyp-mac-tool when using the Makefile generator. """ import fcntl +import fnmatch +import glob import json import os import plistlib @@ -17,6 +19,7 @@ import shutil import string import subprocess import sys +import tempfile def main(args): @@ -259,6 +262,250 @@ class MacTool(object): os.remove(link) os.symlink(dest, link) + def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. copy ResourceRules.plist from the user or the SDK into the bundle, + 2. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 3. copy Entitlements.plist from user or SDK next to the bundle, + 4. code sign the bundle. + """ + resource_rules_path = self._InstallResourceRules(resource_rules) + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier()) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides) + subprocess.check_call([ + 'codesign', '--force', '--sign', key, '--resource-rules', + resource_rules_path, '--entitlements', entitlements_path, + os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['FULL_PRODUCT_NAME'])]) + + def _InstallResourceRules(self, resource_rules): + """Installs ResourceRules.plist from user or SDK into the bundle. + + Args: + resource_rules: string, optional, path to the ResourceRules.plist file + to use, default to "${SDKROOT}/ResourceRules.plist" + + Returns: + Path to the copy of ResourceRules.plist into the bundle. 
+ """ + source_path = resource_rules + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'ResourceRules.plist') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], 'ResourceRules.plist') + shutil.copy2(source_path, target_path) + return target_path + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. + """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier) + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'embedded.mobileprovision') + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') + return substitutions, provisioning_data['Entitlements'] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + if not os.path.isdir(profiles_dir): + print >>sys.stderr, ( + 'cannot find mobile provisioning for %s' % bundle_identifier) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, '*.mobileprovision')) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get( + 'Entitlements', {}).get('application-identifier', '') + for team_identifier in profile_data.get('TeamIdentifier', []): + app_id = '%s.%s' % (team_identifier, bundle_identifier) + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, profile_data, team_identifier) + if not valid_provisioning_profiles: + print >>sys.stderr, ( + 'cannot find mobile provisioning for %s' % bundle_identifier) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). 
+ selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. + """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call([ + 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) + return self._LoadPlistMaybeBinary(temp.name) + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['INFOPLIST_PATH']) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data['CFBundleIdentifier'] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
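
# What the substitution pass amounts to, using the two keys that
# _GetSubstitutions above provides (values hypothetical): each '$(variable)'
# occurrence in the entitlements data is replaced, recursively through lists
# and dicts, by the _ExpandVariables helper defined further down.
substitutions = {
  'CFBundleIdentifier': 'com.example.app',
  'AppIdentifierPrefix': 'ABCDE12345.',
}
entitlement = '$(AppIdentifierPrefix)$(CFBundleIdentifier)'
for key, value in substitutions.items():
  entitlement = entitlement.replace('$(%s)' % key, value)
print(entitlement)  # ABCDE12345.com.example.app
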
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['PRODUCT_NAME'] + '.xcent') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], + 'Entitlements.plist') + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.iteritems(): + data = data.replace('$(%s)' % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return dict((k, self._ExpandVariables(data[k], + substitutions)) for k in data) + return data if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py index 3ac153dd8..6428fced0 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py @@ -317,15 +317,20 @@ class MsvsSettings(object): output_file, config=config)) return output_file - def GetPDBName(self, config, expand_special): - """Gets the explicitly overridden pdb name for a target or returns None - if it's not overridden.""" + def GetPDBName(self, config, expand_special, default): + """Gets the explicitly overridden pdb name for a target or returns + default if it's not overridden, or if no pdb will be generated.""" config = self._TargetConfig(config) output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config) - if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) - return output_file + generate_debug_info = self._Setting( + ('VCLinkerTool', 'GenerateDebugInformation'), config) + if generate_debug_info: + if output_file: + return expand_special(self.ConvertVSMacros(output_file, config=config)) + else: + return default + else: + return None def GetCflags(self, config): """Returns the flags that need to be added to .c and .cc compilations.""" @@ -420,6 +425,7 @@ class MsvsSettings(object): libflags.extend(self._GetAdditionalLibraryDirectories( 'VCLibrarianTool', config, gyp_to_build_path)) lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) + lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:') lib('AdditionalOptions') return libflags @@ -441,8 +447,19 @@ class MsvsSettings(object): if def_file: ldflags.append('/DEF:"%s"' % def_file) + def GetPGDName(self, config, expand_special): + """Gets the explicitly overridden pgd name for a target or returns None + if it's not overridden.""" + config = self._TargetConfig(config) + output_file = self._Setting( + ('VCLinkerTool', 'ProfileGuidedDatabase'), config) + if output_file: + output_file = expand_special(self.ConvertVSMacros( + output_file, config=config)) + return output_file + def GetLdflags(self, config, gyp_to_build_path, 
expand_special, - manifest_base_name, is_executable): + manifest_base_name, output_name, is_executable, build_dir): """Returns the flags that need to be added to link commands, and the manifest files.""" config = self._TargetConfig(config) @@ -455,28 +472,47 @@ class MsvsSettings(object): ldflags.extend(self._GetAdditionalLibraryDirectories( 'VCLinkerTool', config, gyp_to_build_path)) ld('DelayLoadDLLs', prefix='/DELAYLOAD:') + ld('TreatLinkerWarningAsErrors', prefix='/WX', + map={'true': '', 'false': ':NO'}) out = self.GetOutputName(config, expand_special) if out: ldflags.append('/OUT:' + out) - pdb = self.GetPDBName(config, expand_special) + pdb = self.GetPDBName(config, expand_special, output_name + '.pdb') if pdb: ldflags.append('/PDB:' + pdb) + pgd = self.GetPGDName(config, expand_special) + if pgd: + ldflags.append('/PGD:' + pgd) map_file = self.GetMapFileName(config, expand_special) ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file else '/MAP'}) ld('MapExports', map={'true': '/MAPINFO:EXPORTS'}) ld('AdditionalOptions', prefix='') - ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') + + minimum_required_version = self._Setting( + ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='') + if minimum_required_version: + minimum_required_version = ',' + minimum_required_version + ld('SubSystem', + map={'1': 'CONSOLE%s' % minimum_required_version, + '2': 'WINDOWS%s' % minimum_required_version}, + prefix='/SUBSYSTEM:') + ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE') ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') + ld('BaseAddress', prefix='/BASE:') ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED') ld('RandomizedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE') ld('DataExecutionPrevention', map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:') + ld('ForceSymbolReferences', prefix='/INCLUDE:') ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:') - ld('LinkTimeCodeGeneration', map={'1': '/LTCG'}) + ld('LinkTimeCodeGeneration', + map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE', + '4': ':PGUPDATE'}, + prefix='/LTCG') ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) ld('EntryPointSymbol', prefix='/ENTRY:') @@ -501,27 +537,55 @@ class MsvsSettings(object): ldflags.append('/NXCOMPAT') have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) - manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags( - config, manifest_base_name, is_executable and not have_def_file) + manifest_flags, intermediate_manifest, manifest_files = \ + self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path, + is_executable and not have_def_file, build_dir) ldflags.extend(manifest_flags) - manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path) - manifest_files.append(intermediate_manifest_file) - - return ldflags, manifest_files + return ldflags, intermediate_manifest, manifest_files + + def _GetLdManifestFlags(self, config, name, gyp_to_build_path, + allow_isolation, build_dir): + """Returns a 3-tuple: + - the set of flags that need to be added to the link to generate + a default manifest + - the intermediate manifest that the linker will generate that should be + used to assert it doesn't add anything to the merged one. 
+ - the list of all the manifest files to be merged by the manifest tool and + included into the link.""" + generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'), + config, + default='true') + if generate_manifest != 'true': + # This means not only that the linker should not generate the intermediate + # manifest but also that the manifest tool should do nothing even when + # additional manifests are specified. + return ['/MANIFEST:NO'], [], [] - def _GetLdManifestFlags(self, config, name, allow_isolation): - """Returns the set of flags that need to be added to the link to generate - a default manifest, as well as the name of the generated file.""" - # The manifest is generated by default. output_name = name + '.intermediate.manifest' flags = [ '/MANIFEST', '/ManifestFile:' + output_name, ] + # Instead of using the MANIFESTUAC flags, we generate a .manifest to + # include into the list of manifests. This allows us to avoid the need to + # do two passes during linking. The /MANIFEST flag and /ManifestFile are + # still used, and the intermediate manifest is used to assert that the + # final manifest we get from merging all the additional manifest files + # (plus the one we generate here) isn't modified by merging the + # intermediate into it. + + # Always NO, because we generate a manifest file that has what we want. + flags.append('/MANIFESTUAC:NO') + config = self._TargetConfig(config) enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config, default='true') + manifest_files = [] + generated_manifest_outer = \ +"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \ +"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \ +"</assembly>" if enable_uac == 'true': execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'), config, default='0') @@ -533,14 +597,38 @@ class MsvsSettings(object): ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config, default='false') - flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' % - (execution_level_map[execution_level], ui_access)) + + inner = ''' +<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level='%s' uiAccess='%s' /> + </requestedPrivileges> + </security> +</trustInfo>''' % (execution_level_map[execution_level], ui_access) else: - flags.append('/MANIFESTUAC:NO') + inner = '' + + generated_manifest_contents = generated_manifest_outer % inner + generated_name = name + '.generated.manifest' + # Need to join with the build_dir here as we're writing it during + # generation time, but we return the un-joined version because the build + # will occur in that directory. We only write the file if the contents + # have changed so that simply regenerating the project files doesn't + # cause a relink. 
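
# Putting the two template pieces above together for the default UAC settings
# (execution level 'asInvoker', uiAccess 'false') yields the familiar
# trustInfo manifest; a sketch using the same strings:
outer = "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
        "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' " \
        "manifestVersion='1.0'>%s</assembly>"
inner = """
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='asInvoker' uiAccess='false' />
    </requestedPrivileges>
  </security>
</trustInfo>"""
print(outer % inner)
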
+ build_dir_generated_name = os.path.join(build_dir, generated_name) + gyp.common.EnsureDirExists(build_dir_generated_name) + f = gyp.common.WriteOnDiff(build_dir_generated_name) + f.write(generated_manifest_contents) + f.close() + manifest_files = [generated_name] if allow_isolation: flags.append('/ALLOWISOLATION') - return flags, output_name + + manifest_files += self._GetAdditionalManifestFiles(config, + gyp_to_build_path) + return flags, output_name, manifest_files def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): """Gets additional manifest files that are added to the default one @@ -563,7 +651,8 @@ class MsvsSettings(object): def IsEmbedManifest(self, config): """Returns whether manifest should be linked into binary.""" config = self._TargetConfig(config) - embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config) + embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config, + default='true') return embed == 'true' def IsLinkIncremental(self, config): diff --git a/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py b/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py new file mode 100644 index 000000000..a1e89f919 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py @@ -0,0 +1,289 @@ +# Unmodified from http://code.activestate.com/recipes/576693/ +# other than to add MIT license header (as specified on page, but not in code). +# Linked from Python documentation here: +# http://docs.python.org/2/library/collections.html#collections.OrderedDict +# +# This should be deleted once Py2.7 is available on all bots, see +# http://crbug.com/241769. +# +# Copyright (c) 2009 Raymond Hettinger. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + +try: + from thread import get_ident as _get_ident +except ImportError: + from dummy_thread import get_ident as _get_ident + +try: + from _abcoll import KeysView, ValuesView, ItemsView +except ImportError: + pass + + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. 
+ # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + # Suppress 'OrderedDict.update: Method has no argument': + # pylint: disable=E0211 + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running={}): + 'od.__repr__() <==> repr(od)' + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
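
# The distinction spelled out, using the class above: comparing two
# OrderedDicts is order-sensitive, comparing against a plain dict is not.
a = OrderedDict([(1, 'a'), (2, 'b')])
b = OrderedDict([(2, 'b'), (1, 'a')])
print(a == b)        # False: same items, different order
print(a == dict(b))  # True: against a regular mapping, order is ignored
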
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + diff --git a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py index 3424c015d..e9d7df0bb 100755 --- a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py @@ -10,12 +10,17 @@ These functions are executed via gyp-win-tool when using the ninja generator. """ import os +import re import shutil import subprocess +import string import sys BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +# A regex matching an argument corresponding to the output filename passed to +# link.exe. +_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE) def main(args): executor = WinTool() @@ -28,6 +33,32 @@ class WinTool(object): """This class performs all the Windows tooling steps. The methods can either be executed directly, or dispatched from an argument list.""" + def _UseSeparateMspdbsrv(self, env, args): + """Allows to use a unique instance of mspdbsrv.exe per linker instead of a + shared one.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + if args[0] != 'link.exe': + return + + # Use the output filename passed to the linker to generate an endpoint name + # for mspdbsrv.exe. + endpoint_name = None + for arg in args: + m = _LINK_EXE_OUT_ARG.match(arg) + if m: + endpoint_name = '%s_%d' % (m.group('out'), os.getpid()) + break + + if endpoint_name is None: + return + + # Adds the appropriate environment variable. This will be read by link.exe + # to know which instance of mspdbsrv.exe it should connect to (if it's + # not set then the default endpoint is used). + env['_MSPDBSRV_ENDPOINT_'] = endpoint_name + def Dispatch(self, args): """Dispatches a string command to a method.""" if len(args) < 1: @@ -65,19 +96,100 @@ class WinTool(object): else: shutil.copy2(source, dest) - def ExecLinkWrapper(self, arch, *args): + def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): """Filter diagnostic output from link that looks like: ' Creating library ui.dll.lib and object ui.dll.exp' This happens when there are exports from the dll or exe. 
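
# How _UseSeparateMspdbsrv above derives its endpoint, in isolation: the
# /OUT: argument names the binary, and the pid makes the endpoint unique per
# invocation, so each link talks to its own mspdbsrv instance.
import os
import re

_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
args = ['link.exe', '/nologo', '/OUT:ui.dll', '@ui.dll.rsp']
for arg in args:
  m = _LINK_EXE_OUT_ARG.match(arg)
  if m:
    print('_MSPDBSRV_ENDPOINT_=%s_%d' % (m.group('out'), os.getpid()))
    break
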
""" env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() + if use_separate_mspdbsrv == 'True': + self._UseSeparateMspdbsrv(env, args) + link = subprocess.Popen(args, + shell=True, + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + out, _ = link.communicate() for line in out.splitlines(): if not line.startswith(' Creating library '): print line - return popen.returncode + return link.returncode + + def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname, + mt, rc, intermediate_manifest, *manifests): + """A wrapper for handling creating a manifest resource and then executing + a link command.""" + # The 'normal' way to do manifests is to have link generate a manifest + # based on gathering dependencies from the object files, then merge that + # manifest with other manifests supplied as sources, convert the merged + # manifest to a resource, and then *relink*, including the compiled + # version of the manifest resource. This breaks incremental linking, and + # is generally overly complicated. Instead, we merge all the manifests + # provided (along with one that includes what would normally be in the + # linker-generated one, see msvs_emulation.py), and include that into the + # first and only link. We still tell link to generate a manifest, but we + # only use that to assert that our simpler process did not miss anything. + variables = { + 'python': sys.executable, + 'arch': arch, + 'out': out, + 'ldcmd': ldcmd, + 'resname': resname, + 'mt': mt, + 'rc': rc, + 'intermediate_manifest': intermediate_manifest, + 'manifests': ' '.join(manifests), + } + add_to_ld = '' + if manifests: + subprocess.check_call( + '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo ' + '-manifest %(manifests)s -out:%(out)s.manifest' % variables) + if embed_manifest == 'True': + subprocess.check_call( + '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest' + ' %(out)s.manifest.rc %(resname)s' % variables) + subprocess.check_call( + '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s ' + '%(out)s.manifest.rc' % variables) + add_to_ld = ' %(out)s.manifest.res' % variables + subprocess.check_call(ldcmd + add_to_ld) + + # Run mt.exe on the theoretically complete manifest we generated, merging + # it with the one the linker generated to confirm that the linker + # generated one does not add anything. This is strictly unnecessary for + # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not + # used in a #pragma comment. + if manifests: + # Merge the intermediate one with ours to .assert.manifest, then check + # that .assert.manifest is identical to ours. + subprocess.check_call( + '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo ' + '-manifest %(out)s.manifest %(intermediate_manifest)s ' + '-out:%(out)s.assert.manifest' % variables) + assert_manifest = '%(out)s.assert.manifest' % variables + our_manifest = '%(out)s.manifest' % variables + # Load and normalize the manifests. mt.exe sometimes removes whitespace, + # and sometimes doesn't unfortunately. 
+ with open(our_manifest, 'rb') as our_f: + with open(assert_manifest, 'rb') as assert_f: + our_data = our_f.read().translate(None, string.whitespace) + assert_data = assert_f.read().translate(None, string.whitespace) + if our_data != assert_data: + os.unlink(out) + def dump(filename): + sys.stderr.write('%s\n-----\n' % filename) + with open(filename, 'rb') as f: + sys.stderr.write(f.read() + '\n-----\n') + dump(intermediate_manifest) + dump(our_manifest) + dump(assert_manifest) + sys.stderr.write( + 'Linker generated manifest "%s" added to final manifest "%s" ' + '(result in "%s"). ' + 'Were /MANIFEST switches used in #pragma statements? ' % ( + intermediate_manifest, our_manifest, assert_manifest)) + return 1 def ExecManifestWrapper(self, arch, *args): """Run manifest tool with environment set. Strip out undesirable warning @@ -166,11 +278,14 @@ class WinTool(object): """Runs an action command line from a response file using the environment for |arch|. If |dir| is supplied, use that as the working directory.""" env = self._GetEnv(arch) + # TODO(scottmg): This is a temporary hack to get some specific variables + # through to actions that are set after gyp-time. http://crbug.com/333738. + for k, v in os.environ.iteritems(): + if k not in env: + env[k] = v args = open(rspfile).read() dir = dir[0] if dir else None - popen = subprocess.Popen(args, shell=True, env=env, cwd=dir) - popen.wait() - return popen.returncode + return subprocess.call(args, shell=True, env=env, cwd=dir) if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py index d50eac004..30f27d583 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py @@ -9,11 +9,13 @@ other build systems, such as make and ninja. import copy import gyp.common +import os import os.path import re import shlex import subprocess import sys +import tempfile from gyp.common import GypError class XcodeSettings(object): @@ -22,6 +24,7 @@ class XcodeSettings(object): # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached # at class-level for efficiency. _sdk_path_cache = {} + _sdk_root_cache = {} # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so # cached at class-level for efficiency. @@ -31,6 +34,10 @@ class XcodeSettings(object): # cached at class-level for efficiency. _codesigning_key_cache = {} + # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached + # at class-level for efficiency. + _xcode_version_cache = () + def __init__(self, spec): self.spec = spec @@ -262,7 +269,7 @@ class XcodeSettings(object): """Returns the architectures this target should be built for.""" # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set # CURRENT_ARCH / NATIVE_ARCH env vars? 
- return self.xcode_settings[configname].get('ARCHS', ['i386']) + return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()]) def _GetStdout(self, cmdlist): job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) @@ -291,9 +298,14 @@ class XcodeSettings(object): sdk_root = self._SdkRoot(configname) if sdk_root.startswith('/'): return sdk_root + return self._XcodeSdkPath(sdk_root) + + def _XcodeSdkPath(self, sdk_root): if sdk_root not in XcodeSettings._sdk_path_cache: - XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem( - sdk_root, 'Path') + sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path') + XcodeSettings._sdk_path_cache[sdk_root] = sdk_path + if sdk_root: + XcodeSettings._sdk_root_cache[sdk_path] = sdk_root return XcodeSettings._sdk_path_cache[sdk_root] def _AppendPlatformVersionMinFlags(self, lst): @@ -318,7 +330,7 @@ class XcodeSettings(object): cflags = [] sdk_root = self._SdkPath() - if 'SDKROOT' in self._Settings(): + if 'SDKROOT' in self._Settings() and sdk_root: cflags.append('-isysroot %s' % sdk_root) if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'): @@ -384,7 +396,7 @@ class XcodeSettings(object): if arch is not None: archs = [arch] else: - archs = self._Settings().get('ARCHS', ['i386']) + archs = self._Settings().get('ARCHS', [self._DefaultArch()]) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. self._WarnUnimplemented('ARCHS') @@ -404,11 +416,14 @@ class XcodeSettings(object): cflags += self._Settings().get('WARNING_CFLAGS', []) - if 'SDKROOT' in self._Settings(): - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) + if sdk_root: + framework_root = sdk_root + else: + framework_root = '' + config = self.spec['configurations'][self.configname] + framework_dirs = config.get('mac_framework_dirs', []) + for directory in framework_dirs: + cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root)) self.configname = None return cflags @@ -624,7 +639,7 @@ class XcodeSettings(object): self._AppendPlatformVersionMinFlags(ldflags) - if 'SDKROOT' in self._Settings(): + if 'SDKROOT' in self._Settings() and self._SdkPath(): ldflags.append('-isysroot ' + self._SdkPath()) for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): @@ -638,7 +653,7 @@ class XcodeSettings(object): if arch is not None: archs = [arch] else: - archs = self._Settings().get('ARCHS', ['i386']) + archs = self._Settings().get('ARCHS', [self._DefaultArch()]) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. 
self._WarnUnimplemented('ARCHS') @@ -655,11 +670,13 @@ class XcodeSettings(object): for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): ldflags.append('-Wl,-rpath,' + rpath) - if 'SDKROOT' in self._Settings(): - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath())) + sdk_root = self._SdkPath() + if not sdk_root: + sdk_root = '' + config = self.spec['configurations'][self.configname] + framework_dirs = config.get('mac_framework_dirs', []) + for directory in framework_dirs: + ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) self.configname = None return ldflags @@ -789,33 +806,40 @@ class XcodeSettings(object): if not (self.isIOS and self.spec['type'] == "executable"): return [] - identity = self.xcode_settings[configname].get('CODE_SIGN_IDENTITY', '') - if identity == '': + settings = self.xcode_settings[configname] + key = self._GetIOSCodeSignIdentityKey(settings) + if not key: return [] + + # Warn for any unimplemented signing xcode keys. + unimpl = ['OTHER_CODE_SIGN_FLAGS'] + unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) + if unimpl: + print 'Warning: Some codesign keys not implemented, ignoring: %s' % ( + ', '.join(sorted(unimpl))) + + return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % ( + os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, + settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''), + settings.get('CODE_SIGN_ENTITLEMENTS', ''), + settings.get('PROVISIONING_PROFILE', '')) + ] + + def _GetIOSCodeSignIdentityKey(self, settings): + identity = settings.get('CODE_SIGN_IDENTITY') + if not identity: + return None if identity not in XcodeSettings._codesigning_key_cache: - proc = subprocess.Popen(['security', 'find-identity', '-p', 'codesigning', - '-v'], stdout=subprocess.PIPE) - output = proc.communicate()[0].strip() - key = None - for item in output.split("\n"): - if identity in item: - assert key == None, ( - "Multiple codesigning identities for identity: %s" % - identity) - key = item.split(' ')[1] - XcodeSettings._codesigning_key_cache[identity] = key - key = XcodeSettings._codesigning_key_cache[identity] - if key: - # Warn for any unimplemented signing xcode keys. 
- unimpl = ['CODE_SIGN_RESOURCE_RULES_PATH', 'OTHER_CODE_SIGN_FLAGS', - 'CODE_SIGN_ENTITLEMENTS'] - keys = set(self.xcode_settings[configname].keys()) - unimpl = set(unimpl) & keys - if unimpl: - print 'Warning: Some codesign keys not implemented, ignoring:', \ - ' '.join(unimpl) - return ['codesign --force --sign %s %s' % (key, output_binary)] - return [] + output = subprocess.check_output( + ['security', 'find-identity', '-p', 'codesigning', '-v']) + for line in output.splitlines(): + if identity in line: + fingerprint = line.split()[1] + cache = XcodeSettings._codesigning_key_cache + assert identity not in cache or fingerprint == cache[identity], ( + "Multiple codesigning fingerprints for identity: %s" % identity) + XcodeSettings._codesigning_key_cache[identity] = fingerprint + return XcodeSettings._codesigning_key_cache.get(identity, '') def AddImplicitPostbuilds(self, configname, output, output_binary, postbuilds=[], quiet=False): @@ -835,10 +859,11 @@ class XcodeSettings(object): l = '-l' + m.group(1) else: l = library - if self._SdkPath(): - return l.replace('$(SDKROOT)', self._SdkPath(config_name)) - else: - return l + + sdk_root = self._SdkPath(config_name) + if not sdk_root: + sdk_root = '' + return l.replace('$(SDKROOT)', sdk_root) def AdjustLibraries(self, libraries, config_name=None): """Transforms entries like 'Cocoa.framework' in libraries into entries like @@ -851,6 +876,27 @@ class XcodeSettings(object): def _BuildMachineOSBuild(self): return self._GetStdout(['sw_vers', '-buildVersion']) + # This method ported from the logic in Homebrew's CLT version check + def _CLTVersion(self): + # pkgutil output looks like + # package-id: com.apple.pkg.CLTools_Executables + # version: 5.0.1.0.1.1382131676 + # volume: / + # location: / + # install-time: 1382544035 + # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group + STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo" + FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI" + MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables" + + regex = re.compile('version: (?P<version>.+)') + for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]: + try: + output = self._GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key]) + return re.search(regex, output).groupdict()['version'] + except: + continue + def _XcodeVersion(self): # `xcodebuild -version` output looks like # Xcode 4.6.3 @@ -860,14 +906,33 @@ class XcodeSettings(object): # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0 # BuildVersion: 10M2518 # Convert that to '0463', '4H1503'. - version_list = self._GetStdout(['xcodebuild', '-version']).splitlines() - version = version_list[0] - build = version_list[-1] - # Be careful to convert "4.2" to "0420": - version = version.split()[-1].replace('.', '') - version = (version + '0' * (3 - len(version))).zfill(4) - build = build.split()[-1] - return version, build + if len(XcodeSettings._xcode_version_cache) == 0: + try: + version_list = self._GetStdout(['xcodebuild', '-version']).splitlines() + # In some circumstances xcodebuild exits 0 but doesn't return + # the right results; for example, a user on 10.7 or 10.8 with + # a bogus path set via xcode-select + # In that case this may be a CLT-only install so fall back to + # checking that version. 
+ if len(version_list) < 2: + raise GypError, "xcodebuild returned unexpected results" + except: + version = self._CLTVersion() + if version: + version = re.match('(\d\.\d\.?\d*)', version).groups()[0] + else: + raise GypError, "No Xcode or CLT version detected!" + # The CLT has no build information, so we return an empty string. + version_list = [version, ''] + version = version_list[0] + build = version_list[-1] + # Be careful to convert "4.2" to "0420": + version = version.split()[-1].replace('.', '') + version = (version + '0' * (3 - len(version))).zfill(4) + if build: + build = build.split()[-1] + XcodeSettings._xcode_version_cache = (version, build) + return XcodeSettings._xcode_version_cache def _XcodeIOSDeviceFamily(self, configname): family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1') @@ -884,6 +949,8 @@ class XcodeSettings(object): cache['DTXcodeBuild'] = xcode_build sdk_root = self._SdkRoot(configname) + if not sdk_root: + sdk_root = self._DefaultSdkRoot() cache['DTSDKName'] = sdk_root if xcode >= '0430': cache['DTSDKBuild'] = self._GetSdkVersionInfoItem( @@ -908,6 +975,55 @@ class XcodeSettings(object): items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname) return items + def _DefaultSdkRoot(self): + """Returns the default SDKROOT to use. + + Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode + project, then the environment variable was empty. Starting with this + version, Xcode uses the name of the newest SDK installed. + """ + if self._XcodeVersion() < '0500': + return '' + default_sdk_path = self._XcodeSdkPath('') + default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) + if default_sdk_root: + return default_sdk_root + try: + all_sdks = self._GetStdout(['xcodebuild', '-showsdks']) + except: + # If xcodebuild fails, there will be no valid SDKs + return '' + for line in all_sdks.splitlines(): + items = line.split() + if len(items) >= 3 and items[-2] == '-sdk': + sdk_root = items[-1] + sdk_path = self._XcodeSdkPath(sdk_root) + if sdk_path == default_sdk_path: + return sdk_root + return '' + + def _DefaultArch(self): + # For Mac projects, Xcode changed the default value used when ARCHS is not + # set from "i386" to "x86_64". + # + # For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when + # building for a device, and the simulator binaries are always build for + # "i386". + # + # For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT), + # which correspond to "armv7 armv7s arm64", and when building the simulator + # the architecture is either "i386" or "x86_64" depending on the simulated + # device (respectively 32-bit or 64-bit device). + # + # Since the value returned by this function is only used when ARCHS is not + # set, then on iOS we return "i386", as the default xcode project generator + # does not set ARCHS if it is not set in the .gyp file. + if self.isIOS: + return 'i386' + version, build = self._XcodeVersion() + if version >= '0500': + return 'x86_64' + return 'i386' class MacPrefixHeader(object): """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. 
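The subtle part of the xcode_emulation.py changes is `_XcodeVersion()`: the raw `xcodebuild -version` output is normalized into a zero-padded string so that plain string comparisons such as `version >= '0500'` behave like version comparisons, and the result is cached on the class so `xcodebuild` runs at most once per gyp invocation. A minimal standalone sketch of that normalization, assuming Python 2.7 and a working `xcodebuild` on PATH (`normalized_xcode_version` is an illustrative name, not part of gyp):

    import subprocess

    def normalized_xcode_version():
        # `xcodebuild -version` prints, for example:
        #   Xcode 4.6.3
        #   Build version 4H1503
        lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
        version = lines[0].split()[-1].replace('.', '')           # '4.6.3' -> '463'
        version = (version + '0' * (3 - len(version))).zfill(4)   # '463'   -> '0463'
        build = lines[-1].split()[-1]                             # '4H1503'
        return version, build

The same zero-padded form ('4.2' becomes '0420') is what `_DefaultArch()` compares against '0500' to decide between 'i386' and 'x86_64' for Mac targets when ARCHS is unset.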
diff --git a/node_modules/node-gyp/lib/configure.js b/node_modules/node-gyp/lib/configure.js index e101a315a..268c33625 100644 --- a/node_modules/node-gyp/lib/configure.js +++ b/node_modules/node-gyp/lib/configure.js @@ -1,4 +1,3 @@ - module.exports = exports = configure /** @@ -14,6 +13,7 @@ var fs = require('graceful-fs') , semver = require('semver') , mkdirp = require('mkdirp') , cp = require('child_process') + , extend = require('util')._extend , spawn = cp.spawn , execFile = cp.execFile , win = process.platform == 'win32' @@ -73,7 +73,9 @@ function configure (gyp, argv, callback) { } function checkPythonVersion () { - var env = { TERM: 'dumb', PATH: process.env.PATH }; + var env = extend({}, process.env); + env.TERM = 'dumb'; + execFile(python, ['-c', 'import platform; print(platform.python_version());'], { env: env }, function (err, stdout) { if (err) { return callback(err) @@ -88,7 +90,7 @@ function configure (gyp, argv, callback) { log.silly('stripping "rc" identifier from version') version = version.replace(/rc(.*)$/ig, '') } - var range = semver.Range('>=2.5.0 <3.0.0'); + var range = semver.Range('>=2.5.0 <3.0.0') if (range.test(version)) { getNodeDir() } else { @@ -210,7 +212,7 @@ function configure (gyp, argv, callback) { variables.copy_dev_lib = !gyp.opts.nodedir // disable -T "thin" static archives by default - variables.standalone_static_library = gyp.opts.thin ? 0 : 1; + variables.standalone_static_library = gyp.opts.thin ? 0 : 1 // loop through the rest of the opts and add the unknown ones as variables. // this allows for module-specific configure flags like: @@ -311,6 +313,7 @@ function configure (gyp, argv, callback) { argv.push('-Dnode_root_dir=' + nodeDir) argv.push('-Dmodule_root_dir=' + process.cwd()) argv.push('--depth=.') + argv.push('--no-parallel') // tell gyp to write the Makefile/Solution files into output_dir argv.push('--generator-output', output_dir) diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js index 60dc3cd63..a9a18bcc7 100644 --- a/node_modules/node-gyp/lib/install.js +++ b/node_modules/node-gyp/lib/install.js @@ -12,6 +12,7 @@ var fs = require('graceful-fs') , tar = require('tar') , rm = require('rimraf') , path = require('path') + , crypto = require('crypto') , zlib = require('zlib') , log = require('npmlog') , semver = require('semver') @@ -151,6 +152,15 @@ function install (gyp, argv, callback) { return req } + function getContentSha(res, callback) { + var shasum = crypto.createHash('sha1') + res.on('data', function (chunk) { + shasum.update(chunk) + }).on('end', function () { + callback(null, shasum.digest('hex')) + }) + } + function go () { log.verbose('ensuring nodedir is created', devDir) @@ -171,13 +181,16 @@ function install (gyp, argv, callback) { } // now download the node tarball - var tarPath = gyp.opts['tarball']; + var tarPath = gyp.opts['tarball'] var tarballUrl = tarPath ? tarPath : distUrl + '/v' + version + '/node-v' + version + '.tar.gz' , badDownload = false , extractCount = 0 , gunzip = zlib.createGunzip() , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid }) + var contentShasums = {} + var expectShasums = {} + // checks if a file to be extracted from the tarball is valid. 
// only .h header files and the gyp files get extracted function isValid () { @@ -230,6 +243,13 @@ function install (gyp, argv, callback) { cb(new Error(res.statusCode + ' status code downloading tarball')) return } + // content sha1 + getContentSha(res, function (_, sha1) { + var filename = path.basename(tarballUrl).trim() + contentShasums[filename] = sha1 + log.verbose('content sha1', filename, sha1) + }) + // start unzipping and untaring req.pipe(gunzip).pipe(extracter) }) @@ -254,6 +274,10 @@ function install (gyp, argv, callback) { var installVersionPath = path.resolve(devDir, 'installVersion') fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref) + // download SHASUMS.txt + async++ + downloadShasums(deref) + if (async === 0) { // no async tasks required cb() @@ -261,10 +285,59 @@ function install (gyp, argv, callback) { function deref (err) { if (err) return cb(err) - --async || cb() + + async-- + if (!async) { + log.verbose('download contents shasums', JSON.stringify(contentShasums)) + // check content shasums + for (var k in contentShasums) { + log.verbose('validating download shasum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) + if (contentShasums[k] !== expectShasums[k]) { + cb(new Error(k + ' local sha1 ' + contentShasums[k] + ' not match remote ' + expectShasums[k])) + return + } + } + cb() + } } } + function downloadShasums(done) { + log.verbose('check download content sha1, need to download `SHASUMS.txt`...') + var shasumsPath = path.resolve(devDir, 'SHASUMS.txt') + , shasumsUrl = distUrl + '/v' + version + '/SHASUMS.txt' + + log.verbose('`SHASUMS.txt` url', shasumsUrl) + var req = download(shasumsUrl) + if (!req) return + req.on('error', done) + req.on('response', function (res) { + if (res.statusCode !== 200) { + done(new Error(res.statusCode + ' status code downloading SHASUMS.txt')) + return + } + + var chunks = [] + res.on('data', function (chunk) { + chunks.push(chunk) + }) + res.on('end', function () { + var lines = Buffer.concat(chunks).toString().trim().split('\n') + lines.forEach(function (line) { + var items = line.trim().split(/\s+/) + if (items.length !== 2) return + + // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz + var name = items[1].replace(/^\.\//, '') + expectShasums[name] = items[0] + }) + + log.verbose('`SHASUMS.txt` data', JSON.stringify(expectShasums)) + done() + }) + }) + } + function downloadNodeLib (done) { log.verbose('on Windows; need to download `node.lib`...') var dir32 = path.resolve(devDir, 'ia32') @@ -293,6 +366,11 @@ function install (gyp, argv, callback) { return } + getContentSha(res, function (_, sha1) { + contentShasums['node.lib'] = sha1 + log.verbose('content sha1', 'node.lib', sha1) + }) + var ws = fs.createWriteStream(nodeLibPath32) ws.on('error', cb) req.pipe(ws) @@ -314,6 +392,11 @@ function install (gyp, argv, callback) { return } + getContentSha(res, function (_, sha1) { + contentShasums['x64/node.lib'] = sha1 + log.verbose('content sha1', 'x64/node.lib', sha1) + }) + var ws = fs.createWriteStream(nodeLibPath64) ws.on('error', cb) req.pipe(ws) diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json index b28964c5e..c9dbae5a8 100644 --- a/node_modules/node-gyp/package.json +++ b/node_modules/node-gyp/package.json @@ -10,7 +10,7 @@ "bindings", "gyp" ], - "version": "0.12.2", + "version": "0.13.0", "installVersion": 9, "author": { "name": "Nathan Rajlich", @@ -50,6 +50,10 @@ "url": "https://github.com/TooTallNate/node-gyp/issues" }, "homepage": 
"https://github.com/TooTallNate/node-gyp", - "_id": "node-gyp@0.12.2", - "_from": "node-gyp@latest" + "_id": "node-gyp@0.13.0", + "dist": { + "shasum": "97765303203579f1445358d4d25d03645811d87d" + }, + "_from": "node-gyp@latest", + "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-0.13.0.tgz" } |