diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000..dbd053a90b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,26 @@
+
+
+* **Node Version**:
+* **Platform**:
+* **Compiler**:
+* **Module**:
+
+Verbose output (from npm or node-gyp):
+
+
+
+```
+
+```
+
+
+
+
+
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..7a61664287
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
+
+
+##### Checklist
+
+
+- [ ] `npm install && npm test` passes
+- [ ] tests are included
+- [ ] documentation is changed or added
+- [ ] commit message follows [commit guidelines](https://github.com/nodejs/node/blob/master/CONTRIBUTING.md#commit-message-guidelines)
+
+##### Description of change
+
+
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..f48786bd84
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,34 @@
+# Contributing to node-gyp
+
+## Code of Conduct
+
+Please read the
+[Code of Conduct](https://github.com/nodejs/TSC/blob/master/CODE_OF_CONDUCT.md)
+which explains the minimum behavior expectations for node-gyp contributors.
+
+
+## Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+* (a) The contribution was created in whole or in part by me and I
+ have the right to submit it under the open source license
+ indicated in the file; or
+
+* (b) The contribution is based upon previous work that, to the best
+ of my knowledge, is covered under an appropriate open source
+ license and I have the right under that license to submit that
+ work with modifications, whether created in whole or in part
+ by me, under the same open source license (unless I am
+ permitted to submit under a different license), as indicated
+ in the file; or
+
+* (c) The contribution was provided directly to me by some other
+ person who certified (a), (b) or (c) and I have not modified
+ it.
+
+* (d) I understand and agree that this project and the contribution
+ are public and that a record of the contribution (including all
+ personal information I submit with it, including my sign-off) is
+ maintained indefinitely and may be redistributed consistent with
+ this project or the open source license(s) involved.
diff --git a/README.md b/README.md
index 03db320900..5ab6b5dbda 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
node-gyp
=========
-### Node.js native addon build tool
+## Node.js native addon build tool
`node-gyp` is a cross-platform command-line tool written in Node.js for compiling
native addon modules for Node.js. It bundles the [gyp](https://gyp.gsrc.io)
@@ -14,7 +14,7 @@ Multiple target versions of node are supported (i.e. `0.8`, ..., `4`, `5`, `6`,
etc.), regardless of what version of node is actually installed on your system
(`node-gyp` downloads the necessary development files or headers for the target version).
-#### Features:
+## Features
* Easy to use, consistent interface
* Same commands to build your module on every platform
@@ -32,29 +32,39 @@ $ npm install -g node-gyp
You will also need to install:
- * On Unix:
- * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported)
- * `make`
- * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)
- * On Mac OS X:
- * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on Mac OS X)
- * [Xcode](https://developer.apple.com/xcode/download/)
- * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads`
- * This step will install `gcc` and the related toolchain containing `make`
- * On Windows:
- * Option 1: Install all the required tools and configurations using Microsoft's [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) using `npm install --global --production windows-build-tools` from an elevated PowerShell or CMD.exe (run as Administrator).
- * Option 2: Install tools and configuration manually:
- * Visual C++ Build Environment:
- * Option 1: Install [Visual C++ Build Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools) using the **Default Install** option.
-
- * Option 2: Install [Visual Studio 2015](https://www.visualstudio.com/products/visual-studio-community-vs) (or modify an existing installation) and select *Common Tools for Visual C++* during setup. This also works with the free Community and Express for Desktop editions.
-
- > :bulb: [Windows Vista / 7 only] requires [.NET Framework 4.5.1](http://www.microsoft.com/en-us/download/details.aspx?id=40773)
-
- * Install [Python 2.7](https://www.python.org/downloads/) (`v3.x.x` is not supported), and run `npm config set python python2.7` (or see below for further instructions on specifying the proper Python version and path.)
- * Launch cmd, `npm config set msvs_version 2015`
-
- If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips.
+### On Unix
+
+ * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported)
+ * `make`
+ * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)
+
+### On Mac OS X
+
+ * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on Mac OS X)
+ * [Xcode](https://developer.apple.com/xcode/download/)
+ * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads`
+ * This step will install `gcc` and the related toolchain containing `make`
+
+### On Windows
+
+#### Option 1
+
+Install all the required tools and configurations using Microsoft's [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) by running `npm install --global --production windows-build-tools` from an elevated PowerShell or CMD.exe (run as Administrator).
+
+#### Option 2
+
+Install tools and configuration manually:
+ * Visual C++ Build Environment:
+ * Option 1: Install [Visual C++ Build Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools) using the **Default Install** option.
+
+ * Option 2: Install [Visual Studio 2015](https://www.visualstudio.com/products/visual-studio-community-vs) (or modify an existing installation) and select *Common Tools for Visual C++* during setup. This also works with the free Community and Express for Desktop editions.
+
+ > :bulb: [Windows Vista / 7 only] requires [.NET Framework 4.5.1](http://www.microsoft.com/en-us/download/details.aspx?id=40773)
+
+   * Install [Python 2.7](https://www.python.org/downloads/) (`v3.x.x` is not supported), and run `npm config set python python2.7` (or see below for further instructions on specifying the proper Python version and path).
+ * Launch cmd, `npm config set msvs_version 2015`
+
+ If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips.
If you have multiple Python versions installed, you can identify which Python
version `node-gyp` uses by setting the '--python' variable:
diff --git a/gyp/AUTHORS b/gyp/AUTHORS
index fecf84a1c4..130c816058 100644
--- a/gyp/AUTHORS
+++ b/gyp/AUTHORS
@@ -1,12 +1,15 @@
# Names should be added to this file like so:
# Name or Organization
-Google Inc.
-Bloomberg Finance L.P.
-Yandex LLC
+Google Inc. <*@google.com>
+Bloomberg Finance L.P. <*@bloomberg.net>
+IBM Inc. <*@*.ibm.com>
+Yandex LLC <*@yandex-team.ru>
Steven Knight
Ryan Norton
David J. Sankel
Eric N. Vander Weele
Tom Freudenberg
+Julien Brianceau
+Refael Ackermann
diff --git a/gyp/DEPS b/gyp/DEPS
index 2e1120f274..167fb779b0 100644
--- a/gyp/DEPS
+++ b/gyp/DEPS
@@ -3,8 +3,7 @@
# (You don't need to use gclient for normal GYP development work.)
vars = {
- "chrome_trunk": "http://src.chromium.org/svn/trunk",
- "googlecode_url": "http://%s.googlecode.com/svn",
+ "chromium_git": "https://chromium.googlesource.com/",
}
deps = {
@@ -13,12 +12,12 @@ deps = {
deps_os = {
"win": {
"third_party/cygwin":
- Var("chrome_trunk") + "/deps/third_party/cygwin@66844",
+ Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9",
"third_party/python_26":
- Var("chrome_trunk") + "/tools/third_party/python_26@89111",
+ Var("chromium_git") + "chromium/deps/python_26@5bb4080",
"src/third_party/pefile":
- (Var("googlecode_url") % "pefile") + "/trunk@63",
+ Var("chromium_git") + "external/pefile@72c6ae4",
},
}
diff --git a/gyp/PRESUBMIT.py b/gyp/PRESUBMIT.py
index dde025383c..4bc1b8ca26 100644
--- a/gyp/PRESUBMIT.py
+++ b/gyp/PRESUBMIT.py
@@ -73,23 +73,15 @@
]
-def CheckChangeOnUpload(input_api, output_api):
- report = []
- report.extend(input_api.canned_checks.PanProjectChecks(
- input_api, output_api))
- return report
-
-
-def CheckChangeOnCommit(input_api, output_api):
- report = []
-
+def _LicenseHeader(input_api):
# Accept any year number from 2009 to the current year.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
+
years_re = '(' + '|'.join(allowed_years) + ')'
# The (c) is deprecated, but tolerate it until it's removed from all files.
- license = (
+ return (
r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
@@ -98,8 +90,18 @@ def CheckChangeOnCommit(input_api, output_api):
'year': years_re,
}
+def CheckChangeOnUpload(input_api, output_api):
+ report = []
report.extend(input_api.canned_checks.PanProjectChecks(
- input_api, output_api, license_header=license))
+ input_api, output_api, license_header=_LicenseHeader(input_api)))
+ return report
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ report = []
+
+ report.extend(input_api.canned_checks.PanProjectChecks(
+ input_api, output_api, license_header=_LicenseHeader(input_api)))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
@@ -122,16 +124,3 @@ def CheckChangeOnCommit(input_api, output_api):
finally:
sys.path = old_sys_path
return report
-
-
-TRYBOTS = [
- 'linux_try',
- 'mac_try',
- 'win_try',
-]
-
-
-def GetPreferredTryMasters(_, change):
- return {
- 'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
- }
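
The refactor above simply extracts the license-header regex into a shared `_LicenseHeader` helper so both presubmit hooks use the same pattern. A minimal standalone sketch of the year-pattern idea (function name hypothetical, Python 3 `range` in place of the file's `xrange`):

```python
import re
import time

def license_year_pattern(start=2009):
    # Accept any year from `start` up to the current year, newest first.
    current_year = int(time.strftime('%Y'))
    years = (str(y) for y in reversed(range(start, current_year + 1)))
    return '(' + '|'.join(years) + ')'

# The generated pattern tolerates both the deprecated "(c)" and plain lines.
line = '# Copyright (c) 2012 Google Inc. All rights reserved.'
pattern = r'Copyright (\(c\) )?%s Google Inc\.' % license_year_pattern()
assert re.search(pattern, line)
```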
diff --git a/gyp/README.md b/gyp/README.md
new file mode 100644
index 0000000000..c0d73ac958
--- /dev/null
+++ b/gyp/README.md
@@ -0,0 +1,4 @@
+GYP can Generate Your Projects.
+===================================
+
+Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out the ```md-pages``` branch to read those documents offline.
diff --git a/gyp/buildbot/aosp_manifest.xml b/gyp/buildbot/aosp_manifest.xml
deleted file mode 100644
index bd73b303c6..0000000000
--- a/gyp/buildbot/aosp_manifest.xml
+++ /dev/null
@@ -1,466 +0,0 @@
diff --git a/gyp/codereview.settings b/gyp/codereview.settings
index faf37f1145..27fb9f99e2 100644
--- a/gyp/codereview.settings
+++ b/gyp/codereview.settings
@@ -1,10 +1,6 @@
-# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: codereview.chromium.org
+# This file is used by git cl to get repository specific information.
CC_LIST: gyp-developer@googlegroups.com
-VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
-TRY_ON_UPLOAD: False
-TRYSERVER_PROJECT: gyp
-TRYSERVER_PATCHLEVEL: 1
-TRYSERVER_ROOT: gyp
-TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
+CODE_REVIEW_SERVER: codereview.chromium.org
+GERRIT_HOST: True
PROJECT: gyp
+VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
diff --git a/gyp/gyptest.py b/gyp/gyptest.py
index 8e4fc47d5c..9930e78c7b 100755
--- a/gyp/gyptest.py
+++ b/gyp/gyptest.py
@@ -1,133 +1,19 @@
#!/usr/bin/env python
-
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-__doc__ = """
-gyptest.py -- test runner for GYP tests.
-"""
+"""gyptest.py -- test runner for GYP tests."""
+
+from __future__ import print_function
+import argparse
+import math
import os
-import optparse
+import platform
import subprocess
import sys
-
-class CommandRunner(object):
- """
- Executor class for commands, including "commands" implemented by
- Python functions.
- """
- verbose = True
- active = True
-
- def __init__(self, dictionary={}):
- self.subst_dictionary(dictionary)
-
- def subst_dictionary(self, dictionary):
- self._subst_dictionary = dictionary
-
- def subst(self, string, dictionary=None):
- """
- Substitutes (via the format operator) the values in the specified
- dictionary into the specified command.
-
- The command can be an (action, string) tuple. In all cases, we
- perform substitution on strings and don't worry if something isn't
- a string. (It's probably a Python function to be executed.)
- """
- if dictionary is None:
- dictionary = self._subst_dictionary
- if dictionary:
- try:
- string = string % dictionary
- except TypeError:
- pass
- return string
-
- def display(self, command, stdout=None, stderr=None):
- if not self.verbose:
- return
- if type(command) == type(()):
- func = command[0]
- args = command[1:]
- s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
- if type(command) == type([]):
- # TODO: quote arguments containing spaces
- # TODO: handle meta characters?
- s = ' '.join(command)
- else:
- s = self.subst(command)
- if not s.endswith('\n'):
- s += '\n'
- sys.stdout.write(s)
- sys.stdout.flush()
-
- def execute(self, command, stdout=None, stderr=None):
- """
- Executes a single command.
- """
- if not self.active:
- return 0
- if type(command) == type(''):
- command = self.subst(command)
- cmdargs = shlex.split(command)
- if cmdargs[0] == 'cd':
- command = (os.chdir,) + tuple(cmdargs[1:])
- if type(command) == type(()):
- func = command[0]
- args = command[1:]
- return func(*args)
- else:
- if stdout is sys.stdout:
- # Same as passing sys.stdout, except python2.4 doesn't fail on it.
- subout = None
- else:
- # Open pipe for anything else so Popen works on python2.4.
- subout = subprocess.PIPE
- if stderr is sys.stderr:
- # Same as passing sys.stderr, except python2.4 doesn't fail on it.
- suberr = None
- elif stderr is None:
- # Merge with stdout if stderr isn't specified.
- suberr = subprocess.STDOUT
- else:
- # Open pipe for anything else so Popen works on python2.4.
- suberr = subprocess.PIPE
- p = subprocess.Popen(command,
- shell=(sys.platform == 'win32'),
- stdout=subout,
- stderr=suberr)
- p.wait()
- if stdout is None:
- self.stdout = p.stdout.read()
- elif stdout is not sys.stdout:
- stdout.write(p.stdout.read())
- if stderr not in (None, sys.stderr):
- stderr.write(p.stderr.read())
- return p.returncode
-
- def run(self, command, display=None, stdout=None, stderr=None):
- """
- Runs a single command, displaying it first.
- """
- if display is None:
- display = command
- self.display(display)
- return self.execute(command, stdout, stderr)
-
-
-class Unbuffered(object):
- def __init__(self, fp):
- self.fp = fp
- def write(self, arg):
- self.fp.write(arg)
- self.fp.flush()
- def __getattr__(self, attr):
- return getattr(self.fp, attr)
-
-sys.stdout = Unbuffered(sys.stdout)
-sys.stderr = Unbuffered(sys.stderr)
+import time
def is_test_name(f):
@@ -137,8 +23,6 @@ def is_test_name(f):
def find_all_gyptest_files(directory):
result = []
for root, dirs, files in os.walk(directory):
- if '.svn' in dirs:
- dirs.remove('.svn')
result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
result.sort()
return result
@@ -148,73 +32,68 @@ def main(argv=None):
if argv is None:
argv = sys.argv
- usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
- parser = optparse.OptionParser(usage=usage)
- parser.add_option("-a", "--all", action="store_true",
- help="run all tests")
- parser.add_option("-C", "--chdir", action="store", default=None,
- help="chdir to the specified directory")
- parser.add_option("-f", "--format", action="store", default='',
- help="run tests with the specified formats")
- parser.add_option("-G", '--gyp_option', action="append", default=[],
- help="Add -G options to the gyp command line")
- parser.add_option("-l", "--list", action="store_true",
- help="list available tests and exit")
- parser.add_option("-n", "--no-exec", action="store_true",
- help="no execute, just print the command line")
- parser.add_option("--passed", action="store_true",
- help="report passed tests")
- parser.add_option("--path", action="append", default=[],
- help="additional $PATH directory")
- parser.add_option("-q", "--quiet", action="store_true",
- help="quiet, don't print test command lines")
- opts, args = parser.parse_args(argv[1:])
-
- if opts.chdir:
- os.chdir(opts.chdir)
-
- if opts.path:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-a", "--all", action="store_true",
+ help="run all tests")
+ parser.add_argument("-C", "--chdir", action="store",
+ help="change to directory")
+ parser.add_argument("-f", "--format", action="store", default='',
+ help="run tests with the specified formats")
+ parser.add_argument("-G", '--gyp_option', action="append", default=[],
+ help="Add -G options to the gyp command line")
+ parser.add_argument("-l", "--list", action="store_true",
+ help="list available tests and exit")
+ parser.add_argument("-n", "--no-exec", action="store_true",
+ help="no execute, just print the command line")
+ parser.add_argument("--path", action="append", default=[],
+ help="additional $PATH directory")
+ parser.add_argument("-q", "--quiet", action="store_true",
+ help="quiet, don't print anything unless there are failures")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="print configuration info and test results.")
+ parser.add_argument('tests', nargs='*')
+ args = parser.parse_args(argv[1:])
+
+ if args.chdir:
+ os.chdir(args.chdir)
+
+ if args.path:
-    extra_path = [os.path.abspath(p) for p in opts.path]
+    extra_path = [os.path.abspath(p) for p in args.path]
extra_path = os.pathsep.join(extra_path)
os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
- if not args:
- if not opts.all:
+ if not args.tests:
+ if not args.all:
sys.stderr.write('Specify -a to get all tests.\n')
return 1
- args = ['test']
+ args.tests = ['test']
tests = []
- for arg in args:
+ for arg in args.tests:
if os.path.isdir(arg):
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
else:
if not is_test_name(os.path.basename(arg)):
- print >>sys.stderr, arg, 'is not a valid gyp test name.'
+ print(arg, 'is not a valid gyp test name.', file=sys.stderr)
sys.exit(1)
tests.append(arg)
- if opts.list:
+ if args.list:
for test in tests:
- print test
+ print(test)
sys.exit(0)
- CommandRunner.verbose = not opts.quiet
- CommandRunner.active = not opts.no_exec
- cr = CommandRunner()
-
os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
- if not opts.quiet:
- sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
- passed = []
- failed = []
- no_result = []
+ if args.verbose:
+ print_configuration_info()
+
+ if args.gyp_option and not args.quiet:
+ print('Extra Gyp options: %s\n' % args.gyp_option)
- if opts.format:
- format_list = opts.format.split(',')
+ if args.format:
+ format_list = args.format.split(',')
else:
- # TODO: not duplicate this mapping from pylib/gyp/__init__.py
format_list = {
'aix5': ['make'],
'freebsd7': ['make'],
@@ -222,53 +101,143 @@ def main(argv=None):
'openbsd5': ['make'],
'cygwin': ['msvs'],
'win32': ['msvs', 'ninja'],
+ 'linux': ['make', 'ninja'],
'linux2': ['make', 'ninja'],
'linux3': ['make', 'ninja'],
- 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
+
+ # TODO: Re-enable xcode-ninja.
+ # https://bugs.chromium.org/p/gyp/issues/detail?id=530
+ # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
+ 'darwin': ['make', 'ninja', 'xcode'],
}[sys.platform]
- for format in format_list:
- os.environ['TESTGYP_FORMAT'] = format
- if not opts.quiet:
- sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
+ gyp_options = []
+ for option in args.gyp_option:
+ gyp_options += ['-G', option]
- gyp_options = []
- for option in opts.gyp_option:
- gyp_options += ['-G', option]
- if gyp_options and not opts.quiet:
- sys.stdout.write('Extra Gyp options: %s\n' % gyp_options)
+ runner = Runner(format_list, tests, gyp_options, args.verbose)
+ runner.run()
- for test in tests:
- status = cr.run([sys.executable, test] + gyp_options,
- stdout=sys.stdout,
- stderr=sys.stderr)
- if status == 2:
- no_result.append(test)
- elif status:
- failed.append(test)
- else:
- passed.append(test)
-
- if not opts.quiet:
- def report(description, tests):
- if tests:
- if len(tests) == 1:
- sys.stdout.write("\n%s the following test:\n" % description)
- else:
- fmt = "\n%s the following %d tests:\n"
- sys.stdout.write(fmt % (description, len(tests)))
- sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
-
- if opts.passed:
- report("Passed", passed)
- report("Failed", failed)
- report("No result from", no_result)
-
- if failed:
+ if not args.quiet:
+ runner.print_results()
+
+ if runner.failures:
return 1
else:
return 0
+def print_configuration_info():
+ print('Test configuration:')
+ if sys.platform == 'darwin':
+ sys.path.append(os.path.abspath('test/lib'))
+ import TestMac
+ print(' Mac %s %s' % (platform.mac_ver()[0], platform.mac_ver()[2]))
+ print(' Xcode %s' % TestMac.Xcode.Version())
+ elif sys.platform == 'win32':
+ sys.path.append(os.path.abspath('pylib'))
+ import gyp.MSVSVersion
+ print(' Win %s %s\n' % platform.win32_ver()[0:2])
+ print(' MSVS %s' %
+ gyp.MSVSVersion.SelectVisualStudioVersion().Description())
+ elif sys.platform in ('linux', 'linux2'):
+ print(' Linux %s' % ' '.join(platform.linux_distribution()))
+ print(' Python %s' % platform.python_version())
+ print(' PYTHONPATH=%s' % os.environ['PYTHONPATH'])
+ print()
+
+
+class Runner(object):
+ def __init__(self, formats, tests, gyp_options, verbose):
+ self.formats = formats
+ self.tests = tests
+ self.verbose = verbose
+ self.gyp_options = gyp_options
+ self.failures = []
+ self.num_tests = len(formats) * len(tests)
+ num_digits = len(str(self.num_tests))
+ self.fmt_str = '[%%%dd/%%%dd] (%%s) %%s' % (num_digits, num_digits)
+ self.isatty = sys.stdout.isatty() and not self.verbose
+ self.env = os.environ.copy()
+ self.hpos = 0
+
+ def run(self):
+ run_start = time.time()
+
+ i = 1
+ for fmt in self.formats:
+ for test in self.tests:
+ self.run_test(test, fmt, i)
+ i += 1
+
+ if self.isatty:
+ self.erase_current_line()
+
+ self.took = time.time() - run_start
+
+ def run_test(self, test, fmt, i):
+ if self.isatty:
+ self.erase_current_line()
+
+ msg = self.fmt_str % (i, self.num_tests, fmt, test)
+ self.print_(msg)
+
+ start = time.time()
+ cmd = [sys.executable, test] + self.gyp_options
+ self.env['TESTGYP_FORMAT'] = fmt
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, env=self.env)
+ proc.wait()
+ took = time.time() - start
+
+ stdout = proc.stdout.read().decode('utf8')
+ if proc.returncode == 2:
+ res = 'skipped'
+ elif proc.returncode:
+ res = 'failed'
+ self.failures.append('(%s) %s' % (test, fmt))
+ else:
+ res = 'passed'
+ res_msg = ' %s %.3fs' % (res, took)
+ self.print_(res_msg)
+
+ if (stdout and
+ not stdout.endswith('PASSED\n') and
+ not (stdout.endswith('NO RESULT\n'))):
+ print()
+ for l in stdout.splitlines():
+ print(' %s' % l)
+ elif not self.isatty:
+ print()
+
+ def print_(self, msg):
+ print(msg, end='')
+ index = msg.rfind('\n')
+ if index == -1:
+ self.hpos += len(msg)
+ else:
+ self.hpos = len(msg) - index
+ sys.stdout.flush()
+
+ def erase_current_line(self):
+ print('\b' * self.hpos + ' ' * self.hpos + '\b' * self.hpos, end='')
+ sys.stdout.flush()
+ self.hpos = 0
+
+ def print_results(self):
+ num_failures = len(self.failures)
+ if num_failures:
+ print()
+ if num_failures == 1:
+ print("Failed the following test:")
+ else:
+ print("Failed the following %d tests:" % num_failures)
+ print("\t" + "\n\t".join(sorted(self.failures)))
+ print()
+ print('Ran %d tests in %.3fs, %d failed.' % (self.num_tests, self.took,
+ num_failures))
+ print()
+
+
if __name__ == "__main__":
sys.exit(main())
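
For reference, a hypothetical way to drive the new `Runner` class directly from Python rather than through the command line; the constructor signature `(formats, tests, gyp_options, verbose)` and the `run()`/`print_results()`/`failures` members come from the code above, but the test path and the import are made up:

```python
# Sketch only: exercising the refactored Runner outside of main().
from gyptest import Runner  # assumes gyptest.py is importable from the CWD

runner = Runner(formats=['ninja'],
                tests=['test/hello/gyptest-hello.py'],  # hypothetical test
                gyp_options=[],
                verbose=False)
runner.run()
runner.print_results()
raise SystemExit(1 if runner.failures else 0)
```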
diff --git a/gyp/pylib/gyp/MSVSSettings.py b/gyp/pylib/gyp/MSVSSettings.py
index 4985756bdd..8ae19180ea 100644
--- a/gyp/pylib/gyp/MSVSSettings.py
+++ b/gyp/pylib/gyp/MSVSSettings.py
@@ -592,6 +592,7 @@ def _ValidateSettings(validators, settings, stderr):
_Same(_compile, 'UseFullPaths', _boolean) # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)
+_Same(_compile, 'CompileAsWinRT', _boolean) # /ZW
_Same(_compile, 'AssemblerOutput',
_Enumeration(['NoListing',
diff --git a/gyp/pylib/gyp/MSVSUtil.py b/gyp/pylib/gyp/MSVSUtil.py
index 0b32e91180..96dea6c2c9 100644
--- a/gyp/pylib/gyp/MSVSUtil.py
+++ b/gyp/pylib/gyp/MSVSUtil.py
@@ -14,6 +14,7 @@
'loadable_module': 'dll',
'shared_library': 'dll',
'static_library': 'lib',
+ 'windows_driver': 'sys',
}
@@ -110,7 +111,7 @@ def ShardTargets(target_list, target_dicts):
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
- for t in new_target_dicts:
+ for t in sorted(new_target_dicts):
for deptype in ('dependencies', 'dependencies_original'):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
diff --git a/gyp/pylib/gyp/MSVSVersion.py b/gyp/pylib/gyp/MSVSVersion.py
index d9bfa684fa..44b958d5b3 100644
--- a/gyp/pylib/gyp/MSVSVersion.py
+++ b/gyp/pylib/gyp/MSVSVersion.py
@@ -13,12 +13,16 @@
import glob
+def JoinPath(*args):
+ return os.path.normpath(os.path.join(*args))
+
+
class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
- path, sdk_based, default_toolset=None):
+ path, sdk_based, default_toolset=None, compatible_sdks=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
@@ -28,6 +32,9 @@ def __init__(self, short_name, description,
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
+ compatible_sdks = compatible_sdks or []
+ compatible_sdks.sort(key=lambda v: float(v.replace('v', '')), reverse=True)
+ self.compatible_sdks = compatible_sdks
def ShortName(self):
return self.short_name
@@ -68,43 +75,67 @@ def DefaultToolset(self):
of a user override."""
return self.default_toolset
- def SetupScript(self, target_arch):
+
+ def _SetupScriptInternal(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
- # Check if we are running in the SDK command line environment and use
- # the setup script from the SDK if so. |target_arch| should be either
- # 'x86' or 'x64'.
- assert target_arch in ('x86', 'x64')
- sdk_dir = os.environ.get('WindowsSDKDir')
- if self.sdk_based and sdk_dir:
- return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
- '/' + target_arch]
- else:
- # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
- # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
- # isn't always.
- if target_arch == 'x86':
- if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- # VS2013 and later, non-Express have a x64-x86 cross that we want
- # to prefer.
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
- # Otherwise, the standard x86 compiler.
- return [os.path.normpath(
- os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
+ assert target_arch in ('x86', 'x64'), "target_arch not supported"
+ # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+ # depot_tools build tools and should run SetEnv.Cmd to set up the
+ # environment. The check for WindowsSDKDir alone is not sufficient because
+ # this is set by running vcvarsall.bat.
+ sdk_dir = os.environ.get('WindowsSDKDir', '')
+ setup_path = JoinPath(sdk_dir, 'Bin', 'SetEnv.Cmd')
+ if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+ return [setup_path, '/' + target_arch]
+
+ is_host_arch_x64 = (
+ os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'
+ )
+
+ # For VS2017 (and newer) it's fairly easy
+ if self.short_name >= '2017':
+ script_path = JoinPath(self.path,
+ 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')
+
+ # Always use a native executable, cross-compiling if necessary.
+ host_arch = 'amd64' if is_host_arch_x64 else 'x86'
+ msvc_target_arch = 'amd64' if target_arch == 'x64' else 'x86'
+ arg = host_arch
+ if host_arch != msvc_target_arch:
+ arg += '_' + msvc_target_arch
+
+ return [script_path, arg]
+
+ # We try to find the best version of the env setup batch.
+ vcvarsall = JoinPath(self.path, 'VC', 'vcvarsall.bat')
+ if target_arch == 'x86':
+ if self.short_name >= '2013' and self.short_name[-1] != 'e' and \
+ is_host_arch_x64:
+ # VS2013 and later, non-Express have a x64-x86 cross that we want
+ # to prefer.
+ return [vcvarsall, 'amd64_x86']
else:
- assert target_arch == 'x64'
- arg = 'x86_amd64'
- # Use the 64-on-64 compiler if we're not using an express
- # edition and we're running on a 64bit OS.
- if self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- arg = 'amd64'
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
+ # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
+ # for x86 because vcvarsall calls vcvars32, which it can only find if
+ # VS??COMNTOOLS is set, which isn't guaranteed.
+ return [JoinPath(self.path, 'Common7', 'Tools', 'vsvars32.bat')]
+ elif target_arch == 'x64':
+ arg = 'x86_amd64'
+ # Use the 64-on-64 compiler if we're not using an express edition and
+ # we're running on a 64bit OS.
+ if self.short_name[-1] != 'e' and is_host_arch_x64:
+ arg = 'amd64'
+ return [vcvarsall, arg]
+
+ def SetupScript(self, target_arch):
+ script_data = self._SetupScriptInternal(target_arch)
+ script_path = script_data[0]
+ if not os.path.exists(script_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ return script_data
def _RegistryQueryBase(sysdir, key, value):
@@ -226,6 +257,16 @@ def _CreateVersion(name, path, sdk_based=False):
if path:
path = os.path.normpath(path)
versions = {
+ '2017': VisualStudioVersion('2017',
+ 'Visual Studio 2017',
+ solution_version='12.00',
+ project_version='15.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v141',
+ compatible_sdks=['v8.1', 'v10.0']),
'2015': VisualStudioVersion('2015',
'Visual Studio 2015',
solution_version='12.00',
@@ -338,7 +379,6 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
- Only versions 8-10 are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
@@ -346,6 +386,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (12)
2015 - Visual Studio 2015 (14)
+ 2017 - Visual Studio 2017 (15)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
@@ -355,6 +396,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
'11.0': '2012',
'12.0': '2013',
'14.0': '2015',
+ '15.0': '2017'
}
versions = []
for version in versions_to_check:
@@ -385,13 +427,18 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7',
+ r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7',
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
path = _ConvertToCygpath(path)
- if version != '14.0': # There is no Express edition for 2015.
+ if version == '15.0':
+ if os.path.exists(path):
+ versions.append(_CreateVersion('2017', path))
+ elif version != '14.0': # There is no Express edition for 2015.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
@@ -410,7 +457,7 @@ def SelectVisualStudioVersion(version='auto', allow_fallback=True):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
+ 'auto': ('15.0', '14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
@@ -422,6 +469,7 @@ def SelectVisualStudioVersion(version='auto', allow_fallback=True):
'2013': ('12.0',),
'2013e': ('12.0',),
'2015': ('14.0',),
+ '2017': ('15.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
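
The new `compatible_sdks` handling just orders candidate Windows SDK versions from newest to oldest before they are probed in the registry. A one-line illustration of that sort key:

```python
# Mirrors the sort in VisualStudioVersion.__init__: strip the 'v' prefix,
# compare numerically, newest first.
compatible_sdks = ['v8.1', 'v10.0']
compatible_sdks.sort(key=lambda v: float(v.replace('v', '')), reverse=True)
print(compatible_sdks)  # ['v10.0', 'v8.1']
```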
diff --git a/gyp/pylib/gyp/common.py b/gyp/pylib/gyp/common.py
index 256e3f3a6b..a1e1db5f12 100644
--- a/gyp/pylib/gyp/common.py
+++ b/gyp/pylib/gyp/common.py
@@ -433,7 +433,7 @@ def GetFlavor(params):
return 'linux'
-def CopyTool(flavor, out_path):
+def CopyTool(flavor, out_path, generator_flags={}):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
@@ -453,11 +453,18 @@ def CopyTool(flavor, out_path):
with open(source_path) as source_file:
source = source_file.readlines()
+ # Set custom header flags.
+ header = '# Generated by gyp. Do not edit.\n'
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if flavor == 'mac' and mac_toolchain_dir:
+ header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
+ % mac_toolchain_dir
+
# Add header and write it out.
tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
with open(tool_path, 'w') as tool_file:
tool_file.write(
- ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
+ ''.join([source[0], header] + source[1:]))
# Make file executable.
os.chmod(tool_path, 0755)
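
With the new `generator_flags` parameter, `CopyTool` can bake a `DEVELOPER_DIR` override into the copied mac tool. A small sketch of the header it would produce (the toolchain path is hypothetical):

```python
# Illustration of the header CopyTool prepends when the 'mac_toolchain_dir'
# generator flag is set; only the path differs between projects.
mac_toolchain_dir = '/opt/my-xcode/Contents/Developer'  # hypothetical path
header = '# Generated by gyp. Do not edit.\n'
header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir
print(header)
```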
diff --git a/gyp/pylib/gyp/easy_xml.py b/gyp/pylib/gyp/easy_xml.py
index 2b0bb60cb4..2522efb244 100644
--- a/gyp/pylib/gyp/easy_xml.py
+++ b/gyp/pylib/gyp/easy_xml.py
@@ -4,6 +4,7 @@
import re
import os
+import locale
def XmlToString(content, encoding='utf-8', pretty=False):
@@ -115,11 +116,10 @@ def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != '\r\n':
xml_string = xml_string.replace('\n', '\r\n')
-
- try:
- xml_string = xml_string.encode(encoding)
- except Exception:
- xml_string = unicode(xml_string, 'latin-1').encode(encoding)
+
+ default_encoding = locale.getdefaultlocale()[1]
+ if default_encoding and default_encoding.upper() != encoding.upper():
+ xml_string = xml_string.decode(default_encoding).encode(encoding)
# Get the old content
try:
diff --git a/gyp/pylib/gyp/generator/cmake.py b/gyp/pylib/gyp/generator/cmake.py
index 17f5e6396c..a2b96291aa 100644
--- a/gyp/pylib/gyp/generator/cmake.py
+++ b/gyp/pylib/gyp/generator/cmake.py
@@ -34,6 +34,7 @@
import string
import subprocess
import gyp.common
+import gyp.xcode_emulation
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
@@ -608,8 +609,8 @@ def CreateCMakeTargetName(self, qualified_target):
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output):
-
+ options, generator_flags, all_qualified_targets, flavor,
+ output):
# The make generator does this always.
# TODO: It would be nice to be able to tell CMake all dependencies.
circular_libs = generator_flags.get('circular', True)
@@ -633,6 +634,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
spec = target_dicts.get(qualified_target, {})
config = spec.get('configurations', {}).get(config_to_use, {})
+ xcode_settings = None
+ if flavor == 'mac':
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
target_name = spec.get('target_name', '')
target_type = spec.get('type', '')
target_toolset = spec.get('toolset')
@@ -904,10 +909,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
defines = config.get('defines')
if defines is not None:
SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
# Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
# CMake currently does not have target C and CXX flags.
@@ -927,6 +932,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cxx = config.get('cflags_cc', [])
+ if xcode_settings:
+ cflags = xcode_settings.GetCflags(config_to_use)
+ cflags_c = xcode_settings.GetCflagsC(config_to_use)
+ cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+ #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+ #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
@@ -965,6 +977,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
if ldflags is not None:
SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+ # XCode settings
+ xcode_settings = config.get('xcode_settings', {})
+ for xcode_setting, xcode_value in xcode_settings.viewitems():
+ SetTargetProperty(output, cmake_target_name,
+ "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
+ '' if isinstance(xcode_value, str) else ' ')
+
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
# Gyp does not retain or enforce specifying enough information to do so.
@@ -1029,7 +1048,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write(cmake_target_name)
output.write('\n')
if static_deps:
- write_group = circular_libs and len(static_deps) > 1
+ write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
if write_group:
output.write('-Wl,--start-group\n')
for dep in gyp.common.uniquer(static_deps):
@@ -1045,9 +1064,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write('\n')
if external_libs:
for lib in gyp.common.uniquer(external_libs):
- output.write(' ')
- output.write(lib)
- output.write('\n')
+ output.write(' "')
+ output.write(RemovePrefix(lib, "$(SDKROOT)"))
+ output.write('"\n')
output.write(')\n')
@@ -1059,6 +1078,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
params, config_to_use):
options = params['options']
generator_flags = params['generator_flags']
+ flavor = gyp.common.GetFlavor(params)
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
@@ -1141,7 +1161,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
# Force ninja to use rsp files. Otherwise link and ar lines can get too long,
# resulting in 'Argument list too long' errors.
- output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ # However, rsp files don't work correctly on Mac.
+ if flavor != 'mac':
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
output.write('\n')
namer = CMakeNamer(target_list)
@@ -1156,8 +1178,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
all_qualified_targets.add(qualified_target)
for qualified_target in target_list:
+ if flavor == 'mac':
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ spec = target_dicts[qualified_target]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
+
WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output)
+ options, generator_flags, all_qualified_targets, flavor, output)
output.close()
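
The cmake generator now also forwards any per-configuration `xcode_settings` to CMake as `XCODE_ATTRIBUTE_*` target properties. A rough sketch of that mapping, with made-up settings (string values pass through unchanged, list values are space-joined):

```python
# Sketch: turning an xcode_settings dict into XCODE_ATTRIBUTE_* names/values.
xcode_settings = {'CODE_SIGN_IDENTITY': 'Apple Development',       # hypothetical
                  'OTHER_LDFLAGS': ['-ObjC', '-framework Foundation']}
for xcode_setting, xcode_value in xcode_settings.items():
    sep = '' if isinstance(xcode_value, str) else ' '
    value = xcode_value if isinstance(xcode_value, str) else sep.join(xcode_value)
    print('XCODE_ATTRIBUTE_%s = %s' % (xcode_setting, value))
```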
diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/pylib/gyp/generator/make.py
index 64b9dd267b..ca1d55ebee 100644
--- a/gyp/pylib/gyp/generator/make.py
+++ b/gyp/pylib/gyp/generator/make.py
@@ -31,6 +31,8 @@
from gyp.common import GetEnvironFallback
from gyp.common import GypError
+import hashlib
+
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
@@ -90,7 +92,10 @@ def CalculateVariables(default_variables, params):
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
+ if flavor == 'aix':
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.a')
+ else:
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)')
default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
@@ -365,7 +370,7 @@ def CalculateGeneratorInputInfo(params):
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@"
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
%(link_commands)s
"""
@@ -1347,7 +1352,10 @@ def ComputeOutputBasename(self, spec):
if target[:3] == 'lib':
target = target[3:]
target_prefix = 'lib'
- target_ext = '.so'
+ if self.flavor == 'aix':
+ target_ext = '.a'
+ else:
+ target_ext = '.so'
elif self.type == 'none':
target = '%s.stamp' % target
elif self.type != 'executable':
@@ -1743,7 +1751,10 @@ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
# actual command.
# - The intermediate recipe will 'touch' the intermediate file.
# - The multi-output rule will have an do-nothing recipe.
- intermediate = "%s.intermediate" % (command if command else self.target)
+
+ # Hash the target name to avoid generating overlong filenames.
+ cmddigest = hashlib.sha1(command if command else self.target).hexdigest()
+ intermediate = "%s.intermediate" % (cmddigest)
self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
self.WriteLn('\t%s' % '@:');
self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
@@ -1917,13 +1928,11 @@ def _InstallableTargetInstallPath(self):
"""Returns the location of the final output for an installable target."""
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
# rely on this. Emulate this behavior for mac.
-
- # XXX(TooTallNate): disabling this code since we don't want this behavior...
- #if (self.type == 'shared_library' and
- # (self.flavor != 'mac' or self.toolset != 'target')):
- # # Install all shared libs into a common directory (per toolset) for
- # # convenient access with LD_LIBRARY_PATH.
- # return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
+ if (self.type == 'shared_library' and
+ (self.flavor != 'mac' or self.toolset != 'target')):
+ # Install all shared libs into a common directory (per toolset) for
+ # convenient access with LD_LIBRARY_PATH.
+ return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
return '$(builddir)/' + self.alias
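
The intermediate-file rename above exists because the old scheme embedded the full command in the filename, which could become overlong; hashing keeps it to a fixed 40-hex-character name. A tiny illustration (the `.encode` is only needed on Python 3; the generator itself runs under Python 2 and hashes the `str` directly):

```python
import hashlib

# A deliberately long multi-output rule command.
command = 'python tools/generate_bindings.py --input a.idl --output out/ ' * 20
digest = hashlib.sha1(command.encode('utf-8')).hexdigest()
intermediate = '%s.intermediate' % digest
print(len(intermediate), intermediate)  # 53 chars, e.g. '3f7a...e9.intermediate'
```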
diff --git a/gyp/pylib/gyp/generator/msvs.py b/gyp/pylib/gyp/generator/msvs.py
index 6bfad0f3bd..8fe9e5af23 100644
--- a/gyp/pylib/gyp/generator/msvs.py
+++ b/gyp/pylib/gyp/generator/msvs.py
@@ -46,6 +46,8 @@ def _import_OrderedDict():
generator_default_variables = {
+ 'DRIVER_PREFIX': '',
+ 'DRIVER_SUFFIX': '.sys',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '.exe',
'STATIC_LIB_PREFIX': '',
@@ -91,6 +93,7 @@ def _import_OrderedDict():
'msvs_target_platform_minversion',
]
+generator_filelist_paths = None
# List of precompiled header related keys.
precomp_keys = [
@@ -256,6 +259,8 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
if not tools.get(tool_name):
tools[tool_name] = dict()
tool = tools[tool_name]
+ if 'CompileAsWinRT' == setting:
+ return
if tool.get(setting):
if only_if_unset: return
if type(tool[setting]) == list and type(value) == list:
@@ -269,6 +274,10 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
tool[setting] = value
+def _ConfigTargetVersion(config_data):
+ return config_data.get('msvs_target_version', 'Windows7')
+
+
def _ConfigPlatform(config_data):
return config_data.get('msvs_configuration_platform', 'Win32')
@@ -285,20 +294,31 @@ def _ConfigFullName(config_name, config_data):
return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
-def _ConfigWindowsTargetPlatformVersion(config_data):
- ver = config_data.get('msvs_windows_target_platform_version')
- if not ver or re.match(r'^\d+', ver):
- return ver
- for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
- r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
- sdkdir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
- if not sdkdir:
- continue
- version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
- # find a matching entry in sdkdir\include
- names = sorted([x for x in os.listdir(r'%s\include' % sdkdir) \
- if x.startswith(version)], reverse = True)
- return names[0]
+def _ConfigWindowsTargetPlatformVersion(config_data, version):
+ config_ver = config_data.get('msvs_windows_sdk_version')
+ vers = [config_ver] if config_ver else version.compatible_sdks
+ for ver in vers:
+ for key in [
+ r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
+ r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
+ sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
+ if not sdk_dir:
+ continue
+ version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
+ # Find a matching entry in sdk_dir\include.
+ expected_sdk_dir=r'%s\include' % sdk_dir
+ names = sorted([x for x in (os.listdir(expected_sdk_dir)
+ if os.path.isdir(expected_sdk_dir)
+ else []
+ )
+ if x.startswith(version)], reverse=True)
+ if names:
+ return names[0]
+ else:
+ print >> sys.stdout, (
+ 'Warning: No include files found for '
+ 'detected Windows SDK version %s' % (version)
+ )
def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
@@ -917,6 +937,8 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version):
toolset = default_config.get('msbuild_toolset')
if not toolset and version.DefaultToolset():
toolset = version.DefaultToolset()
+ if spec['type'] == 'windows_driver':
+ toolset = 'WindowsKernelModeDriver10.0'
return toolset
@@ -1100,6 +1122,7 @@ def _GetMSVSConfigurationType(spec, build_file):
'shared_library': '2', # .dll
'loadable_module': '2', # .dll
'static_library': '4', # .lib
+ 'windows_driver': '5', # .sys
'none': '10', # Utility type
}[spec['type']]
except KeyError:
@@ -1284,6 +1307,7 @@ def _GetOutputFilePathAndTool(spec, msbuild):
'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'),
'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
}
output_file_props = output_file_map.get(spec['type'])
@@ -1346,7 +1370,8 @@ def _GetDisabledWarnings(config):
def _GetModuleDefinition(spec):
def_file = ''
- if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
+ if spec['type'] in ['shared_library', 'loadable_module', 'executable',
+ 'windows_driver']:
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
def_file = _FixPath(def_files[0])
@@ -1702,14 +1727,17 @@ def _GetCopies(spec):
src_bare = src[:-1]
base_dir = posixpath.split(src_bare)[0]
outer_dir = posixpath.split(src_bare)[1]
- cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
- _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
+ fixed_dst = _FixPath(dst)
+ full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir)
+ cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
+ full_dst, _FixPath(base_dir), outer_dir, full_dst)
copies.append(([src], ['dummy_copies', dst], cmd,
- 'Copying %s to %s' % (src, dst)))
+ 'Copying %s to %s' % (src, fixed_dst)))
else:
+ fix_dst = _FixPath(cpy['destination'])
cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
- _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
- copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
+ fix_dst, _FixPath(src), _FixPath(dst))
+ copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, fix_dst)))
return copies
@@ -1953,6 +1981,19 @@ def PerformBuild(data, configurations, params):
rtn = subprocess.check_call(arguments)
+def CalculateGeneratorInputInfo(params):
+ if params.get('flavor') == 'ninja':
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ninja_generator.ComputeOutputDir(params),
+ 'gypfiles-msvs-ninja'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files.
@@ -2638,7 +2679,7 @@ def _GetMSBuildProjectConfigurations(configurations):
return [group]
-def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
+def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
properties = [
['PropertyGroup', {'Label': 'Globals'},
@@ -2653,6 +2694,18 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
properties[0].append(['PreferredToolArchitecture', 'x64'])
+ if spec.get('msvs_target_platform_version'):
+ target_platform_version = spec.get('msvs_target_platform_version')
+ properties[0].append(['WindowsTargetPlatformVersion',
+ target_platform_version])
+ if spec.get('msvs_target_platform_minversion'):
+ target_platform_minversion = spec.get('msvs_target_platform_minversion')
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_minversion])
+ else:
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_version])
+
if spec.get('msvs_enable_winrt'):
properties[0].append(['DefaultLanguage', 'en-US'])
properties[0].append(['AppContainerApplication', 'true'])
@@ -2661,49 +2714,45 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
properties[0].append(['ApplicationTypeRevision', app_type_revision])
else:
properties[0].append(['ApplicationTypeRevision', '8.1'])
-
- if spec.get('msvs_target_platform_version'):
- target_platform_version = spec.get('msvs_target_platform_version')
- properties[0].append(['WindowsTargetPlatformVersion',
- target_platform_version])
- if spec.get('msvs_target_platform_minversion'):
- target_platform_minversion = spec.get('msvs_target_platform_minversion')
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_minversion])
- else:
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_version])
if spec.get('msvs_enable_winphone'):
properties[0].append(['ApplicationType', 'Windows Phone'])
else:
properties[0].append(['ApplicationType', 'Windows Store'])
platform_name = None
- msvs_windows_target_platform_version = None
+ msvs_windows_sdk_version = None
for configuration in spec['configurations'].itervalues():
platform_name = platform_name or _ConfigPlatform(configuration)
- msvs_windows_target_platform_version = \
- msvs_windows_target_platform_version or \
- _ConfigWindowsTargetPlatformVersion(configuration)
- if platform_name and msvs_windows_target_platform_version:
+ msvs_windows_sdk_version = (msvs_windows_sdk_version or
+ _ConfigWindowsTargetPlatformVersion(configuration, version))
+ if platform_name and msvs_windows_sdk_version:
break
+ if msvs_windows_sdk_version:
+ properties[0].append(['WindowsTargetPlatformVersion',
+ str(msvs_windows_sdk_version)])
+ elif version.compatible_sdks:
+ raise GypError('%s requires any SDK of %s version, but none were found' %
+ (version.description, version.compatible_sdks))
if platform_name == 'ARM':
properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
- if msvs_windows_target_platform_version:
- properties[0].append(['WindowsTargetPlatformVersion', \
- str(msvs_windows_target_platform_version)])
return properties
+
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
for name, settings in spec['configurations'].iteritems():
msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
condition = _GetConfigurationCondition(name, settings)
character_set = msbuild_attributes.get('CharacterSet')
+ config_type = msbuild_attributes.get('ConfigurationType')
_AddConditionalProperty(properties, condition, 'ConfigurationType',
- msbuild_attributes['ConfigurationType'])
+ config_type)
+ if config_type == 'Driver':
+ _AddConditionalProperty(properties, condition, 'DriverType', 'WDM')
+ _AddConditionalProperty(properties, condition, 'TargetVersion',
+ _ConfigTargetVersion(settings))
if character_set:
if 'msvs_enable_winrt' not in spec :
_AddConditionalProperty(properties, condition, 'CharacterSet',
@@ -2802,6 +2851,7 @@ def _ConvertMSVSConfigurationType(config_type):
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
+ '5': 'Driver',
'10': 'Utility'
}[config_type]
return config_type
@@ -2844,6 +2894,7 @@ def _GetMSBuildAttributes(spec, config, build_file):
'executable': 'Link',
'shared_library': 'Link',
'loadable_module': 'Link',
+ 'windows_driver': 'Link',
'static_library': 'Lib',
}
msbuild_tool = msbuild_tool_map.get(spec['type'])
@@ -3336,7 +3387,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
}]
content += _GetMSBuildProjectConfigurations(configurations)
- content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
+ content += _GetMSBuildGlobalProperties(spec, version, project.guid,
+ project_file_name)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
if spec.get('msvs_enable_winphone'):
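
A hypothetical `.gyp` target using the new `windows_driver` type added throughout this file; per the changes above it maps to the `Driver` configuration type, a `.sys` output handled by the `Link` tool, and the `WindowsKernelModeDriver10.0` toolset:

```python
# driver.gyp (hypothetical) -- exercises the new 'windows_driver' target type.
{
  'targets': [
    {
      'target_name': 'my_driver',   # hypothetical name
      'type': 'windows_driver',     # new type: .sys output, Driver config
      'sources': [
        'driver.c',
        'driver.def',               # .def files are now picked up for drivers too
      ],
    },
  ],
}
```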
diff --git a/gyp/pylib/gyp/generator/ninja.py b/gyp/pylib/gyp/generator/ninja.py
index 841067ed34..0555a4a90d 100644
--- a/gyp/pylib/gyp/generator/ninja.py
+++ b/gyp/pylib/gyp/generator/ninja.py
@@ -148,6 +148,9 @@ def __init__(self, type):
# because dependents only link against the lib (not both the lib and the
# dll) we keep track of the import library here.
self.import_lib = None
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
def Linkable(self):
"""Return true if this is a target that can be linked against."""
@@ -375,14 +378,17 @@ def WriteSpec(self, spec, config_name, generator_flags):
self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
- # Track if this target contains any C++ files, to decide if gcc or g++
- # should be used for linking.
- self.uses_cpp = False
+
+ self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/')
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
+
if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags)
@@ -419,6 +425,8 @@ def WriteSpec(self, spec, config_name, generator_flags):
target = self.target_outputs[dep]
actions_depends.append(target.PreActionInput(self.flavor))
compile_depends.append(target.PreCompileInput())
+ if target.uses_cpp:
+ self.target.uses_cpp = True
actions_depends = filter(None, actions_depends)
compile_depends = filter(None, compile_depends)
actions_depends = self.WriteCollapsedDependencies('actions_depends',
@@ -444,7 +452,12 @@ def WriteSpec(self, spec, config_name, generator_flags):
# Write out the compilation steps, if any.
link_deps = []
- sources = extra_sources + spec.get('sources', [])
+ try:
+ sources = extra_sources + spec.get('sources', [])
+ except TypeError:
+ print 'extra_sources: ', str(extra_sources)
+ print 'spec.get("sources"): ', str(spec.get('sources'))
+ raise
if sources:
if self.flavor == 'mac' and len(self.archs) > 1:
# Write subninja file containing compile and link commands scoped to
@@ -559,6 +572,9 @@ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
+ if self.xcode_settings and self.xcode_settings.IsIosFramework():
+ self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
+
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
@@ -656,6 +672,7 @@ def WriteRules(self, rules, extra_sources, prebuild,
for var in special_locals:
if '${%s}' % var in argument:
needed_variables.add(var)
+ needed_variables = sorted(needed_variables)
def cygwin_munge(path):
# pylint: disable=cell-var-from-loop
@@ -729,6 +746,7 @@ def cygwin_munge(path):
# WriteNewNinjaRule uses unique_name for creating an rsp file on win.
extra_bindings.append(('unique_name',
hashlib.md5(outputs[0]).hexdigest()))
+
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
@@ -740,7 +758,11 @@ def cygwin_munge(path):
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
- env = self.GetToolchainEnv()
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
@@ -762,18 +784,38 @@ def WriteCopies(self, copies, prebuild, mac_bundle_depends):
return outputs
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec['sources']
+ copy_headers = spec['mac_framework_headers']
+ output = self.GypPathToUniqueOutput('headers.hmap')
+ self.xcode_settings.header_map_path = output
+ all_headers = map(self.GypPathToNinja,
+ filter(lambda x:x.endswith(('.h')), all_sources))
+ variables = [('framework', framework),
+ ('copy_headers', map(self.GypPathToNinja, copy_headers))]
+ outputs.extend(self.ninja.build(
+ output, 'compile_ios_framework_headers', all_headers,
+ variables=variables, order_only=prebuild))
+
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
if os.path.splitext(output)[-1] != '.xcassets':
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource'), \
- ('binary', isBinary)])
+ ('env', env), ('binary', isBinary)])
bundle_depends.append(output)
else:
xcassets.append(res)
@@ -992,7 +1034,7 @@ def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx'
- self.uses_cpp = True
+ self.target.uses_cpp = True
elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
@@ -1007,7 +1049,7 @@ def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
command = 'objc'
elif self.flavor == 'mac' and ext == 'mm':
command = 'objcxx'
- self.uses_cpp = True
+ self.target.uses_cpp = True
elif self.flavor == 'win' and ext == 'rc':
command = 'rc'
obj_ext = '.res'
@@ -1058,16 +1100,16 @@ def WritePchTargets(self, ninja_file, pch_commands):
cmd = map.get(lang)
ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
- def WriteLink(self, spec, config_name, config, link_deps):
+ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
"""Write out a link step. Fills out target.binary. """
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteLinkForArch(
- self.ninja, spec, config_name, config, link_deps)
+ self.ninja, spec, config_name, config, link_deps, compile_deps)
else:
output = self.ComputeOutput(spec)
inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
config_name, config, link_deps[arch],
- arch=arch)
+ compile_deps, arch=arch)
for arch in self.archs]
extra_bindings = []
build_output = output
@@ -1086,7 +1128,7 @@ def WriteLink(self, spec, config_name, config, link_deps):
return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
- link_deps, arch=None):
+ link_deps, compile_deps, arch=None):
"""Write out a link step. Fills out target.binary. """
command = {
'executable': 'link',
@@ -1099,6 +1141,14 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
solibs = set()
order_deps = set()
+ if compile_deps:
+ # Normally, the compiles of the target already depend on compile_deps,
+ # but a shared_library target might have no sources and only link together
+ # a few static_library deps, so the link step also needs to depend
+ # on compile_deps to make sure actions in the shared_library target
+ # get run before the link.
+ order_deps.add(compile_deps)
+
if 'dependencies' in spec:
# Two kinds of dependencies:
# - Linkable dependencies (like a .a or a .so): add them to the link line.
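The order-only dependency added in the hunk above can be illustrated with gyp's bundled `ninja_syntax` writer. This is a sketch only: the rule name, file names, and the `actions_stamp` placeholder are invented, and it assumes `gyp/pylib` is importable.

```
# Sketch, not taken from the patch: an order-only input (ninja's "||" group)
# forces ordering without making the output dirty when only the stamp changes.
import sys
from gyp import ninja_syntax   # assumes gyp/pylib is on sys.path

w = ninja_syntax.Writer(sys.stdout)
w.rule('solink', command='$ld -shared -o $out $in')   # invented rule
# 'actions_stamp' stands in for compile_deps: the link waits for it, but a
# newer stamp alone does not trigger a re-link of libfoo.so.
w.build('libfoo.so', 'solink', ['bar.a'], order_only=['actions_stamp'])
```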
@@ -1135,7 +1185,7 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
implicit_deps.add(final_output)
extra_bindings = []
- if self.uses_cpp and self.flavor != 'win':
+ if self.target.uses_cpp and self.flavor != 'win':
extra_bindings.append(('ld', '$ldxx'))
output = self.ComputeOutput(spec, arch)
@@ -1178,7 +1228,9 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+          ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)

+ else:
+ ldflags.append('-Wl,-rpath=%s' % self.target_rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
map(self.ExpandSpecial, ldflags))
@@ -1252,10 +1304,11 @@ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
if len(solibs):
- extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
+ extra_bindings.append(('solibs',
+ gyp.common.EncodePOSIXShellList(sorted(solibs))))
ninja_file.build(output, command + command_suffix, link_deps,
- implicit=list(implicit_deps),
+ implicit=sorted(implicit_deps),
order_only=list(order_deps),
variables=extra_bindings)
return linked_binary
@@ -1308,7 +1361,8 @@ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
# needed.
variables=variables)
else:
- self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
+ self.target.binary = self.WriteLink(spec, config_name, config, link_deps,
+ compile_deps)
return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
@@ -1321,9 +1375,13 @@ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
is_command_start=not package_framework)
if package_framework and not is_empty:
- variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
- self.ninja.build(output, 'package_framework', mac_bundle_depends,
- variables=variables)
+ if spec['type'] == 'shared_library' and self.xcode_settings.isIOS:
+ self.ninja.build(output, 'package_ios_framework', mac_bundle_depends,
+ variables=variables)
+ else:
+ variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(output, 'package_framework', mac_bundle_depends,
+ variables=variables)
else:
self.ninja.build(output, 'stamp', mac_bundle_depends,
variables=variables)
@@ -1810,7 +1868,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
- gyp.common.CopyTool(flavor, toplevel_build)
+ gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
# Grab make settings for CC/CXX.
# The rules are
@@ -1836,7 +1894,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
ld_host = '$cc_host'
ldxx_host = '$cxx_host'
- ar_host = 'ar'
+ ar_host = ar
cc_host = None
cxx_host = None
cc_host_global_setting = None
@@ -1891,6 +1949,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
if flavor == 'win':
configs = [target_dicts[qualified_target]['configurations'][config_name]
for qualified_target in target_list]
@@ -1901,7 +1963,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
configs, generator_flags)
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
+ for arch, path in sorted(cl_paths.iteritems()):
if clang_cl:
# If we have selected clang-cl, use that instead.
path = clang_cl
@@ -2224,6 +2286,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'compile_xcassets',
description='COMPILE XCASSETS $in',
command='$env ./gyp-mac-tool compile-xcassets $keys $in')
+ master_ninja.rule(
+ 'compile_ios_framework_headers',
+ description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in',
+ command='$env ./gyp-mac-tool compile-ios-framework-header-map $out '
+ '$framework $in && $env ./gyp-mac-tool '
+ 'copy-ios-framework-headers $framework $copy_headers')
master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
@@ -2233,6 +2301,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out')
+ master_ninja.rule(
+ 'package_ios_framework',
+ description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-ios-framework $out $postbuilds '
+ '&& touch $out')
if flavor == 'win':
master_ninja.rule(
'stamp',
@@ -2250,7 +2323,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.rule(
'copy',
description='COPY $in $out',
- command='rm -rf $out && cp -af $in $out')
+ command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
master_ninja.newline()
all_targets = set()
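The revised `copy` rule above first attempts a hard link and only falls back to a full copy. A rough Python equivalent of that shell one-liner, for regular files only and with invented names, is:

```
# Rough equivalent of: ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)
import os
import shutil

def link_or_copy(src, dst):
    try:
        if os.path.exists(dst):
            os.unlink(dst)      # ln -f replaces an existing destination
        os.link(src, dst)       # cheap hard link when on the same filesystem
    except OSError:
        if os.path.isdir(dst):
            shutil.rmtree(dst)  # rm -rf
        elif os.path.exists(dst):
            os.unlink(dst)
        shutil.copy2(src, dst)  # cp -af for a regular file (keeps metadata)
```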
@@ -2337,7 +2410,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# able to run actions and build libraries by their short name.
master_ninja.newline()
master_ninja.comment('Short names for targets.')
- for short_name in target_short_names:
+ for short_name in sorted(target_short_names):
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
@@ -2353,7 +2426,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if all_outputs:
master_ninja.newline()
- master_ninja.build('all', 'phony', list(all_outputs))
+ master_ninja.build('all', 'phony', sorted(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
diff --git a/gyp/pylib/gyp/generator/xcode.py b/gyp/pylib/gyp/generator/xcode.py
index 0e3fb9301e..db99d6ab81 100644
--- a/gyp/pylib/gyp/generator/xcode.py
+++ b/gyp/pylib/gyp/generator/xcode.py
@@ -77,6 +77,7 @@
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
+ 'mac_xcuitest_bundle',
'xcode_create_dependents_test_runner',
]
@@ -691,6 +692,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
+ 'loadable_module+xcuitest': 'com.apple.product-type.bundle.ui-testing',
'shared_library+bundle': 'com.apple.product-type.framework',
'executable+extension+bundle': 'com.apple.product-type.app-extension',
'executable+watch+extension+bundle':
@@ -707,13 +709,19 @@ def GenerateOutput(target_list, target_dicts, data, params):
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
+ is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
is_app_extension = int(spec.get('ios_app_extension', 0))
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
- if is_xctest:
+ if is_xcuitest:
+ type_bundle_key += '+xcuitest'
+ assert type == 'loadable_module', (
+ 'mac_xcuitest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
@@ -745,6 +753,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
+ assert not is_xcuitest, (
+ 'mac_xcuitest_bundle targets cannot have type none (target "%s")' %
+ target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
diff --git a/gyp/pylib/gyp/input.py b/gyp/pylib/gyp/input.py
index 7567d0a05b..56cdece0aa 100644
--- a/gyp/pylib/gyp/input.py
+++ b/gyp/pylib/gyp/input.py
@@ -33,6 +33,7 @@
'shared_library',
'loadable_module',
'mac_kernel_extension',
+ 'windows_driver',
]
# A list of sections that contain links to other targets.
@@ -1542,11 +1543,15 @@ def FlattenToList(self):
# dependents.
flat_list = OrderedSet()
+ def ExtractNodeRef(node):
+ """Extracts the object that the node represents from the given node."""
+ return node.ref
+
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
# of this node, because when the graph was built, nodes with no
# dependencies were made implicit dependents of the root node.
- in_degree_zeros = set(self.dependents[:])
+ in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
while in_degree_zeros:
# Nodes in in_degree_zeros have no dependencies not in flat_list, so they
@@ -1558,12 +1563,13 @@ def FlattenToList(self):
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
- for node_dependent in node.dependents:
+ for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
is_in_degree_zero = True
# TODO: We want to check through the
# node_dependent.dependencies list but if it's long and we
# always start at the beginning, then we get O(n^2) behaviour.
- for node_dependent_dependency in node_dependent.dependencies:
+ for node_dependent_dependency in (sorted(node_dependent.dependencies,
+ key=ExtractNodeRef)):
if not node_dependent_dependency.ref in flat_list:
# The dependent one or more dependencies not in flat_list. There
# will be more chances to add it to flat_list when examining
@@ -1576,7 +1582,7 @@ def FlattenToList(self):
# All of the dependent's dependencies are already in flat_list. Add
# it to in_degree_zeros where it will be processed in a future
# iteration of the outer loop.
- in_degree_zeros.add(node_dependent)
+ in_degree_zeros += [node_dependent]
return list(flat_list)
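The sorting added in this hunk is about determinism: the flattened target list comes out the same on every run. A standalone sketch of the same idea, using plain dicts rather than gyp's DependencyGraphNode objects, is:

```
# Simplified Kahn-style flattening: candidates are always taken in a fixed,
# reproducible order, which is what the sorted() calls above achieve.
def flatten(deps):
    """deps maps each node to the set of nodes it depends on."""
    flat = []
    remaining = {n: set(d) for n, d in deps.items()}
    ready = sorted(n for n, d in remaining.items() if not d)
    while ready:
        node = ready.pop(0)
        flat.append(node)
        for other in sorted(remaining):
            remaining[other].discard(node)
            if other not in flat and other not in ready and not remaining[other]:
                ready.append(other)
    return flat

print(flatten({'a': set(), 'b': {'a'}, 'c': {'a', 'b'}}))  # ['a', 'b', 'c']
```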
@@ -1732,12 +1738,13 @@ def _LinkDependenciesInternal(self, targets, include_shared_libraries,
dependencies.add(self.ref)
return dependencies
- # Executables, mac kernel extensions and loadable modules are already fully
- # and finally linked. Nothing else can be a link dependency of them, there
- # can only be dependencies in the sense that a dependent target might run
- # an executable or load the loadable_module.
+ # Executables, mac kernel extensions, windows drivers and loadable modules
+ # are already fully and finally linked. Nothing else can be a link
+ # dependency of them, there can only be dependencies in the sense that a
+ # dependent target might run an executable or load the loadable_module.
if not initial and target_type in ('executable', 'loadable_module',
- 'mac_kernel_extension'):
+ 'mac_kernel_extension',
+ 'windows_driver'):
return dependencies
# Shared libraries are already fully linked. They should only be included
@@ -2488,7 +2495,7 @@ def ValidateTargetType(target, target_dict):
"""
VALID_TARGET_TYPES = ('executable', 'loadable_module',
'static_library', 'shared_library',
- 'mac_kernel_extension', 'none')
+ 'mac_kernel_extension', 'none', 'windows_driver')
target_type = target_dict.get('type', None)
if target_type not in VALID_TARGET_TYPES:
raise GypError("Target %s has an invalid target type '%s'. "
diff --git a/gyp/pylib/gyp/mac_tool.py b/gyp/pylib/gyp/mac_tool.py
index eeeaceb0c7..0ad7e7a1b6 100755
--- a/gyp/pylib/gyp/mac_tool.py
+++ b/gyp/pylib/gyp/mac_tool.py
@@ -17,6 +17,7 @@
import re
import shutil
import string
+import struct
import subprocess
import sys
import tempfile
@@ -48,6 +49,7 @@ def _CommandifyName(self, name_string):
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
"""Copies a resource file to the bundle/Resources directory, performing any
necessary compilation on each resource."""
+ convert_to_binary = convert_to_binary == 'True'
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
# Copy tree.
@@ -61,11 +63,16 @@ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
return self._CopyXIBFile(source, dest)
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
+ elif extension == '.strings' and not convert_to_binary:
+ self._CopyStringsFile(source, dest)
else:
+ if os.path.exists(dest):
+ os.unlink(dest)
shutil.copy(source, dest)
+ if convert_to_binary and extension in ('.plist', '.strings'):
+ self._ConvertToBinary(dest)
+
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
@@ -76,27 +83,49 @@ def _CopyXIBFile(self, source, dest):
if os.path.relpath(dest):
dest = os.path.join(base, dest)
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
+
+ if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
+ args.extend(['--auto-activate-custom-fonts'])
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
+ args.extend([
+ '--target-device', 'iphone', '--target-device', 'ipad',
+ '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
+ ])
+ else:
+ args.extend([
+ '--target-device', 'mac',
+ '--minimum-deployment-target',
+ os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ ])
+
+ args.extend(['--output-format', 'human-readable-text', '--compile', dest,
+ source])
+
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
+ try:
+ stdout = subprocess.check_output(args)
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise
current_section_header = None
- for line in ibtoolout.stdout:
+ for line in stdout.splitlines():
if ibtool_section_re.match(line):
current_section_header = line
elif not ibtool_re.match(line):
if current_section_header:
- sys.stdout.write(current_section_header)
+ print(current_section_header)
current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
+ print(line)
+ return 0
def _ConvertToBinary(self, dest):
subprocess.check_call([
'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
- def _CopyStringsFile(self, source, dest, convert_to_binary):
+ def _CopyStringsFile(self, source, dest):
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
input_code = self._DetectInputEncoding(source) or "UTF-8"
@@ -116,16 +145,13 @@ def _CopyStringsFile(self, source, dest, convert_to_binary):
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
fp = open(file_name, 'rb')
try:
header = fp.read(3)
- except e:
+ except:
fp.close()
return None
fp.close()
@@ -153,7 +179,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
# Go through all the environment variables and replace them as variables in
# the file.
- IDENT_RE = re.compile(r'[/\s]')
+ IDENT_RE = re.compile(r'[_/\s]')
for key in os.environ:
if key.startswith('_'):
continue
@@ -228,7 +254,8 @@ def ExecFlock(self, lockfile, *cmd_list):
def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
+ libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
+ r'file: .* has no symbols$')
libtool_re5 = re.compile(
r'^.*libtool: warning for library: ' +
r'.* the table of contents is empty ' +
@@ -253,6 +280,23 @@ def ExecFilterLibtool(self, *cmd_list):
break
return libtoolout.returncode
+ def ExecPackageIosFramework(self, framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+ module_path = os.path.join(framework, 'Modules');
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
+ module_template = 'framework module %s {\n' \
+ ' umbrella header "%s.h"\n' \
+ '\n' \
+ ' export *\n' \
+ ' module * { export * }\n' \
+ '}\n' % (binary, binary)
+
+ module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
+ module_file.write(module_template)
+ module_file.close()
+
def ExecPackageFramework(self, framework, version):
"""Takes a path to Something.framework and the Current version of that and
sets up all the symlinks."""
@@ -289,6 +333,23 @@ def _Relink(self, dest, link):
os.remove(link)
os.symlink(dest, link)
+ def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+ framework_name = os.path.basename(framework).split('.')[0]
+ all_headers = map(os.path.abspath, all_headers)
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+
+ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+ header_path = os.path.join(framework, 'Headers');
+ if not os.path.exists(header_path):
+ os.makedirs(header_path)
+ for header in copy_headers:
+ shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
def ExecCompileXcassets(self, keys, *inputs):
"""Compiles multiple .xcassets files into a single .car file.
@@ -349,49 +410,28 @@ def ExecMergeInfoPlist(self, output, *inputs):
self._MergePlist(merged_plist, plist)
plistlib.writePlist(merged_plist, output)
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
+ 1. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
"""
- resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
+ args = ['codesign', '--force', '--sign', key]
+ if preserve == 'True':
+ args.extend(['--deep', '--preserve-metadata=identifier,entitlements'])
+ else:
+ args.extend(['--entitlements', entitlements_path])
+ args.extend(['--timestamp=none', path])
+ subprocess.check_call(args)
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
@@ -606,5 +646,71 @@ def _ExpandVariables(self, data, substitutions):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
+def NextGreaterPowerOf2(x):
+ return 2**(x).bit_length()
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
+
+ out = open(output_name, "wb")
+  out.write(struct.pack('<LHHLLLL', magic, version, _reserved,
+                        strings_offset, count, capacity, max_value_length))
diff --git a/gyp/pylib/gyp/msvs_emulation.py b/gyp/pylib/gyp/msvs_emulation.py
--- a/gyp/pylib/gyp/msvs_emulation.py
+++ b/gyp/pylib/gyp/msvs_emulation.py
+  # Use a heuristic to try to find args that are paths, and normalize them.
+  if arg.find('/') > 0 or arg.count('/') > 1:
+ arg = os.path.normpath(arg)
+
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
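A quick worked example of the backslash rule described in that comment (sketch only; it mirrors the regex substitution used by `QuoteForRspFile` rather than quoting it verbatim):

```
# A quote preceded by n backslashes must become 2n+1 backslashes plus the quote.
import re

windows_quoter = re.compile(r'(\\*)"')

def quote_arg(arg):
    quoted = windows_quoter.sub(lambda m: 2 * m.group(1) + r'\"', arg)
    return '"%s"' % quoted

print(quote_arg(r'say "hi"'))   # "say \"hi\""   (n=0 -> 1 backslash before each quote)
print(quote_arg('path\\"x'))    # "path\\\"x"    (n=1 -> 3 backslashes before the quote)
```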
@@ -269,7 +273,8 @@ def ConvertVSMacros(self, s, base_to_build=None, config=None):
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
- return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
+ return [lib + '.lib' if not lib.lower().endswith('.lib') else lib
+ for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
@@ -306,7 +311,10 @@ def _TargetConfig(self, config):
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
- # 'Release_x64'), and a second target-specific configuration, which is an
+ # 'Release'), VS2015 and later only use this level
+ if self.vs_version.short_name >= 2015:
+ return config
+ # and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
@@ -468,8 +476,10 @@ def GetCflags(self, config):
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
- if self.vs_version.short_name in ('2013', '2013e', '2015'):
- # New flag required in 2013 to maintain previous PDB behavior.
+ if self.vs_version.project_version >= 12.0:
+ # New flag introduced in VS2013 (project version 12.0) Forces writes to
+ # the program database (PDB) to be serialized through MSPDBSRV.EXE.
+ # https://msdn.microsoft.com/en-us/library/dn502518.aspx
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
@@ -485,8 +495,9 @@ def _GetPchFlags(self, config, extension):
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
- pch = os.path.split(self.msvs_precompiled_header[config])[1]
- return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
+ pch = self.msvs_precompiled_header[config]
+ pchbase = os.path.split(pch)[1]
+ return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pchbase + '.pch']
return []
def GetCflagsC(self, config):
@@ -528,7 +539,8 @@ def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
- def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
+ def_files = [s for s in spec.get('sources', [])
+ if s.lower().endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
@@ -888,7 +900,7 @@ def __init__(
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
- return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
+ return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
@@ -961,6 +973,10 @@ def _ExtractImportantEnvironment(output_of_set):
'tmp',
)
env = {}
+ # This occasionally happens and leads to misleading SYSTEMROOT error messages
+ # if not caught here.
+ if output_of_set.count('=') == 0:
+ raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
@@ -1029,6 +1045,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
diff --git a/gyp/pylib/gyp/win_tool.py b/gyp/pylib/gyp/win_tool.py
index bb6f1ea436..1c843a0b6c 100755
--- a/gyp/pylib/gyp/win_tool.py
+++ b/gyp/pylib/gyp/win_tool.py
@@ -116,11 +116,19 @@ def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
env = self._GetEnv(arch)
if use_separate_mspdbsrv == 'True':
self._UseSeparateMspdbsrv(env, args)
- link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
- shell=True,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ if sys.platform == 'win32':
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace('/', '\\')
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+ # For that reason, since going through the shell doesn't seem necessary on
+ # non-Windows don't do that there.
+ link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
if (not line.startswith(' Creating library ') and
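The `subprocess` behaviour quoted in the comment above is easy to demonstrate. On a Unix machine, the following illustration (not taken from the patch) shows why a sequence combined with `shell=True` silently drops every argument after the first:

```
# On POSIX, shell=True with a list runs: /bin/sh -c 'echo one' 'two'
# so 'two' only becomes $0 of the shell and never reaches echo.
import subprocess

subprocess.call(['echo one', 'two'], shell=True)   # prints: one
subprocess.call(['echo', 'one', 'two'])            # prints: one two
```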
diff --git a/gyp/pylib/gyp/xcode_emulation.py b/gyp/pylib/gyp/xcode_emulation.py
index b06bdc4e8b..dba8e7699e 100644
--- a/gyp/pylib/gyp/xcode_emulation.py
+++ b/gyp/pylib/gyp/xcode_emulation.py
@@ -147,6 +147,7 @@ class XcodeSettings(object):
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
+ _platform_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
@@ -161,6 +162,8 @@ def __init__(self, spec):
self.spec = spec
self.isIOS = False
+ self.mac_toolchain_dir = None
+ self.header_map_path = None
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
@@ -221,8 +224,19 @@ def IsBinaryOutputFormat(self, configname):
default)
return format == "binary"
+ def IsIosFramework(self):
+ return self.spec['type'] == 'shared_library' and self._IsBundle() and \
+ self.isIOS
+
def _IsBundle(self):
- return int(self.spec.get('mac_bundle', 0)) != 0
+ return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest() or \
+ self._IsXCUiTest()
+
+ def _IsXCTest(self):
+ return int(self.spec.get('mac_xctest_bundle', 0)) != 0
+
+ def _IsXCUiTest(self):
+ return int(self.spec.get('mac_xcuitest_bundle', 0)) != 0
def _IsIosAppExtension(self):
return int(self.spec.get('ios_app_extension', 0)) != 0
@@ -298,11 +312,62 @@ def GetBundleResourceFolder(self):
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
+ def GetBundleExecutableFolderPath(self):
+ """Returns the qualified path to the bundle's executables folder. E.g.
+ Chromium.app/Contents/MacOS. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] in ('shared_library') or self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ elif self.spec['type'] in ('executable', 'loadable_module'):
+ return os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
+
+ def GetBundleJavaFolderPath(self):
+ """Returns the qualified path to the bundle's Java resource folder.
+ E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleResourceFolder(), 'Java')
+
+ def GetBundleFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/Frameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'Frameworks')
+
+ def GetBundleSharedFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedFrameworks')
+
+ def GetBundleSharedSupportFolderPath(self):
+ """Returns the qualified path to the bundle's shared support folder. E.g,
+ Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] == 'shared_library':
+ return self.GetBundleResourceFolder()
+ else:
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedSupport')
+
+ def GetBundlePlugInsFolderPath(self):
+ """Returns the qualified path to the bundle's plugins folder. E.g,
+ Chromium.app/Contents/PlugIns. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'PlugIns')
+
+ def GetBundleXPCServicesFolderPath(self):
+ """Returns the qualified path to the bundle's XPC services folder. E.g,
+ Chromium.app/Contents/XPCServices. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'XPCServices')
+
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
- if self.spec['type'] in ('executable', 'loadable_module'):
+ if self.spec['type'] in ('executable', 'loadable_module') or \
+ self.IsIosFramework():
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
@@ -322,6 +387,10 @@ def GetProductType(self):
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.application.watchapp'
+ if self._IsXCUiTest():
+ assert self._IsBundle(), ('mac_xcuitest_bundle flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.bundle.ui-testing'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
@@ -352,11 +421,8 @@ def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
- if self.spec['type'] in ('shared_library') or self.isIOS:
- path = self.GetBundleContentsFolderPath()
- elif self.spec['type'] in ('executable', 'loadable_module'):
- path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
- return os.path.join(path, self.GetExecutableName())
+ return os.path.join(self.GetBundleExecutableFolderPath(), \
+ self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
@@ -407,8 +473,8 @@ def GetExecutableName(self):
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
- """Returns the directory name of the bundle represented by this target. E.g.
- Chromium.app/Contents/MacOS/Chromium."""
+ """Returns the qualified path to the primary executable of the bundle
+ represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
@@ -429,7 +495,7 @@ def _GetSdkVersionInfoItem(self, sdk, infoitem):
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
- return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+ return GetStdout(['xcrun', '--sdk', sdk, infoitem])
except:
pass
@@ -438,6 +504,14 @@ def _SdkRoot(self, configname):
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
+ def _XcodePlatformPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root not in XcodeSettings._platform_path_cache:
+ platform_path = self._GetSdkVersionInfoItem(sdk_root,
+ '--show-sdk-platform-path')
+ XcodeSettings._platform_path_cache[sdk_root] = platform_path
+ return XcodeSettings._platform_path_cache[sdk_root]
+
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
@@ -446,7 +520,7 @@ def _SdkPath(self, configname=None):
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
- sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
+ sdk_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
@@ -477,6 +551,9 @@ def GetCflags(self, configname, arch=None):
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
+ if self.header_map_path:
+ cflags.append('-I%s' % self.header_map_path)
+
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
@@ -568,6 +645,10 @@ def GetCflags(self, configname, arch=None):
cflags += self._Settings().get('WARNING_CFLAGS', [])
+ platform_root = self._XcodePlatformPath(configname)
+ if platform_root and self._IsXCTest():
+ cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+
if sdk_root:
framework_root = sdk_root
else:
@@ -814,7 +895,8 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
- ldflags.append('-L' + product_dir)
+ # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
+ ldflags.append('-L' + (product_dir if product_dir != '.' else './'))
install_name = self.GetInstallName()
if install_name and self.spec['type'] != 'loadable_module':
@@ -831,19 +913,24 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+ platform_root = self._XcodePlatformPath(configname)
+ if sdk_root and platform_root and self._IsXCTest():
+ ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+ ldflags.append('-framework XCTest')
+
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
# Adds the link flags for extensions. These flags are common for all
# extensions and provide loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
- ldflags.append('-lpkstart')
if XcodeVersion() < '0900':
+ ldflags.append('-lpkstart')
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
+ else:
+ ldflags.append('-e _NSExtensionMain')
ldflags.append('-fapplication-extension')
- ldflags.append('-Xlinker -rpath '
- '-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
@@ -917,7 +1004,8 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet):
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
- if self.spec['type'] == 'loadable_module' and self._IsBundle():
+ if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension())
+ and self._IsBundle()):
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
@@ -972,13 +1060,25 @@ def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
- if not (self.isIOS and self.spec['type'] == 'executable'):
+ if not (self.isIOS and
+ (self.spec['type'] == 'executable' or self._IsXCTest()) or
+ self.IsIosFramework()):
return []
+ postbuilds = []
+ product_name = self.GetFullProductName()
settings = self.xcode_settings[configname]
+
+ # Xcode expects XCTests to be copied into the TEST_HOST dir.
+ if self._IsXCTest():
+ source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
+ test_host = os.path.dirname(settings.get('TEST_HOST'));
+ xctest_destination = os.path.join(test_host, 'PlugIns', product_name)
+ postbuilds.extend(['ditto %s %s' % (source, xctest_destination)])
+
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
- return []
+ return postbuilds
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
@@ -987,12 +1087,41 @@ def _GetIOSPostbuilds(self, configname, output_binary):
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
- return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
+ if self._IsXCTest():
+ # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
+ test_host = os.path.dirname(settings.get('TEST_HOST'));
+ frameworks_dir = os.path.join(test_host, 'Frameworks')
+ platform_root = self._XcodePlatformPath(configname)
+ frameworks = \
+ ['Developer/Library/PrivateFrameworks/IDEBundleInjection.framework',
+ 'Developer/Library/Frameworks/XCTest.framework']
+ for framework in frameworks:
+ source = os.path.join(platform_root, framework)
+ destination = os.path.join(frameworks_dir, os.path.basename(framework))
+ postbuilds.extend(['ditto %s %s' % (source, destination)])
+
+ # Then re-sign everything with 'preserve=True'
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), destination, True)
+ ])
+ plugin_dir = os.path.join(test_host, 'PlugIns')
+ targets = [os.path.join(plugin_dir, product_name), test_host]
+ for target in targets:
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), target, True)
+ ])
+
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
- settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
- settings.get('PROVISIONING_PROFILE', ''))
- ]
+ settings.get('PROVISIONING_PROFILE', ''),
+ os.path.join("${BUILT_PRODUCTS_DIR}", product_name), False)
+ ])
+ return postbuilds
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
@@ -1074,25 +1203,37 @@ def GetExtraPlistItems(self, configname=None):
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
+ compiler = self.xcode_settings[configname].get('GCC_VERSION')
+ if compiler is not None:
+ cache['DTCompiler'] = compiler
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
- cache['DTSDKName'] = sdk_root
- if xcode >= '0430':
+ sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version')
+ cache['DTSDKName'] = sdk_root + (sdk_version or '')
+ if xcode >= '0720':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductBuildVersion')
+ sdk_root, '--show-sdk-build-version')
+ elif xcode >= '0430':
+ cache['DTSDKBuild'] = sdk_version
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
- cache['DTPlatformName'] = cache['DTSDKName']
+ cache['MinimumOSVersion'] = self.xcode_settings[configname].get(
+ 'IPHONEOS_DEPLOYMENT_TARGET')
+ cache['DTPlatformName'] = sdk_root
+ cache['DTPlatformVersion'] = sdk_version
+
if configname.endswith("iphoneos"):
- cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductVersion')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+ cache['DTPlatformBuild'] = cache['DTSDKBuild']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field
+ # for simulator builds.
+ cache['DTPlatformBuild'] = ""
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
@@ -1334,7 +1475,10 @@ def IsMacBundle(flavor, spec):
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
- is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
+ is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \
+ int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \
+ (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
+
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
@@ -1444,13 +1588,14 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings: An optional dict with more values to add to the
result.
"""
+
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
- # These are filled in on a as-needed basis.
+ # These are filled in on an as-needed basis.
env = {
'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
@@ -1463,12 +1608,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
+ 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0],
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
+ if xcode_settings.mac_toolchain_dir:
+ env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir
+
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
@@ -1479,10 +1628,27 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
+ # xcodeproj_file.py sets the same Xcode subfolder value for this as for
+ # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value.
+ env['BUILT_FRAMEWORKS_DIR'] = \
+ os.path.join(built_products_dir + os.sep \
+ + xcode_settings.GetBundleFrameworksFolderPath())
env['CONTENTS_FOLDER_PATH'] = \
- xcode_settings.GetBundleContentsFolderPath()
+ xcode_settings.GetBundleContentsFolderPath()
+ env['EXECUTABLE_FOLDER_PATH'] = \
+ xcode_settings.GetBundleExecutableFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
+ env['JAVA_FOLDER_PATH'] = xcode_settings.GetBundleJavaFolderPath()
+ env['FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleFrameworksFolderPath()
+ env['SHARED_FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedFrameworksFolderPath()
+ env['SHARED_SUPPORT_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedSupportFolderPath()
+ env['PLUGINS_FOLDER_PATH'] = xcode_settings.GetBundlePlugInsFolderPath()
+ env['XPCSERVICES_FOLDER_PATH'] = \
+ xcode_settings.GetBundleXPCServicesFolderPath()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
@@ -1496,8 +1662,6 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
sdk_root = xcode_settings._SdkRoot(configuration)
if not sdk_root:
sdk_root = xcode_settings._XcodeSdkPath('')
- if sdk_root is None:
- sdk_root = ''
env['SDKROOT'] = sdk_root
if not additional_settings:
@@ -1613,11 +1777,12 @@ def _AddIOSDeviceConfigurations(targets):
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
- for config_name, config_dict in dict(configs).iteritems():
- iphoneos_config_dict = copy.deepcopy(config_dict)
+ for config_name, simulator_config_dict in dict(configs).iteritems():
+ iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
- configs[config_name + '-iphonesimulator'] = config_dict
+ configs[config_name + '-iphonesimulator'] = simulator_config_dict
if toolset == 'target':
+ simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator'
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
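The effect of the loop above is easier to see on a toy configuration dict. The sketch below mirrors the renamed variables but omits the toolset check and gyp's full target dicts; the config name and settings are invented.

```
# Sketch of _AddIOSDeviceConfigurations above: each config gains an explicit
# device ('-iphoneos') copy, while the original and '-iphonesimulator' entries
# share one dict pinned to the simulator SDK.
import copy

def add_ios_device_configurations(configs):
    for name, simulator_config in dict(configs).items():
        device_config = copy.deepcopy(simulator_config)
        configs[name + '-iphoneos'] = device_config
        configs[name + '-iphonesimulator'] = simulator_config
        simulator_config['xcode_settings']['SDKROOT'] = 'iphonesimulator'
        device_config['xcode_settings']['SDKROOT'] = 'iphoneos'
    return configs

print(add_ios_device_configurations({'Debug': {'xcode_settings': {}}}))
# -> 'Debug' and 'Debug-iphonesimulator' use iphonesimulator,
#    'Debug-iphoneos' uses iphoneos.
```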
diff --git a/gyp/pylib/gyp/xcode_ninja.py b/gyp/pylib/gyp/xcode_ninja.py
index 3820d6bf04..bc76ffff4e 100644
--- a/gyp/pylib/gyp/xcode_ninja.py
+++ b/gyp/pylib/gyp/xcode_ninja.py
@@ -92,11 +92,16 @@ def _TargetFromSpec(old_spec, params):
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
+ for key in ['BUNDLE_LOADER', 'TEST_HOST']:
+ if key in old_xcode_settings:
+ new_xcode_settings[key] = old_xcode_settings[key]
+
ninja_target['configurations'][config] = {}
ninja_target['configurations'][config]['xcode_settings'] = \
new_xcode_settings
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
+ ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0)
ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
ninja_target['ios_watchkit_extension'] = \
old_spec.get('ios_watchkit_extension', 0)
@@ -138,9 +143,10 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
if target_extras is not None and re.search(target_extras, target_name):
return True
- # Otherwise just show executable targets.
- if spec.get('type', '') == 'executable' and \
- spec.get('product_extension', '') != 'bundle':
+ # Otherwise just show executable targets and xc_tests.
+ if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
+ (spec.get('type', '') == 'executable' and
+ spec.get('product_extension', '') != 'bundle')):
# If there is a filter and the target does not match, exclude the target.
if executable_target_pattern is not None:
@@ -227,13 +233,26 @@ def CreateWrapper(target_list, target_dicts, data, params):
# Tell Xcode to look everywhere for headers.
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
+ # Put excluded files into the sources target so they can be opened in Xcode.
+ skip_excluded_files = \
+ not generator_flags.get('xcode_ninja_list_excluded_files', True)
+
sources = []
for target, target_dict in target_dicts.iteritems():
base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
+
+ if not skip_excluded_files:
+ files.extend(target_dict.get('sources_excluded', []) +
+ target_dict.get('mac_bundle_resources_excluded', []))
+
for action in target_dict.get('actions', []):
files.extend(action.get('inputs', []))
+
+ if not skip_excluded_files:
+ files.extend(action.get('inputs_excluded', []))
+
# Remove files starting with $. These are mostly intermediate files for the
# build system.
files = [ file for file in files if not file.startswith('$')]
diff --git a/gyp/pylib/gyp/xcodeproj_file.py b/gyp/pylib/gyp/xcodeproj_file.py
index d08b7f7770..e69235f724 100644
--- a/gyp/pylib/gyp/xcodeproj_file.py
+++ b/gyp/pylib/gyp/xcodeproj_file.py
@@ -1945,24 +1945,40 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
'name': [0, str, 0, 0],
})
- # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
- # "DIR", match group 3 is "path" or None.
- path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
- # path_tree_to_subfolder maps names of Xcode variables to the associated
- # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
- path_tree_to_subfolder = {
- 'BUILT_FRAMEWORKS_DIR': 10, # Frameworks Directory
- 'BUILT_PRODUCTS_DIR': 16, # Products Directory
- # Other types that can be chosen via the Xcode UI.
- # TODO(mark): Map Xcode variable names to these.
- # : 1, # Wrapper
- # : 6, # Executables: 6
- # : 7, # Resources
- # : 15, # Java Resources
- # : 11, # Shared Frameworks
- # : 12, # Shared Support
- # : 13, # PlugIns
+ # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
+ # Match group 1 is "DIR", group 3 is "path" or "$(DIR2") or "$(DIR2)/path"
+ # or None. If group 3 is "path", group 4 will be None otherwise group 4 is
+ # "DIR2" and group 6 is "path".
+ path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$')
+
+ # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
+ # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
+ # object.
+ path_tree_first_to_subfolder = {
+ # Types that can be chosen via the Xcode UI.
+ 'BUILT_PRODUCTS_DIR': 16, # Products Directory
+ 'BUILT_FRAMEWORKS_DIR': 10, # Not an official Xcode macro.
+ # Existed before support for the
+ # names below was added. Maps to
+ # "Frameworks".
+ }
+
+ path_tree_second_to_subfolder = {
+ 'WRAPPER_NAME': 1, # Wrapper
+ # Although Xcode's friendly name is "Executables", the destination
+ # is demonstrably the value of the build setting
+ # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
+ 'EXECUTABLE_FOLDER_PATH': 6, # Executables.
+ 'UNLOCALIZED_RESOURCES_FOLDER_PATH': 7, # Resources
+ 'JAVA_FOLDER_PATH': 15, # Java Resources
+ 'FRAMEWORKS_FOLDER_PATH': 10, # Frameworks
+ 'SHARED_FRAMEWORKS_FOLDER_PATH': 11, # Shared Frameworks
+ 'SHARED_SUPPORT_FOLDER_PATH': 12, # Shared Support
+ 'PLUGINS_FOLDER_PATH': 13, # PlugIns
+ # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
+ # Note that it re-uses the BUILT_PRODUCTS_DIR value for
+ # dstSubfolderSpec. dstPath is set below.
+ 'XPCSERVICES_FOLDER_PATH': 16, # XPC Services.
}
def Name(self):
@@ -1983,14 +1999,61 @@ def SetDestination(self, path):
path_tree_match = self.path_tree_re.search(path)
if path_tree_match:
- # Everything else needs to be relative to an Xcode variable.
- path_tree = path_tree_match.group(1)
- relative_path = path_tree_match.group(3)
-
- if path_tree in self.path_tree_to_subfolder:
- subfolder = self.path_tree_to_subfolder[path_tree]
+      path_tree = path_tree_match.group(1)
+ if path_tree in self.path_tree_first_to_subfolder:
+ subfolder = self.path_tree_first_to_subfolder[path_tree]
+ relative_path = path_tree_match.group(3)
if relative_path is None:
relative_path = ''
+
+ if subfolder == 16 and path_tree_match.group(4) is not None:
+ # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
+ # second element is possibly one of the variable names in
+ # path_tree_second_to_subfolder. Xcode sets the values of all these
+ # variables to relative paths so .gyp files must prefix them with
+ # BUILT_PRODUCTS_DIR, e.g.
+ # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then
+ # xcode_emulation.py can export these variables with the same values
+ # as Xcode yet make & ninja files can determine the absolute path
+ # to the target. Xcode uses the dstSubfolderSpec value set here
+ # to determine the full path.
+ #
+ # An alternative of xcode_emulation.py setting the values to absolute
+ # paths when exporting these variables has been ruled out because
+ # then the values would be different depending on the build tool.
+ #
+ # Another alternative is to invent new names for the variables used
+ # to match to the subfolder indices in the second table. .gyp files
+ # then will not need to prepend $(BUILT_PRODUCTS_DIR) because
+ # xcode_emulation.py can set the values of those variables to
+ # the absolute paths when exporting. This is possibly the thinking
+ # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner.
+ #
+ # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because
+ # this same way could be used to specify destinations in .gyp files
+ # that pre-date this addition to GYP. However they would only work
+ # with the Xcode generator. The previous version of xcode_emulation.py
+ # does not export these variables. Such files will get the benefit
+ # of the Xcode UI showing the proper destination name simply by
+ # regenerating the projects with this version of GYP.
+ path_tree = path_tree_match.group(4)
+ relative_path = path_tree_match.group(6)
+ separator = '/'
+
+ if path_tree in self.path_tree_second_to_subfolder:
+ subfolder = self.path_tree_second_to_subfolder[path_tree]
+ if relative_path is None:
+ relative_path = ''
+ separator = ''
+ if path_tree == 'XPCSERVICES_FOLDER_PATH':
+ relative_path = '$(CONTENTS_FOLDER_PATH)/XPCServices' \
+ + separator + relative_path
+ else:
+ # subfolder = 16 from above
+ # The second element of the path is an unrecognized variable.
+ # Include it and any remaining elements in relative_path.
+            relative_path = path_tree_match.group(3)
+
else:
# The path starts with an unrecognized Xcode variable
# name like $(SRCROOT). Xcode will still handle this
@@ -2261,6 +2324,8 @@ class PBXNativeTarget(XCTarget):
'', ''],
'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
'', '.xctest'],
+ 'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle',
+ '', '.xctest'],
'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
'', '.so'],
'com.apple.product-type.kernel-extension': ['wrapper.kext',
@@ -2317,7 +2382,9 @@ def __init__(self, properties=None, id=None, parent=None,
force_extension = suffix[1:]
if self._properties['productType'] == \
- 'com.apple.product-type-bundle.unit.test':
+ 'com.apple.product-type-bundle.unit.test' or \
+ self._properties['productType'] == \
+         'com.apple.product-type.bundle.ui-testing':
if force_extension is None:
force_extension = suffix[1:]
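The widened `path_tree_re` is the core of the xcodeproj_file.py change: a copy-phase destination may now name a second Xcode variable after `$(BUILT_PRODUCTS_DIR)`, and the two lookup tables map that second variable onto the corresponding `dstSubfolderSpec`. A small, illustrative check of how the match groups fall out (the example destinations are made up, not taken from the patch):

```python
import re

path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$')

for dest in ('$(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH)/MyPlugin',
             '$(BUILT_PRODUCTS_DIR)/helpers',
             '$(SRCROOT)'):
    m = path_tree_re.search(dest)
    # group(1): first variable, group(4): optional second variable,
    # group(6): path after the second variable, group(3): everything after '/'
    print((m.group(1), m.group(4), m.group(6), m.group(3)))

# ('BUILT_PRODUCTS_DIR', 'PLUGINS_FOLDER_PATH', 'MyPlugin',
#  '$(PLUGINS_FOLDER_PATH)/MyPlugin')
# ('BUILT_PRODUCTS_DIR', None, None, 'helpers')
# ('SRCROOT', None, None, None)
```

When group 1 is BUILT_PRODUCTS_DIR (dstSubfolderSpec 16) and group 4 is one of the names in `path_tree_second_to_subfolder`, `SetDestination` switches to the second table; an unrecognized second element simply stays in the relative path, and an unrecognized first variable such as `$(SRCROOT)` falls through to the existing handling.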
diff --git a/gyp/tools/pretty_gyp.py b/gyp/tools/pretty_gyp.py
index c51d35872c..d5736bbd4a 100755
--- a/gyp/tools/pretty_gyp.py
+++ b/gyp/tools/pretty_gyp.py
@@ -118,24 +118,23 @@ def prettyprint_input(lines):
basic_offset = 2
last_line = ""
for line in lines:
- if COMMENT_RE.match(line):
- print line
- else:
- line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
- if len(line) > 0:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ brace_diff = 0
+ if not COMMENT_RE.match(line):
(brace_diff, after) = count_braces(line)
- if brace_diff != 0:
- if after:
- print " " * (basic_offset * indent) + line
- indent += brace_diff
- else:
- indent += brace_diff
- print " " * (basic_offset * indent) + line
+ if brace_diff != 0:
+ if after:
+ print " " * (basic_offset * indent) + line
+ indent += brace_diff
else:
+ indent += brace_diff
print " " * (basic_offset * indent) + line
else:
- print ""
- last_line = line
+ print " " * (basic_offset * indent) + line
+ else:
+ print ""
+ last_line = line
def main():
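The pretty_gyp.py rewrite changes only how comment lines are handled: they still skip brace counting, but they are now printed with the current indentation instead of being echoed flush left. A condensed sketch of the new loop, with a deliberately simplified stand-in for `count_braces` (the real helper also tracks brackets, parentheses and quoted strings), illustrates the effect:

```python
import re

COMMENT_RE = re.compile(r'\s*#.*')

def count_braces(line):
    # Simplified stand-in: net curly-brace balance, and whether the line
    # opens a block (the real helper is considerably more thorough).
    diff = line.count('{') - line.count('}')
    return diff, diff > 0

def prettyprint(lines, basic_offset=2):
    indent = 0
    out = []
    for line in lines:
        line = line.strip('\r\n\t ')
        if len(line) > 0:
            brace_diff = 0
            if not COMMENT_RE.match(line):
                brace_diff, after = count_braces(line)
            if brace_diff != 0:
                if after:
                    out.append(' ' * (basic_offset * indent) + line)
                    indent += brace_diff
                else:
                    indent += brace_diff
                    out.append(' ' * (basic_offset * indent) + line)
            else:
                out.append(' ' * (basic_offset * indent) + line)
        else:
            out.append('')
    return '\n'.join(out)

# Comments are now indented along with their surrounding block:
print(prettyprint(['{', '# a comment', "'foo': 'bar',", '}']))
```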
diff --git a/lib/configure.js b/lib/configure.js
index 1351576d12..19374b7275 100644
--- a/lib/configure.js
+++ b/lib/configure.js
@@ -72,8 +72,10 @@ function configure (gyp, argv, callback) {
return callback(new Error('Invalid version number: ' + release.version))
}
- // ensure that the target node version's dev files are installed
- gyp.opts.ensure = true
+ // If the tarball option is set, always remove and reinstall the headers
+ // into devdir. Otherwise only install if they're not already there.
+ gyp.opts.ensure = gyp.opts.tarball ? false : true
+
gyp.commands.install([ release.version ], function (err, version) {
if (err) return callback(err)
log.verbose('get node dir', 'target node version installed:', release.versionDir)
diff --git a/lib/install.js b/lib/install.js
index fa2e1c5430..4176a09578 100644
--- a/lib/install.js
+++ b/lib/install.js
@@ -15,10 +15,8 @@ var fs = require('graceful-fs')
, rm = require('rimraf')
, path = require('path')
, crypto = require('crypto')
- , zlib = require('zlib')
, log = require('npmlog')
, semver = require('semver')
- , fstream = require('fstream')
, request = require('request')
, minimatch = require('minimatch')
, mkdir = require('mkdirp')
@@ -144,41 +142,33 @@ function install (gyp, argv, callback) {
var tarPath = gyp.opts.tarball
var badDownload = false
, extractCount = 0
- , gunzip = zlib.createGunzip()
- , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid })
var contentShasums = {}
var expectShasums = {}
// checks if a file to be extracted from the tarball is valid.
// only .h header files and the gyp files get extracted
- function isValid () {
- var name = this.path.substring(devDir.length + 1)
- var isValid = valid(name)
- if (name === '' && this.type === 'Directory') {
- // the first directory entry is ok
- return true
- }
+ function isValid (path, entry) {
+ var isValid = valid(path)
if (isValid) {
- log.verbose('extracted file from tarball', name)
+ log.verbose('extracted file from tarball', path)
extractCount++
} else {
// invalid
- log.silly('ignoring from tarball', name)
+ log.silly('ignoring from tarball', path)
}
return isValid
}
- gunzip.on('error', cb)
- extracter.on('error', cb)
- extracter.on('end', afterTarball)
-
- // download the tarball, gunzip and extract!
+ // download the tarball and extract!
if (tarPath) {
- var input = fs.createReadStream(tarPath)
- input.pipe(gunzip).pipe(extracter)
- return
+ return tar.extract({
+ file: tarPath,
+ strip: 1,
+ filter: isValid,
+ cwd: devDir
+ }).then(afterTarball, cb)
}
try {
@@ -218,7 +208,11 @@ function install (gyp, argv, callback) {
})
// start unzipping and untaring
- req.pipe(gunzip).pipe(extracter)
+ res.pipe(tar.extract({
+ strip: 1,
+ cwd: devDir,
+ filter: isValid
+ }).on('close', afterTarball).on('error', cb))
})
// invoked after the tarball has finished being extracted
diff --git a/package.json b/package.json
index a66a546889..29bee3d615 100644
--- a/package.json
+++ b/package.json
@@ -11,7 +11,7 @@
"bindings",
"gyp"
],
- "version": "3.6.2",
+ "version": "4.0.0",
"installVersion": 9,
"author": "Nathan Rajlich (http://tootallnate.net)",
"repository": {
@@ -22,7 +22,6 @@
"bin": "./bin/node-gyp.js",
"main": "./lib/node-gyp.js",
"dependencies": {
- "fstream": "^1.0.0",
"glob": "^7.0.3",
"graceful-fs": "^4.1.2",
"minimatch": "^3.0.2",
@@ -33,11 +32,11 @@
"request": "2",
"rimraf": "2",
"semver": "~5.3.0",
- "tar": "^2.0.0",
+ "tar": "^3.1.3",
"which": "1"
},
"engines": {
- "node": ">= 0.8.0"
+ "node": ">= 4.0.0"
},
"devDependencies": {
"tape": "~4.2.0",
diff --git a/test/fixtures/test-charmap.py b/test/fixtures/test-charmap.py
new file mode 100644
index 0000000000..43e0c5ffc8
--- /dev/null
+++ b/test/fixtures/test-charmap.py
@@ -0,0 +1,19 @@
+import sys
+import locale
+
+reload(sys)
+
+def main():
+ encoding = locale.getdefaultlocale()[1]
+ sys.setdefaultencoding(encoding)
+ textmap = {
+ 'cp936': u'\u4e2d\u6587',
+ 'cp1252': u'Lat\u012Bna',
+ 'cp932': u'\u306b\u307b\u3093\u3054'
+ }
+ if textmap.has_key(encoding):
+ print textmap[encoding]
+ return True
+
+if __name__ == '__main__':
+ print main()
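The fixture above intentionally targets Python 2 (`reload(sys)`, `sys.setdefaultencoding` and the print statement), matching the Python versions node-gyp supports. It prints a locale-specific sample string followed by `True` when the console encoding can represent it; if printing fails, the driver in test-addon.js treats the charmap as unusable and skips the non-ASCII test. A rough Python 3 rendering of the same idea, purely for illustration, could look like:

```python
import locale

def main():
    encoding = locale.getdefaultlocale()[1]
    textmap = {
        'cp936': u'\u4e2d\u6587',
        'cp1252': u'Lat\u012Bna',
        'cp932': u'\u306b\u307b\u3093\u3054',
    }
    if encoding in textmap:
        # Raises UnicodeEncodeError when stdout cannot represent the text,
        # which is exactly the failure the test driver wants to detect.
        print(textmap[encoding])
        return True

if __name__ == '__main__':
    print(main())
```

`checkCharmapValid()` in test-addon.js then inspects the last line of the fixture's output and only runs the non-ASCII-path test when that line is the literal string `True`.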
diff --git a/test/test-addon.js b/test/test-addon.js
index c2a71f4498..7ace1caf6a 100644
--- a/test/test-addon.js
+++ b/test/test-addon.js
@@ -1,10 +1,35 @@
'use strict'
var test = require('tape')
-var execFile = require('child_process').execFile
var path = require('path')
+var fs = require('graceful-fs')
+var child_process = require('child_process')
var addonPath = path.resolve(__dirname, 'node_modules', 'hello_world')
var nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js')
+var execFileSync = child_process.execFileSync
+var execFile = child_process.execFile
+
+function runHello() {
+ var testCode = "console.log(require('hello_world').hello())"
+ return execFileSync('node', ['-e', testCode], { cwd: __dirname }).toString()
+}
+
+function getEncoding() {
+ var code = 'import locale;print locale.getdefaultlocale()[1]'
+ return execFileSync('python', ['-c', code]).toString().trim()
+}
+
+function checkCharmapValid() {
+ var data
+ try {
+ data = execFileSync('python', ['fixtures/test-charmap.py'],
+ { cwd: __dirname })
+ } catch (err) {
+ return false
+ }
+ var lines = data.toString().trim().split('\n')
+ return lines.pop() === 'True'
+}
test('build simple addon', function (t) {
t.plan(3)
@@ -16,12 +41,72 @@ test('build simple addon', function (t) {
var lastLine = logLines[logLines.length-1]
t.strictEqual(err, null)
t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
+ t.strictEqual(runHello().trim(), 'world')
+ })
+ proc.stdout.setEncoding('utf-8')
+ proc.stderr.setEncoding('utf-8')
+})
+
+test('build simple addon in path with non-ascii characters', function (t) {
+ t.plan(1)
+
+ if (!checkCharmapValid()) {
+    return t.skip('Python console app can\'t encode non-ASCII characters.')
+ }
+
+ var testDirNames = {
+ 'cp936': '文件夹',
+ 'cp1252': 'Latīna',
+ 'cp932': 'フォルダ'
+ }
+  // Select non-ASCII characters for the current encoding
+ var testDirName = testDirNames[getEncoding()]
+  // If the encoding is UTF-8 or anything else, there is no need to test
+ if (!testDirName) {
+ return t.skip('no need to test')
+ }
+
+ t.plan(3)
+
+ var data, configPath = path.join(addonPath, 'build', 'config.gypi')
+ try {
+ data = fs.readFileSync(configPath, 'utf8')
+ } catch (err) {
+ t.error(err)
+ return
+ }
+ var config = JSON.parse(data.replace(/\#.+\n/, ''))
+ var nodeDir = config.variables.nodedir
+ var testNodeDir = path.join(addonPath, testDirName)
+  // Create a symbolic link to a path with non-ASCII characters
+ try {
+ fs.symlinkSync(nodeDir, testNodeDir, 'dir')
+ } catch (err) {
+ switch (err.code) {
+ case 'EEXIST': break
+ case 'EPERM':
+        t.error(err, 'Please try running the console as an administrator')
+ return
+ default:
+ t.error(err)
+ return
+ }
+ }
+
+ var cmd = [nodeGyp, 'rebuild', '-C', addonPath,
+ '--loglevel=verbose', '-nodedir=' + testNodeDir]
+ var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
try {
- var binding = require('hello_world')
- t.strictEqual(binding.hello(), 'world')
- } catch (error) {
- t.error(error, 'load module')
+      fs.unlinkSync(testNodeDir)
+ } catch (err) {
+ t.error(err)
}
+
+ var logLines = stderr.toString().trim().split(/\r?\n/)
+ var lastLine = logLines[logLines.length-1]
+ t.strictEqual(err, null)
+ t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
+ t.strictEqual(runHello().trim(), 'world')
})
proc.stdout.setEncoding('utf-8')
proc.stderr.setEncoding('utf-8')