
Commit 8632af3

tools: update gyp to r1601
Among other things, this should make it easier for people to build node.js on openbsd.
1 parent 329b538 commit 8632af3

23 files changed: +722 additions, -500 deletions

tools/gyp/AUTHORS

Lines changed: 2 additions & 0 deletions
@@ -2,5 +2,7 @@
 # Name or Organization <email address>
 
 Google Inc.
+Bloomberg Finance L.P.
+
 Steven Knight <[email protected]>
 Ryan Norton <[email protected]>

tools/gyp/PRESUBMIT.py

Lines changed: 10 additions & 3 deletions
@@ -75,13 +75,20 @@ def CheckChangeOnUpload(input_api, output_api):
 
 def CheckChangeOnCommit(input_api, output_api):
   report = []
+
+  # Accept any year number from 2009 to the current year.
+  current_year = int(input_api.time.strftime('%Y'))
+  allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
+  years_re = '(' + '|'.join(allowed_years) + ')'
+
+  # The (c) is deprecated, but tolerate it until it's removed from all files.
   license = (
-      r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
+      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
       r'.*? Use of this source code is governed by a BSD-style license that '
       r'can be\n'
       r'.*? found in the LICENSE file\.\n'
   ) % {
-      'year': input_api.time.strftime('%Y'),
+      'year': years_re,
   }
 
   report.extend(input_api.canned_checks.PanProjectChecks(
@@ -106,4 +113,4 @@ def CheckChangeOnCommit(input_api, output_api):
 
 
 def GetPreferredTrySlaves():
-  return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
+  return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android']
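The presubmit change replaces the single current-year license check with an alternation over every year from 2009 onward, and makes the "(c)" marker optional. A minimal standalone sketch of the resulting pattern (using range instead of xrange so it also runs under Python 3; only the first line of the license header is checked here):

import re
import time

# Build e.g. '(2013|2012|2011|2010|2009)' -- newest year first, as in the check above.
current_year = int(time.strftime('%Y'))
years_re = '(' + '|'.join(str(y) for y in reversed(range(2009, current_year + 1))) + ')'

# '(\(c\) )?' makes the deprecated "(c)" marker optional.
license_re = re.compile(
    r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.' % {'year': years_re})

print(bool(license_re.match('// Copyright (c) 2013 Google Inc. All rights reserved.')))  # True
print(bool(license_re.match('# Copyright 2009 Google Inc. All rights reserved.')))       # True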

tools/gyp/buildbot/buildbot_run.py

Lines changed: 0 additions & 98 deletions
This file was deleted.

tools/gyp/data/win/large-pdb-shim.cc

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
+// then used during the final link for modules that have large PDBs. Otherwise,
+// the linker will generate a pdb with a page size of 1KB, which imposes a limit
+// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
+// (rather than the linker), this limit is avoided. With this in place PDBs may
+// grow to 2GB.
+//
+// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
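The shim is opt-in per target. As a rough illustration, a hypothetical gyp target that the msvs_large_pdb handling added in MSVSUtil.py below would pick up (the target and source names are invented; only the 'msvs_large_pdb' key is what the new code checks for):

# example.gyp (hypothetical) -- gyp files are Python-style literals.
{
  'targets': [
    {
      'target_name': 'big_module',     # invented name
      'type': 'shared_library',
      'sources': ['big_module.cc'],    # invented source
      'msvs_large_pdb': 1,             # opt in to the 4KB-pagesize PDB shim
    },
  ],
}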

tools/gyp/gyptest.py

Lines changed: 1 addition & 0 deletions
@@ -212,6 +212,7 @@ def main(argv=None):
   format_list = {
     'freebsd7': ['make'],
     'freebsd8': ['make'],
+    'openbsd5': ['make'],
     'cygwin': ['msvs'],
     'win32': ['msvs', 'ninja'],
     'linux2': ['make', 'ninja'],
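The new entry matters because the table is keyed on the platform string; on OpenBSD 5.x, Python 2 reports sys.platform as 'openbsd5', which previously had no default format. A minimal sketch of that lookup, using only the entries visible in the hunk above (the fallback is an assumption for illustration, not gyptest.py's actual behaviour):

import sys

# Entries copied from the diff above; everything else here is illustrative.
FORMAT_LIST = {
    'freebsd7': ['make'],
    'freebsd8': ['make'],
    'openbsd5': ['make'],
    'cygwin': ['msvs'],
    'win32': ['msvs', 'ninja'],
    'linux2': ['make', 'ninja'],
}

def default_formats(platform=sys.platform):
  # Hypothetical fallback, shown only to make the lookup runnable.
  return FORMAT_LIST.get(platform, ['make'])

print(default_formats('openbsd5'))  # ['make']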

tools/gyp/pylib/gyp/MSVSUtil.py

Lines changed: 212 additions & 0 deletions
@@ -0,0 +1,212 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions shared amongst the Windows generators."""
+
+import copy
+import os
+
+
+_TARGET_TYPE_EXT = {
+  'executable': '.exe',
+  'shared_library': '.dll'
+}
+
+
+def _GetLargePdbShimCcPath():
+  """Returns the path of the large_pdb_shim.cc file."""
+  this_dir = os.path.abspath(os.path.dirname(__file__))
+  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
+  win_data_dir = os.path.join(src_dir, 'data', 'win')
+  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
+  return large_pdb_shim_cc
+
+
+def _DeepCopySomeKeys(in_dict, keys):
+  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
+
+  Arguments:
+    in_dict: The dictionary to copy.
+    keys: The keys to be copied. If a key is in this list and doesn't exist in
+      |in_dict| this is not an error.
+  Returns:
+    The partially deep-copied dictionary.
+  """
+  d = {}
+  for key in keys:
+    if key not in in_dict:
+      continue
+    d[key] = copy.deepcopy(in_dict[key])
+  return d
+
+
+def _SuffixName(name, suffix):
+  """Add a suffix to the end of a target.
+
+  Arguments:
+    name: name of the target (foo#target)
+    suffix: the suffix to be added
+  Returns:
+    Target name with suffix added (foo_suffix#target)
+  """
+  parts = name.rsplit('#', 1)
+  parts[0] = '%s_%s' % (parts[0], suffix)
+  return '#'.join(parts)
+
+
+def _ShardName(name, number):
+  """Add a shard number to the end of a target.
+
+  Arguments:
+    name: name of the target (foo#target)
+    number: shard number
+  Returns:
+    Target name with shard added (foo_1#target)
+  """
+  return _SuffixName(name, str(number))
+
+
+def ShardTargets(target_list, target_dicts):
+  """Shard some targets apart to work around the linkers limits.
+
+  Arguments:
+    target_list: List of target pairs: 'base/base.gyp:base'.
+    target_dicts: Dict of target properties keyed on target pair.
+  Returns:
+    Tuple of the new sharded versions of the inputs.
+  """
+  # Gather the targets to shard, and how many pieces.
+  targets_to_shard = {}
+  for t in target_dicts:
+    shards = int(target_dicts[t].get('msvs_shard', 0))
+    if shards:
+      targets_to_shard[t] = shards
+  # Shard target_list.
+  new_target_list = []
+  for t in target_list:
+    if t in targets_to_shard:
+      for i in range(targets_to_shard[t]):
+        new_target_list.append(_ShardName(t, i))
+    else:
+      new_target_list.append(t)
+  # Shard target_dict.
+  new_target_dicts = {}
+  for t in target_dicts:
+    if t in targets_to_shard:
+      for i in range(targets_to_shard[t]):
+        name = _ShardName(t, i)
+        new_target_dicts[name] = copy.copy(target_dicts[t])
+        new_target_dicts[name]['target_name'] = _ShardName(
+            new_target_dicts[name]['target_name'], i)
+        sources = new_target_dicts[name].get('sources', [])
+        new_sources = []
+        for pos in range(i, len(sources), targets_to_shard[t]):
+          new_sources.append(sources[pos])
+        new_target_dicts[name]['sources'] = new_sources
+    else:
+      new_target_dicts[t] = target_dicts[t]
+  # Shard dependencies.
+  for t in new_target_dicts:
+    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
+    new_dependencies = []
+    for d in dependencies:
+      if d in targets_to_shard:
+        for i in range(targets_to_shard[d]):
+          new_dependencies.append(_ShardName(d, i))
+      else:
+        new_dependencies.append(d)
+    new_target_dicts[t]['dependencies'] = new_dependencies
+
+  return (new_target_list, new_target_dicts)
+
+
+def InsertLargePdbShims(target_list, target_dicts, vars):
+  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
+
+  This is a workaround for targets with PDBs greater than 1GB in size, the
+  limit for the 1KB pagesize PDBs created by the linker by default.
+
+  Arguments:
+    target_list: List of target pairs: 'base/base.gyp:base'.
+    target_dicts: Dict of target properties keyed on target pair.
+    vars: A dictionary of common GYP variables with generator-specific values.
+  Returns:
+    Tuple of the shimmed version of the inputs.
+  """
+  # Determine which targets need shimming.
+  targets_to_shim = []
+  for t in target_dicts:
+    target_dict = target_dicts[t]
+    # We only want to shim targets that have msvs_large_pdb enabled.
+    if not int(target_dict.get('msvs_large_pdb', 0)):
+      continue
+    # This is intended for executable, shared_library and loadable_module
+    # targets where every configuration is set up to produce a PDB output.
+    # If any of these conditions is not true then the shim logic will fail
+    # below.
+    targets_to_shim.append(t)
+
+  large_pdb_shim_cc = _GetLargePdbShimCcPath()
+
+  for t in targets_to_shim:
+    target_dict = target_dicts[t]
+    target_name = target_dict.get('target_name')
+
+    base_dict = _DeepCopySomeKeys(target_dict,
+        ['configurations', 'default_configuration', 'toolset'])
+
+    # This is the dict for copying the source file (part of the GYP tree)
+    # to the intermediate directory of the project. This is necessary because
+    # we can't always build a relative path to the shim source file (on Windows
+    # GYP and the project may be on different drives), and Ninja hates absolute
+    # paths (it ends up generating the .obj and .obj.d alongside the source
+    # file, polluting GYPs tree).
+    copy_suffix = '_large_pdb_copy'
+    copy_target_name = target_name + '_' + copy_suffix
+    full_copy_target_name = _SuffixName(t, copy_suffix)
+    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
+    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
+    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
+    copy_dict = copy.deepcopy(base_dict)
+    copy_dict['target_name'] = copy_target_name
+    copy_dict['type'] = 'none'
+    copy_dict['sources'] = [ large_pdb_shim_cc ]
+    copy_dict['copies'] = [{
+      'destination': shim_cc_dir,
+      'files': [ large_pdb_shim_cc ]
+    }]
+
+    # This is the dict for the PDB generating shim target. It depends on the
+    # copy target.
+    shim_suffix = '_large_pdb_shim'
+    shim_target_name = target_name + '_' + shim_suffix
+    full_shim_target_name = _SuffixName(t, shim_suffix)
+    shim_dict = copy.deepcopy(base_dict)
+    shim_dict['target_name'] = shim_target_name
+    shim_dict['type'] = 'static_library'
+    shim_dict['sources'] = [ shim_cc_path ]
+    shim_dict['dependencies'] = [ full_copy_target_name ]
+
+    # Set up the shim to output its PDB to the same location as the final linker
+    # target.
+    for config in shim_dict.get('configurations').itervalues():
+      msvs = config.setdefault('msvs_settings')
+
+      linker = msvs.pop('VCLinkerTool')  # We want to clear this dict.
+      pdb_path = linker.get('ProgramDatabaseFile')
+
+      compiler = msvs.setdefault('VCCLCompilerTool', {})
+      compiler.setdefault('DebugInformationFormat', '3')
+      compiler.setdefault('ProgramDataBaseFileName', pdb_path)
+
+    # Add the new targets.
+    target_list.append(full_copy_target_name)
+    target_list.append(full_shim_target_name)
+    target_dicts[full_copy_target_name] = copy_dict
+    target_dicts[full_shim_target_name] = shim_dict
+
+    # Update the original target to depend on the shim target.
+    target_dict.setdefault('dependencies', []).append(full_shim_target_name)
+
+  return (target_list, target_dicts)
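For reference, a small standalone exercise of the sharding helpers above. The target name, shard count and sources are invented; only the splitting behaviour comes from ShardTargets (assuming the module is importable as gyp.MSVSUtil with tools/gyp/pylib on the path):

from gyp.MSVSUtil import ShardTargets

# Hypothetical input: one target asking to be split into three shards.
target_list = ['base/base.gyp:base#target']
target_dicts = {
    'base/base.gyp:base#target': {
        'target_name': 'base',
        'msvs_shard': 3,
        'sources': ['a.cc', 'b.cc', 'c.cc', 'd.cc'],
    },
}

new_list, new_dicts = ShardTargets(target_list, target_dicts)
print(new_list)
# ['base/base.gyp:base_0#target', 'base/base.gyp:base_1#target',
#  'base/base.gyp:base_2#target']
print(new_dicts['base/base.gyp:base_0#target']['sources'])  # ['a.cc', 'd.cc']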

tools/gyp/pylib/gyp/MSVSVersion.py

Lines changed: 8 additions & 1 deletion
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
+# Copyright (c) 2013 Google Inc. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -355,6 +355,13 @@ def SelectVisualStudioVersion(version='auto'):
     '2012': ('11.0',),
     '2012e': ('11.0',),
   }
+  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
+  if override_path:
+    msvs_version = os.environ.get('GYP_MSVS_VERSION')
+    if not msvs_version or 'e' not in msvs_version:
+      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
+                       'set to an "e" version (e.g. 2010e)')
+    return _CreateVersion(msvs_version, override_path, sdk_based=True)
   version = str(version)
   versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
   if not versions:
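The new branch lets a build machine point gyp at a Visual Studio/SDK install directly instead of relying on autodetection. A rough sketch of how that is driven from the environment, based only on the check above (the install path is invented):

import os

# Hypothetical override: skip detection and use an explicit install location.
# The path is made up; the requirement for an "e" (Express) version string
# comes from the new check in SelectVisualStudioVersion.
os.environ['GYP_MSVS_OVERRIDE_PATH'] = r'C:\BuildTools\VS2010e'
os.environ['GYP_MSVS_VERSION'] = '2010e'

# With both variables set, SelectVisualStudioVersion() returns an SDK-based
# version object via _CreateVersion(..., sdk_based=True) rather than probing
# installed Visual Studio versions.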
