# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation

from io import StringIO

from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax

generator_default_variables = {
    "EXECUTABLE_PREFIX": "",
    "EXECUTABLE_SUFFIX": "",
    "STATIC_LIB_PREFIX": "lib",
    "STATIC_LIB_SUFFIX": ".a",
    "SHARED_LIB_PREFIX": "lib",
    # Gyp expects the following variables to be expandable by the build
    # system to the appropriate locations. Ninja prefers paths to be
    # known at gyp time. To resolve this, introduce special
    # variables starting with $! and $| (which begin with a $ so gyp knows it
    # should be treated specially, but is otherwise an invalid
    # ninja/shell variable) that are passed to gyp here but expanded
    # before writing out into the target .ninja files; see
    # ExpandSpecial.
    # $! is used for variables that represent a path and that can only appear at
    # the start of a string, while $| is used for variables that can appear
    # anywhere in a string.
    "INTERMEDIATE_DIR": "$!INTERMEDIATE_DIR",
    "SHARED_INTERMEDIATE_DIR": "$!PRODUCT_DIR/gen",
    "PRODUCT_DIR": "$!PRODUCT_DIR",
    "CONFIGURATION_NAME": "$|CONFIGURATION_NAME",
    # Special variables that may be used by gyp 'rule' targets.
    # We generate definitions for these variables on the fly when processing a
    # rule.
    "RULE_INPUT_ROOT": "${root}",
    "RULE_INPUT_DIRNAME": "${dirname}",
    "RULE_INPUT_PATH": "${source}",
    "RULE_INPUT_EXT": "${ext}",
    "RULE_INPUT_NAME": "${name}",
}
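
# For illustration (hypothetical values): with a build directory of out/Debug
# and a configuration named "Debug", ExpandSpecial below rewrites these
# specials so that "$!PRODUCT_DIR/gen" becomes "gen" (paths in the .ninja
# files are relative to the build directory) and "$|CONFIGURATION_NAME"
# becomes "Debug".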

# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None

generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()


def StripPrefix(arg, prefix):
    if arg.startswith(prefix):
        return arg[len(prefix) :]
    return arg


def QuoteShellArgument(arg, flavor):
    """Quote a string such that it will be interpreted as a single argument
    by the shell."""
    # Rather than attempting to enumerate the bad shell characters, just
    # allow common OK ones and quote anything else.
    if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg):
        return arg  # No quoting necessary.
    if flavor == "win":
        return gyp.msvs_emulation.QuoteForRspFile(arg)
    return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"


def Define(d, flavor):
    """Takes a preprocessor define and returns a -D parameter that's ninja- and
    shell-escaped."""
    if flavor == "win":
        # cl.exe replaces literal # characters with = in preprocessor definitions for
        # some reason. Octal-encode to work around that.
        d = d.replace("#", "\\%03o" % ord("#"))
    return QuoteShellArgument(ninja_syntax.escape("-D" + d), flavor)
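    # Illustrative example: Define('FOO="bar"', "linux") returns '-DFOO="bar"'
    # (single-quoted for the shell because of the embedded double quotes).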


def AddArch(output, arch):
    """Adds an arch string to an output path."""
    output, extension = os.path.splitext(output)
    return f"{output}.{arch}{extension}"


class Target:
    """Target represents the paths used within a single gyp target.

    Conceptually, building a single target A is a series of steps:

    1) actions/rules/copies generate source files/resources/etc.
    2) compiles generate .o files
    3) link generates a binary (library/executable)
    4) bundle merges the above into a mac bundle

    (Any of these steps can be optional.)

    From a build ordering perspective, a dependent target B could just
    depend on the last output of this series of steps.

    But some dependent commands sometimes need to reach inside the box.
    For example, when linking B it needs to get the path to the static
    library generated by A.

    This object stores those paths. To keep things simple, member
    variables only store concrete paths to single files, while methods
    compute derived values like "the last output of the target".
    """

    def __init__(self, type):
        # Gyp type ("static_library", etc.) of this target.
        self.type = type
        # File representing whether any input dependencies necessary for
        # dependent actions have completed.
        self.preaction_stamp = None
        # File representing whether any input dependencies necessary for
        # dependent compiles have completed.
        self.precompile_stamp = None
        # File representing the completion of actions/rules/copies, if any.
        self.actions_stamp = None
        # Path to the output of the link step, if any.
        self.binary = None
        # Path to the file representing the completion of building the bundle,
        # if any.
        self.bundle = None
        # On Windows, incremental linking requires linking against all the .objs
        # that compose a .lib (rather than the .lib itself). That list is stored
        # here. In this case, we also need to save the compile_deps for the target,
        # so that the target that directly depends on the .objs can also depend
        # on those.
        self.component_objs = None
        self.compile_deps = None
        # Windows only. The import .lib is the output of a build step, but
        # because dependents only link against the lib (not both the lib and the
        # dll) we keep track of the import library here.
        self.import_lib = None
        # Track if this target contains any C++ files, to decide if gcc or g++
        # should be used for linking.
        self.uses_cpp = False

    def Linkable(self):
        """Return true if this is a target that can be linked against."""
        return self.type in ("static_library", "shared_library")

    def UsesToc(self, flavor):
        """Return true if the target should produce a restat rule based on a TOC
        file."""
        # For bundles, the .TOC should be produced for the binary, not for
        # FinalOutput(). But the naive approach would put the TOC file into the
        # bundle, so don't do this for bundles for now.
        if flavor == "win" or self.bundle:
            return False
        return self.type in ("shared_library", "loadable_module")

    def PreActionInput(self, flavor):
        """Return the path, if any, that should be used as a dependency of
        any dependent action step."""
        if self.UsesToc(flavor):
            return self.FinalOutput() + ".TOC"
        return self.FinalOutput() or self.preaction_stamp

    def PreCompileInput(self):
        """Return the path, if any, that should be used as a dependency of
        any dependent compile step."""
        return self.actions_stamp or self.precompile_stamp

    def FinalOutput(self):
        """Return the last output of the target, which depends on all prior
        steps."""
        return self.bundle or self.binary or self.actions_stamp


# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
#   into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
#   an output file; the result can be namespaced such that it is unique
#   to the input file name as well as the output target name.
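#
# As a hypothetical example: with a build directory of out/Debug and a gyp
# file at chrome/chrome.gyp, the gyp path "browser/ui/foo.cc" corresponds to
# the ninja path "../../chrome/browser/ui/foo.cc", and GypPathToUniqueOutput
# would place a qualified output for a target named "targ" under
# "obj/chrome/browser/ui/targ.foo.o".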


class NinjaWriter:
    def __init__(
        self,
        hash_for_rules,
        target_outputs,
        base_dir,
        build_dir,
        output_file,
        toplevel_build,
        output_file_name,
        flavor,
        toplevel_dir=None,
    ):
        """
        base_dir: path from source root to directory containing this gyp file,
                  by gyp semantics, all input paths are relative to this
        build_dir: path from source root to build output
        toplevel_dir: path to the toplevel directory
        """

        self.hash_for_rules = hash_for_rules
        self.target_outputs = target_outputs
        self.base_dir = base_dir
        self.build_dir = build_dir
        self.ninja = ninja_syntax.Writer(output_file)
        self.toplevel_build = toplevel_build
        self.output_file_name = output_file_name

        self.flavor = flavor
        self.abs_build_dir = None
        if toplevel_dir is not None:
            self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir))
        self.obj_ext = ".obj" if flavor == "win" else ".o"
        if flavor == "win":
            # See docstring of msvs_emulation.GenerateEnvironmentFiles().
            self.win_env = {}
            for arch in ("x86", "x64"):
                self.win_env[arch] = "environment." + arch

        # Relative path from build output dir to base dir.
        build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
        self.build_to_base = os.path.join(build_to_top, base_dir)
        # Relative path from base dir to build dir.
        base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
        self.base_to_build = os.path.join(base_to_top, build_dir)

    def ExpandSpecial(self, path, product_dir=None):
        """Expand specials like $!PRODUCT_DIR in |path|.

        If |product_dir| is None, assumes the cwd is already the product
        dir. Otherwise, |product_dir| is the relative path to the product
        dir.
        """

        PRODUCT_DIR = "$!PRODUCT_DIR"
        if PRODUCT_DIR in path:
            if product_dir:
                path = path.replace(PRODUCT_DIR, product_dir)
            else:
                path = path.replace(PRODUCT_DIR + "/", "")
                path = path.replace(PRODUCT_DIR + "\\", "")
                path = path.replace(PRODUCT_DIR, ".")

        INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR"
        if INTERMEDIATE_DIR in path:
            int_dir = self.GypPathToUniqueOutput("gen")
            # GypPathToUniqueOutput generates a path relative to the product dir,
            # so insert product_dir in front if it is provided.
            path = path.replace(
                INTERMEDIATE_DIR, os.path.join(product_dir or "", int_dir)
            )

        CONFIGURATION_NAME = "$|CONFIGURATION_NAME"
        path = path.replace(CONFIGURATION_NAME, self.config_name)

        return path
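
    # For example (assuming a configuration named "Debug"): with
    # product_dir=None, "$!PRODUCT_DIR/gen/foo.h" expands to "gen/foo.h" and
    # "$|CONFIGURATION_NAME" to "Debug"; with product_dir="../out",
    # "$!PRODUCT_DIR/gen/foo.h" expands to "../out/gen/foo.h".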

    def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
        if self.flavor == "win":
            path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name)
        path = path.replace(generator_default_variables["RULE_INPUT_ROOT"], root)
        path = path.replace(generator_default_variables["RULE_INPUT_DIRNAME"], dirname)
        path = path.replace(generator_default_variables["RULE_INPUT_PATH"], source)
        path = path.replace(generator_default_variables["RULE_INPUT_EXT"], ext)
        path = path.replace(generator_default_variables["RULE_INPUT_NAME"], name)
        return path

    def GypPathToNinja(self, path, env=None):
        """Translate a gyp path to a ninja path, optionally expanding environment
        variable references in |path| with |env|.

        See the above discourse on path conversions."""
        if env:
            if self.flavor == "mac":
                path = gyp.xcode_emulation.ExpandEnvVars(path, env)
            elif self.flavor == "win":
                path = gyp.msvs_emulation.ExpandMacros(path, env)
        if path.startswith("$!"):
            expanded = self.ExpandSpecial(path)
            if self.flavor == "win":
                expanded = os.path.normpath(expanded)
            return expanded
        if "$|" in path:
            path = self.ExpandSpecial(path)
        assert "$" not in path, path
        return os.path.normpath(os.path.join(self.build_to_base, path))

    def GypPathToUniqueOutput(self, path, qualified=True):
        """Translate a gyp path to a ninja path for writing output.

        If qualified is True, qualify the resulting filename with the name
        of the target. This is necessary when e.g. compiling the same
        path twice for two separate output targets.

        See the above discourse on path conversions."""

        path = self.ExpandSpecial(path)
        assert not path.startswith("$"), path

        # Translate the path following this scheme:
        #   Input: foo/bar.gyp, target targ, references baz/out.o
        #   Output: obj/foo/baz/targ.out.o (if qualified)
        #           obj/foo/baz/out.o (otherwise)
        #     (and obj.host instead of obj for cross-compiles)
        #
        # Why this scheme and not some other one?
        # 1) for a given input, you can compute all derived outputs by matching
        #    its path, even if the input is brought via a gyp file with '..'.
        # 2) simple files like libraries and stamps have a simple filename.

        obj = "obj"
        if self.toolset != "target":
            obj += "." + self.toolset

        path_dir, path_basename = os.path.split(path)
        assert not os.path.isabs(path_dir), (
            "'%s' cannot be an absolute path (see crbug.com/462153)." % path_dir
        )

        if qualified:
            path_basename = self.name + "." + path_basename
        return os.path.normpath(
            os.path.join(obj, self.base_dir, path_dir, path_basename)
        )

    def WriteCollapsedDependencies(self, name, targets, order_only=None):
        """Given a list of targets, return a path for a single file
        representing the result of building all the targets, or None.

        Uses a stamp file if necessary."""

        assert targets == [item for item in targets if item], targets
        if len(targets) == 0:
            assert not order_only
            return None
        if len(targets) > 1 or order_only:
            stamp = self.GypPathToUniqueOutput(name + ".stamp")
            targets = self.ninja.build(stamp, "stamp", targets, order_only=order_only)
            self.ninja.newline()
        return targets[0]
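
    # In short: an empty list gives None, a single target with no order-only
    # dependencies is returned as-is, and anything else is collapsed into one
    # stamp edge (GypPathToUniqueOutput(name + ".stamp")) whose path is
    # returned, so dependents need only a single input.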

    def _SubninjaNameForArch(self, arch):
        output_file_base = os.path.splitext(self.output_file_name)[0]
        return f"{output_file_base}.{arch}.ninja"

    def WriteSpec(self, spec, config_name, generator_flags):
        """The main entry point for NinjaWriter: write the build rules for a spec.

        Returns a Target object, which represents the output paths for this spec.
        Returns None if there are no outputs (e.g. a settings-only 'none' type
        target)."""

        self.config_name = config_name
        self.name = spec["target_name"]
        self.toolset = spec["toolset"]
        config = spec["configurations"][config_name]
        self.target = Target(spec["type"])
        self.is_standalone_static_library = bool(
            spec.get("standalone_static_library", 0)
        )

        self.target_rpath = generator_flags.get("target_rpath", r"\$$ORIGIN/lib/")

        self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
        self.xcode_settings = self.msvs_settings = None
        if self.flavor == "mac":
            self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
            mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
            if mac_toolchain_dir:
                self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir

        if self.flavor == "win":
            self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags)
            arch = self.msvs_settings.GetArch(config_name)
            self.ninja.variable("arch", self.win_env[arch])
            self.ninja.variable("cc", "$cl_" + arch)
            self.ninja.variable("cxx", "$cl_" + arch)
            self.ninja.variable("cc_host", "$cl_" + arch)
            self.ninja.variable("cxx_host", "$cl_" + arch)
            self.ninja.variable("asm", "$ml_" + arch)

        if self.flavor == "mac":
            self.archs = self.xcode_settings.GetActiveArchs(config_name)
            if len(self.archs) > 1:
                self.arch_subninjas = {
                    arch: ninja_syntax.Writer(
                        OpenOutput(
                            os.path.join(
                                self.toplevel_build, self._SubninjaNameForArch(arch)
                            ),
                            "w",
                        )
                    )
                    for arch in self.archs
                }

        # Compute predepends for all rules.
        # actions_depends is the dependencies this target depends on before running
        # any of its action/rule/copy steps.
        # compile_depends is the dependencies this target depends on before running
        # any of its compile steps.
        actions_depends = []
        compile_depends = []
        # TODO(evan): it is rather confusing which things are lists and which
        # are strings. Fix these.
        if "dependencies" in spec:
            for dep in spec["dependencies"]:
                if dep in self.target_outputs:
                    target = self.target_outputs[dep]
                    actions_depends.append(target.PreActionInput(self.flavor))
                    compile_depends.append(target.PreCompileInput())
                    if target.uses_cpp:
                        self.target.uses_cpp = True
            actions_depends = [item for item in actions_depends if item]
            compile_depends = [item for item in compile_depends if item]
            actions_depends = self.WriteCollapsedDependencies(
                "actions_depends", actions_depends
            )
            compile_depends = self.WriteCollapsedDependencies(
                "compile_depends", compile_depends
            )
            self.target.preaction_stamp = actions_depends
            self.target.precompile_stamp = compile_depends

        # Write out actions, rules, and copies. These must happen before we
        # compile any sources, so compute a list of predependencies for sources
        # while we do it.
        extra_sources = []
        mac_bundle_depends = []
        self.target.actions_stamp = self.WriteActionsRulesCopies(
            spec, extra_sources, actions_depends, mac_bundle_depends
        )

        # If we have actions/rules/copies, we depend directly on those, but
        # otherwise we depend on dependent target's actions/rules/copies etc.
        # We never need to explicitly depend on previous target's link steps,
        # because no compile ever depends on them.
        compile_depends_stamp = self.target.actions_stamp or compile_depends

        # Write out the compilation steps, if any.
        link_deps = []
        try:
            sources = extra_sources + spec.get("sources", [])
        except TypeError:
            print("extra_sources: ", str(extra_sources))
            print('spec.get("sources"): ', str(spec.get("sources")))
            raise
        if sources:
            if self.flavor == "mac" and len(self.archs) > 1:
                # Write subninja file containing compile and link commands scoped to
                # a single arch if a fat binary is being built.
                for arch in self.archs:
                    self.ninja.subninja(self._SubninjaNameForArch(arch))

            pch = None
            if self.flavor == "win":
                gyp.msvs_emulation.VerifyMissingSources(
                    sources, self.abs_build_dir, generator_flags, self.GypPathToNinja
                )
                pch = gyp.msvs_emulation.PrecompiledHeader(
                    self.msvs_settings,
                    config_name,
                    self.GypPathToNinja,
                    self.GypPathToUniqueOutput,
                    self.obj_ext,
                )
            else:
                pch = gyp.xcode_emulation.MacPrefixHeader(
                    self.xcode_settings,
                    self.GypPathToNinja,
                    lambda path, lang: self.GypPathToUniqueOutput(path + "-" + lang),
                )
            link_deps = self.WriteSources(
                self.ninja,
                config_name,
                config,
                sources,
                compile_depends_stamp,
                pch,
                spec,
            )
            # Some actions/rules output 'sources' that are already object files.
            obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
            if obj_outputs:
                if self.flavor != "mac" or len(self.archs) == 1:
                    link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
                else:
                    print(
                        "Warning: Actions/rules writing object files don't work with "
                        "multiarch targets, dropping. (target %s)" % spec["target_name"]
                    )
        elif self.flavor == "mac" and len(self.archs) > 1:
            link_deps = collections.defaultdict(list)

        compile_deps = self.target.actions_stamp or actions_depends
        if self.flavor == "win" and self.target.type == "static_library":
            self.target.component_objs = link_deps
            self.target.compile_deps = compile_deps

        # Write out a link step, if needed.
        output = None
        is_empty_bundle = not link_deps and not mac_bundle_depends
        if link_deps or self.target.actions_stamp or actions_depends:
            output = self.WriteTarget(
                spec, config_name, config, link_deps, compile_deps
            )
            if self.is_mac_bundle:
                mac_bundle_depends.append(output)

        # Bundle all of the above together, if needed.
        if self.is_mac_bundle:
            output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)

        if not output:
            return None

        assert self.target.FinalOutput(), output
        return self.target

    def _WinIdlRule(self, source, prebuild, outputs):
        """Handle the implicit VS .idl rule for one source file. Fills |outputs|
        with files that are generated."""
        outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
            source, self.config_name
        )
        outdir = self.GypPathToNinja(outdir)

        def fix_path(path, rel=None):
            path = os.path.join(outdir, path)
            dirname, basename = os.path.split(source)
            root, ext = os.path.splitext(basename)
            path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename)
            if rel:
                path = os.path.relpath(path, rel)
            return path

        vars = [(name, fix_path(value, outdir)) for name, value in vars]
        output = [fix_path(p) for p in output]
        vars.append(("outdir", outdir))
        vars.append(("idlflags", flags))
        input = self.GypPathToNinja(source)
        self.ninja.build(output, "idl", input, variables=vars, order_only=prebuild)
        outputs.extend(output)

    def WriteWinIdlFiles(self, spec, prebuild):
        """Writes rules to match MSVS's implicit idl handling."""
        assert self.flavor == "win"
        if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
            return []
        outputs = []
        for source in filter(lambda x: x.endswith(".idl"), spec["sources"]):
            self._WinIdlRule(source, prebuild, outputs)
        return outputs

    def WriteActionsRulesCopies(
        self, spec, extra_sources, prebuild, mac_bundle_depends
    ):
        """Write out the Actions, Rules, and Copies steps. Return a path
        representing the outputs of these steps."""
        outputs = []
        if self.is_mac_bundle:
            mac_bundle_resources = spec.get("mac_bundle_resources", [])[:]
        else:
            mac_bundle_resources = []
        extra_mac_bundle_resources = []

        if "actions" in spec:
            outputs += self.WriteActions(
                spec["actions"], extra_sources, prebuild, extra_mac_bundle_resources
            )
        if "rules" in spec:
            outputs += self.WriteRules(
                spec["rules"],
                extra_sources,
                prebuild,
                mac_bundle_resources,
                extra_mac_bundle_resources,
            )
        if "copies" in spec:
            outputs += self.WriteCopies(spec["copies"], prebuild, mac_bundle_depends)

        if "sources" in spec and self.flavor == "win":
            outputs += self.WriteWinIdlFiles(spec, prebuild)

        if self.xcode_settings and self.xcode_settings.IsIosFramework():
            self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)

        stamp = self.WriteCollapsedDependencies("actions_rules_copies", outputs)

        if self.is_mac_bundle:
            xcassets = self.WriteMacBundleResources(
                extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends
            )
            partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
            self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)

        return stamp

    def GenerateDescription(self, verb, message, fallback):
        """Generate and return a description of a build step.

        |verb| is the short summary, e.g. ACTION or RULE.
        |message| is a hand-written description, or None if not available.
        |fallback| is the gyp-level name of the step, usable as a fallback.
        """
        if self.toolset != "target":
            verb += "(%s)" % self.toolset
        if message:
            return f"{verb} {self.ExpandSpecial(message)}"
        else:
            return f"{verb} {self.name}: {fallback}"

    def WriteActions(
        self, actions, extra_sources, prebuild, extra_mac_bundle_resources
    ):
        # Actions cd into the base directory.
        env = self.GetToolchainEnv()
        all_outputs = []
        for action in actions:
            # First write out a rule for the action.
            name = "{}_{}".format(action["action_name"], self.hash_for_rules)
            description = self.GenerateDescription(
                "ACTION", action.get("message", None), name
            )
            win_shell_flags = (
                self.msvs_settings.GetRuleShellFlags(action)
                if self.flavor == "win"
                else None
            )
            args = action["action"]
            depfile = action.get("depfile", None)
            if depfile:
                depfile = self.ExpandSpecial(depfile, self.base_to_build)
            pool = "console" if int(action.get("ninja_use_console", 0)) else None
            rule_name, _ = self.WriteNewNinjaRule(
                name, args, description, win_shell_flags, env, pool, depfile=depfile
            )

            inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]]
            if int(action.get("process_outputs_as_sources", False)):
                extra_sources += action["outputs"]
            if int(action.get("process_outputs_as_mac_bundle_resources", False)):
                extra_mac_bundle_resources += action["outputs"]
            outputs = [self.GypPathToNinja(o, env) for o in action["outputs"]]

            # Then write out an edge using the rule.
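            # For a hypothetical action named "gen_version" this ends up in the
            # generated .ninja file roughly as
            #   build gen/version.h: gen_version_<hash> tools/make_version.py || prestamp
            # where the rule itself was emitted by WriteNewNinjaRule above.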
            self.ninja.build(outputs, rule_name, inputs, order_only=prebuild)
            all_outputs += outputs

            self.ninja.newline()

        return all_outputs

    def WriteRules(
        self,
        rules,
        extra_sources,
        prebuild,
        mac_bundle_resources,
        extra_mac_bundle_resources,
    ):
        env = self.GetToolchainEnv()
        all_outputs = []
        for rule in rules:
            # Skip a rule with no action and no inputs.
            if "action" not in rule and not rule.get("rule_sources", []):
                continue

            # First write out a rule for the rule action.
            name = "{}_{}".format(rule["rule_name"], self.hash_for_rules)

            args = rule["action"]
            description = self.GenerateDescription(
                "RULE",
                rule.get("message", None),
                ("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name,
            )
            win_shell_flags = (
                self.msvs_settings.GetRuleShellFlags(rule)
                if self.flavor == "win"
                else None
            )
            pool = "console" if int(rule.get("ninja_use_console", 0)) else None
            rule_name, args = self.WriteNewNinjaRule(
                name, args, description, win_shell_flags, env, pool
            )

            # TODO: if the command references the outputs directly, we should
            # simplify it to just use $out.

            # Rules can potentially make use of some special variables which
            # must vary per source file.
            # Compute the list of variables we'll need to provide.
            special_locals = ("source", "root", "dirname", "ext", "name")
            needed_variables = {"source"}
            for argument in args:
                for var in special_locals:
                    if "${%s}" % var in argument:
                        needed_variables.add(var)
            needed_variables = sorted(needed_variables)

            def cygwin_munge(path):
                # pylint: disable=cell-var-from-loop
                if win_shell_flags and win_shell_flags.cygwin:
                    return path.replace("\\", "/")
                return path

            inputs = [self.GypPathToNinja(i, env) for i in rule.get("inputs", [])]

            # If there are n source files matching the rule, and m additional rule
            # inputs, then adding 'inputs' to each build edge written below will
            # write m * n inputs. Collapsing reduces this to m + n.
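            # (For example, 3 extra rule inputs and 100 sources would otherwise
            # add 300 input references; with the collapsed stamp it is 103.)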
            sources = rule.get("rule_sources", [])
            num_inputs = len(inputs)
            if prebuild:
                num_inputs += 1
            if num_inputs > 2 and len(sources) > 2:
                inputs = [
                    self.WriteCollapsedDependencies(
                        rule["rule_name"], inputs, order_only=prebuild
                    )
                ]
                prebuild = []

            # For each source file, write an edge that generates all the outputs.
            for source in sources:
                source = os.path.normpath(source)
                dirname, basename = os.path.split(source)
                root, ext = os.path.splitext(basename)

                # Gather the list of inputs and outputs, expanding $vars if possible.
                outputs = [
                    self.ExpandRuleVariables(o, root, dirname, source, ext, basename)
                    for o in rule["outputs"]
                ]

                if int(rule.get("process_outputs_as_sources", False)):
                    extra_sources += outputs

                was_mac_bundle_resource = source in mac_bundle_resources
                if was_mac_bundle_resource or int(
                    rule.get("process_outputs_as_mac_bundle_resources", False)
                ):
                    extra_mac_bundle_resources += outputs
                    # Note: This is n_resources * n_outputs_in_rule.
                    # Put to-be-removed items in a set and
                    # remove them all in a single pass
                    # if this becomes a performance issue.
                    if was_mac_bundle_resource:
                        mac_bundle_resources.remove(source)

                extra_bindings = []
                for var in needed_variables:
                    if var == "root":
                        extra_bindings.append(("root", cygwin_munge(root)))
                    elif var == "dirname":
                        # '$dirname' is a parameter to the rule action, which means
                        # it shouldn't be converted to a Ninja path. But we don't
                        # want $!PRODUCT_DIR in there either.
                        dirname_expanded = self.ExpandSpecial(
                            dirname, self.base_to_build
                        )
                        extra_bindings.append(
                            ("dirname", cygwin_munge(dirname_expanded))
                        )
                    elif var == "source":
                        # '$source' is a parameter to the rule action, which means
                        # it shouldn't be converted to a Ninja path. But we don't
                        # want $!PRODUCT_DIR in there either.
                        source_expanded = self.ExpandSpecial(source, self.base_to_build)
                        extra_bindings.append(("source", cygwin_munge(source_expanded)))
                    elif var == "ext":
                        extra_bindings.append(("ext", ext))
                    elif var == "name":
                        extra_bindings.append(("name", cygwin_munge(basename)))
                    else:
                        assert var is None, repr(var)

                outputs = [self.GypPathToNinja(o, env) for o in outputs]
                if self.flavor == "win":
                    # WriteNewNinjaRule uses unique_name to create a rsp file on win.
                    extra_bindings.append(
                        ("unique_name", hashlib.md5(outputs[0].encode("utf-8")).hexdigest())
                    )

                self.ninja.build(
                    outputs,
                    rule_name,
                    self.GypPathToNinja(source),
                    implicit=inputs,
                    order_only=prebuild,
                    variables=extra_bindings,
                )

                all_outputs.extend(outputs)

        return all_outputs

    def WriteCopies(self, copies, prebuild, mac_bundle_depends):
        outputs = []
        if self.xcode_settings:
            extra_env = self.xcode_settings.GetPerTargetSettings()
            env = self.GetToolchainEnv(additional_settings=extra_env)
        else:
            env = self.GetToolchainEnv()
        for to_copy in copies:
            for path in to_copy["files"]:
                # Normalize the path so trailing slashes don't confuse us.
                path = os.path.normpath(path)
                basename = os.path.split(path)[1]
                src = self.GypPathToNinja(path, env)
                dst = self.GypPathToNinja(
                    os.path.join(to_copy["destination"], basename), env
                )
                outputs += self.ninja.build(dst, "copy", src, order_only=prebuild)
                if self.is_mac_bundle:
                    # gyp has mac_bundle_resources to copy things into a bundle's
                    # Resources folder, but there's no built-in way to copy files
                    # to other places in the bundle.
                    # Hence, some targets use copies for this.
                    # Check if this file is copied into the current bundle,
                    # and if so add it to the bundle depends so
                    # that dependent targets get rebuilt if the copy input changes.
                    if dst.startswith(
                        self.xcode_settings.GetBundleContentsFolderPath()
                    ):
                        mac_bundle_depends.append(dst)

        return outputs

    def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
        """Prebuild steps to generate hmap files and copy headers to destination."""
        framework = self.ComputeMacBundleOutput()
        all_sources = spec["sources"]
        copy_headers = spec["mac_framework_headers"]
        output = self.GypPathToUniqueOutput("headers.hmap")
        self.xcode_settings.header_map_path = output
        all_headers = map(
            self.GypPathToNinja, filter(lambda x: x.endswith(".h"), all_sources)
        )
        variables = [
            ("framework", framework),
            ("copy_headers", map(self.GypPathToNinja, copy_headers)),
        ]
        outputs.extend(
            self.ninja.build(
                output,
                "compile_ios_framework_headers",
                all_headers,
                variables=variables,
                order_only=prebuild,
            )
        )

    def WriteMacBundleResources(self, resources, bundle_depends):
        """Writes ninja edges for 'mac_bundle_resources'."""
        xcassets = []

        extra_env = self.xcode_settings.GetPerTargetSettings()
        env = self.GetSortedXcodeEnv(additional_settings=extra_env)
        env = self.ComputeExportEnvString(env)
        isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)

        for output, res in gyp.xcode_emulation.GetMacBundleResources(
            generator_default_variables["PRODUCT_DIR"],
            self.xcode_settings,
            map(self.GypPathToNinja, resources),
        ):
            output = self.ExpandSpecial(output)
            if os.path.splitext(output)[-1] != ".xcassets":
                self.ninja.build(
                    output,
                    "mac_tool",
                    res,
                    variables=[
                        ("mactool_cmd", "copy-bundle-resource"),
                        ("env", env),
                        ("binary", isBinary),
                    ],
                )
                bundle_depends.append(output)
            else:
                xcassets.append(res)
        return xcassets

    def WriteMacXCassets(self, xcassets, bundle_depends):
        """Writes ninja edges for 'mac_bundle_resources' .xcassets files.

        This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
        It assumes that the asset catalogs define at least one imageset and
        thus an Assets.car file will be generated in the application resources
        directory. If this is not the case, then the build step will probably
        be rerun at each invocation of ninja."""
        if not xcassets:
            return

        extra_arguments = {}
        settings_to_arg = {
            "XCASSETS_APP_ICON": "app-icon",
            "XCASSETS_LAUNCH_IMAGE": "launch-image",
        }
        settings = self.xcode_settings.xcode_settings[self.config_name]
        for settings_key, arg_name in settings_to_arg.items():
            value = settings.get(settings_key)
            if value:
                extra_arguments[arg_name] = value

        partial_info_plist = None
        if extra_arguments:
            partial_info_plist = self.GypPathToUniqueOutput(
                "assetcatalog_generated_info.plist"
            )
            extra_arguments["output-partial-info-plist"] = partial_info_plist

        outputs = []
        outputs.append(
            os.path.join(self.xcode_settings.GetBundleResourceFolder(), "Assets.car")
        )
        if partial_info_plist:
            outputs.append(partial_info_plist)

        keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
        extra_env = self.xcode_settings.GetPerTargetSettings()
        env = self.GetSortedXcodeEnv(additional_settings=extra_env)
        env = self.ComputeExportEnvString(env)

        bundle_depends.extend(
            self.ninja.build(
                outputs,
                "compile_xcassets",
                xcassets,
                variables=[("env", env), ("keys", keys)],
            )
        )
        return partial_info_plist

    def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
        """Write build rules for bundle Info.plist files."""
        info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
            generator_default_variables["PRODUCT_DIR"],
            self.xcode_settings,
            self.GypPathToNinja,
        )
        if not info_plist:
            return
        out = self.ExpandSpecial(out)
        if defines:
            # Create an intermediate file to store preprocessed results.
            intermediate_plist = self.GypPathToUniqueOutput(
                os.path.basename(info_plist)
            )
            defines = " ".join([Define(d, self.flavor) for d in defines])
            info_plist = self.ninja.build(
                intermediate_plist,
                "preprocess_infoplist",
                info_plist,
                variables=[("defines", defines)],
            )

        env = self.GetSortedXcodeEnv(additional_settings=extra_env)
        env = self.ComputeExportEnvString(env)

        if partial_info_plist:
            intermediate_plist = self.GypPathToUniqueOutput("merged_info.plist")
            info_plist = self.ninja.build(
                intermediate_plist, "merge_infoplist", [partial_info_plist, info_plist]
            )

        keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
        keys = QuoteShellArgument(json.dumps(keys), self.flavor)
        isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
        self.ninja.build(
            out,
            "copy_infoplist",
            info_plist,
            variables=[("env", env), ("keys", keys), ("binary", isBinary)],
        )
        bundle_depends.append(out)

    def WriteSources(
        self,
        ninja_file,
        config_name,
        config,
        sources,
        predepends,
        precompiled_header,
        spec,
    ):
        """Write build rules to compile all of |sources|."""
        if self.toolset == "host":
            self.ninja.variable("ar", "$ar_host")
            self.ninja.variable("cc", "$cc_host")
            self.ninja.variable("cxx", "$cxx_host")
            self.ninja.variable("ld", "$ld_host")
            self.ninja.variable("ldxx", "$ldxx_host")
            self.ninja.variable("nm", "$nm_host")
            self.ninja.variable("readelf", "$readelf_host")

        if self.flavor != "mac" or len(self.archs) == 1:
            return self.WriteSourcesForArch(
                self.ninja,
                config_name,
                config,
                sources,
                predepends,
                precompiled_header,
                spec,
            )
        else:
            return {
                arch: self.WriteSourcesForArch(
                    self.arch_subninjas[arch],
                    config_name,
                    config,
                    sources,
                    predepends,
                    precompiled_header,
                    spec,
                    arch=arch,
                )
                for arch in self.archs
            }

    def WriteSourcesForArch(
        self,
        ninja_file,
        config_name,
        config,
        sources,
        predepends,
        precompiled_header,
        spec,
        arch=None,
    ):
        """Write build rules to compile all of |sources|."""

        extra_defines = []
        if self.flavor == "mac":
            cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
            cflags_c = self.xcode_settings.GetCflagsC(config_name)
            cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
            cflags_objc = ["$cflags_c"] + self.xcode_settings.GetCflagsObjC(config_name)
            cflags_objcc = ["$cflags_cc"] + self.xcode_settings.GetCflagsObjCC(
                config_name
            )
        elif self.flavor == "win":
            asmflags = self.msvs_settings.GetAsmflags(config_name)
            cflags = self.msvs_settings.GetCflags(config_name)
            cflags_c = self.msvs_settings.GetCflagsC(config_name)
            cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
            extra_defines = self.msvs_settings.GetComputedDefines(config_name)
            # See comment at cc_command for why there are two .pdb files.
            pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
                config_name, self.ExpandSpecial
            )
            if not pdbpath_c:
                obj = "obj"
                if self.toolset != "target":
                    obj += "." + self.toolset
                pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
                pdbpath_c = pdbpath + ".c.pdb"
                pdbpath_cc = pdbpath + ".cc.pdb"
            self.WriteVariableList(ninja_file, "pdbname_c", [pdbpath_c])
            self.WriteVariableList(ninja_file, "pdbname_cc", [pdbpath_cc])
            self.WriteVariableList(ninja_file, "pchprefix", [self.name])
        else:
            cflags = config.get("cflags", [])
            cflags_c = config.get("cflags_c", [])
            cflags_cc = config.get("cflags_cc", [])

        # Respect environment variables related to the build, but target-specific
        # flags can still override them.
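        # (For example, CFLAGS="-O2" from the environment is prepended below,
        # so a later per-target -O0 in cflags_c still wins on the compile
        # line.)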
        if self.toolset == "target":
            cflags_c = (
                os.environ.get("CPPFLAGS", "").split()
                + os.environ.get("CFLAGS", "").split()
                + cflags_c
            )
            cflags_cc = (
                os.environ.get("CPPFLAGS", "").split()
                + os.environ.get("CXXFLAGS", "").split()
                + cflags_cc
            )
        elif self.toolset == "host":
            cflags_c = (
                os.environ.get("CPPFLAGS_host", "").split()
                + os.environ.get("CFLAGS_host", "").split()
                + cflags_c
            )
            cflags_cc = (
                os.environ.get("CPPFLAGS_host", "").split()
                + os.environ.get("CXXFLAGS_host", "").split()
                + cflags_cc
            )

        defines = config.get("defines", []) + extra_defines
        self.WriteVariableList(
            ninja_file, "defines", [Define(d, self.flavor) for d in defines]
        )
        if self.flavor == "win":
            self.WriteVariableList(
                ninja_file, "asmflags", map(self.ExpandSpecial, asmflags)
            )
            self.WriteVariableList(
                ninja_file,
                "rcflags",
                [
                    QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
                    for f in self.msvs_settings.GetRcflags(
                        config_name, self.GypPathToNinja
                    )
                ],
            )

        include_dirs = config.get("include_dirs", [])

        env = self.GetToolchainEnv()
        if self.flavor == "win":
            include_dirs = self.msvs_settings.AdjustIncludeDirs(
                include_dirs, config_name
            )
        self.WriteVariableList(
            ninja_file,
            "includes",
            [
                QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
                for i in include_dirs
            ],
        )

        if self.flavor == "win":
            midl_include_dirs = config.get("midl_include_dirs", [])
            midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
                midl_include_dirs, config_name
            )
            self.WriteVariableList(
                ninja_file,
                "midl_includes",
                [
                    QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
                    for i in midl_include_dirs
                ],
            )

        pch_commands = precompiled_header.GetPchBuildCommands(arch)
        if self.flavor == "mac":
            # Most targets use no precompiled headers, so only write these if needed.
            for ext, var in [
                ("c", "cflags_pch_c"),
                ("cc", "cflags_pch_cc"),
                ("m", "cflags_pch_objc"),
                ("mm", "cflags_pch_objcc"),
            ]:
                include = precompiled_header.GetInclude(ext, arch)
                if include:
                    ninja_file.variable(var, include)

        arflags = config.get("arflags", [])

        self.WriteVariableList(ninja_file, "cflags", map(self.ExpandSpecial, cflags))
        self.WriteVariableList(
            ninja_file, "cflags_c", map(self.ExpandSpecial, cflags_c)
        )
        self.WriteVariableList(
            ninja_file, "cflags_cc", map(self.ExpandSpecial, cflags_cc)
        )
        if self.flavor == "mac":
            self.WriteVariableList(
                ninja_file, "cflags_objc", map(self.ExpandSpecial, cflags_objc)
            )
            self.WriteVariableList(
                ninja_file, "cflags_objcc", map(self.ExpandSpecial, cflags_objcc)
            )
        self.WriteVariableList(ninja_file, "arflags", map(self.ExpandSpecial, arflags))
        ninja_file.newline()
        outputs = []
        has_rc_source = False
        for source in sources:
            filename, ext = os.path.splitext(source)
            ext = ext[1:]
            obj_ext = self.obj_ext
            if ext in ("cc", "cpp", "cxx"):
                command = "cxx"
                self.target.uses_cpp = True
            elif ext == "c" or (ext == "S" and self.flavor != "win"):
                command = "cc"
            elif ext == "s" and self.flavor != "win":  # Doesn't generate .o.d files.
                command = "cc_s"
            elif (
                self.flavor == "win"
                and ext in ("asm", "S")
                and not self.msvs_settings.HasExplicitAsmRules(spec)
            ):
                command = "asm"
                # Add the _asm suffix as msvs is capable of handling .cc and
                # .asm files of the same name without collision.
                obj_ext = "_asm.obj"
            elif self.flavor == "mac" and ext == "m":
                command = "objc"
            elif self.flavor == "mac" and ext == "mm":
                command = "objcxx"
                self.target.uses_cpp = True
            elif self.flavor == "win" and ext == "rc":
                command = "rc"
                obj_ext = ".res"
                has_rc_source = True
            else:
                # Ignore unhandled extensions.
                continue
            input = self.GypPathToNinja(source)
            output = self.GypPathToUniqueOutput(filename + obj_ext)
            if arch is not None:
                output = AddArch(output, arch)
            implicit = precompiled_header.GetObjDependencies([input], [output], arch)
            variables = []
            if self.flavor == "win":
                variables, output, implicit = precompiled_header.GetFlagsModifications(
                    input,
                    output,
                    implicit,
                    command,
                    cflags_c,
                    cflags_cc,
                    self.ExpandSpecial,
                )
            ninja_file.build(
                output,
                command,
                input,
                implicit=[gch for _, _, gch in implicit],
                order_only=predepends,
                variables=variables,
            )
            outputs.append(output)

        if has_rc_source:
            resource_include_dirs = config.get("resource_include_dirs", include_dirs)
            self.WriteVariableList(
                ninja_file,
                "resource_includes",
                [
                    QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor)
                    for i in resource_include_dirs
                ],
            )

        self.WritePchTargets(ninja_file, pch_commands)

        ninja_file.newline()
        return outputs

    def WritePchTargets(self, ninja_file, pch_commands):
        """Writes ninja rules to compile prefix headers."""
        if not pch_commands:
            return

        for gch, lang_flag, lang, input in pch_commands:
            var_name = {
                "c": "cflags_pch_c",
                "cc": "cflags_pch_cc",
                "m": "cflags_pch_objc",
                "mm": "cflags_pch_objcc",
            }[lang]

            map = {
                "c": "cc",
                "cc": "cxx",
                "m": "objc",
                "mm": "objcxx",
            }
            cmd = map.get(lang)
            ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])

    def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
        """Write out a link step. Fills out target.binary."""
        if self.flavor != "mac" or len(self.archs) == 1:
            return self.WriteLinkForArch(
                self.ninja, spec, config_name, config, link_deps, compile_deps
            )
        else:
            output = self.ComputeOutput(spec)
            inputs = [
                self.WriteLinkForArch(
                    self.arch_subninjas[arch],
                    spec,
                    config_name,
                    config,
                    link_deps[arch],
                    compile_deps,
                    arch=arch,
                )
                for arch in self.archs
            ]
            extra_bindings = []
            build_output = output
            if not self.is_mac_bundle:
                self.AppendPostbuildVariable(extra_bindings, spec, output, output)

            # TODO(yyanagisawa): more work needed to fix:
            # https://code.google.com/p/gyp/issues/detail?id=411
            if (
                spec["type"] in ("shared_library", "loadable_module")
                and not self.is_mac_bundle
            ):
                extra_bindings.append(("lib", output))
                self.ninja.build(
                    [output, output + ".TOC"],
                    "solipo",
                    inputs,
                    variables=extra_bindings,
                )
            else:
                self.ninja.build(build_output, "lipo", inputs, variables=extra_bindings)
            return output

    def WriteLinkForArch(
        self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None
    ):
        """Write out a link step. Fills out target.binary."""
        command = {
            "executable": "link",
            "loadable_module": "solink_module",
            "shared_library": "solink",
        }[spec["type"]]
        command_suffix = ""

        implicit_deps = set()
        solibs = set()
        order_deps = set()

        if compile_deps:
            # Normally, the compiles of the target already depend on compile_deps,
            # but a shared_library target might have no sources and only link together
            # a few static_library deps, so the link step also needs to depend
            # on compile_deps to make sure actions in the shared_library target
            # get run before the link.
            order_deps.add(compile_deps)

        if "dependencies" in spec:
            # Two kinds of dependencies:
            # - Linkable dependencies (like a .a or a .so): add them to the link line.
            # - Non-linkable dependencies (like a rule that generates a file
            #   and writes a stamp file): add them to implicit_deps
            extra_link_deps = set()
            for dep in spec["dependencies"]:
                target = self.target_outputs.get(dep)
                if not target:
                    continue
                linkable = target.Linkable()
                if linkable:
                    new_deps = []
                    if (
                        self.flavor == "win"
                        and target.component_objs
                        and self.msvs_settings.IsUseLibraryDependencyInputs(config_name)
                    ):
                        new_deps = target.component_objs
                        if target.compile_deps:
                            order_deps.add(target.compile_deps)
                    elif self.flavor == "win" and target.import_lib:
                        new_deps = [target.import_lib]
                    elif target.UsesToc(self.flavor):
                        solibs.add(target.binary)
                        implicit_deps.add(target.binary + ".TOC")
                    else:
                        new_deps = [target.binary]
                    for new_dep in new_deps:
                        if new_dep not in extra_link_deps:
                            extra_link_deps.add(new_dep)
                            link_deps.append(new_dep)

                final_output = target.FinalOutput()
                if not linkable or final_output != target.binary:
                    implicit_deps.add(final_output)

        extra_bindings = []
        if self.target.uses_cpp and self.flavor != "win":
            extra_bindings.append(("ld", "$ldxx"))

        output = self.ComputeOutput(spec, arch)
        if arch is None and not self.is_mac_bundle:
            self.AppendPostbuildVariable(extra_bindings, spec, output, output)

        is_executable = spec["type"] == "executable"
        # The ldflags config key is not used on mac or win. On those platforms
        # linker flags are set via xcode_settings and msvs_settings, respectively.
        if self.toolset == "target":
            env_ldflags = os.environ.get("LDFLAGS", "").split()
        elif self.toolset == "host":
            env_ldflags = os.environ.get("LDFLAGS_host", "").split()

        if self.flavor == "mac":
            ldflags = self.xcode_settings.GetLdflags(
                config_name,
                self.ExpandSpecial(generator_default_variables["PRODUCT_DIR"]),
                self.GypPathToNinja,
                arch,
            )
            ldflags = env_ldflags + ldflags
        elif self.flavor == "win":
            manifest_base_name = self.GypPathToUniqueOutput(
                self.ComputeOutputFileName(spec)
            )
            (
                ldflags,
                intermediate_manifest,
                manifest_files,
            ) = self.msvs_settings.GetLdflags(
                config_name,
                self.GypPathToNinja,
                self.ExpandSpecial,
                manifest_base_name,
                output,
                is_executable,
                self.toplevel_build,
            )
            ldflags = env_ldflags + ldflags
            self.WriteVariableList(ninja_file, "manifests", manifest_files)
            implicit_deps = implicit_deps.union(manifest_files)
            if intermediate_manifest:
                self.WriteVariableList(
                    ninja_file, "intermediatemanifest", [intermediate_manifest]
                )
            command_suffix = _GetWinLinkRuleNameSuffix(
                self.msvs_settings.IsEmbedManifest(config_name)
            )
            def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
            if def_file:
                implicit_deps.add(def_file)
        else:
            # Respect environment variables related to the build, but target-specific
            # flags can still override them.
            ldflags = env_ldflags + config.get("ldflags", [])
            if is_executable and len(solibs):
                rpath = "lib/"
                if self.toolset != "target":
                    rpath += self.toolset
                    ldflags.append(r"-Wl,-rpath=\$$ORIGIN/%s" % rpath)
                else:
                    ldflags.append("-Wl,-rpath=%s" % self.target_rpath)
                ldflags.append("-Wl,-rpath-link=%s" % rpath)
        self.WriteVariableList(ninja_file, "ldflags", map(self.ExpandSpecial, ldflags))

        library_dirs = config.get("library_dirs", [])
        if self.flavor == "win":
            library_dirs = [
                self.msvs_settings.ConvertVSMacros(library_dir, config_name)
                for library_dir in library_dirs
            ]
            library_dirs = [
                "/LIBPATH:"
                + QuoteShellArgument(self.GypPathToNinja(library_dir), self.flavor)
                for library_dir in library_dirs
            ]
        else:
            library_dirs = [
                QuoteShellArgument("-L" + self.GypPathToNinja(library_dir), self.flavor)
                for library_dir in library_dirs
            ]

        libraries = gyp.common.uniquer(
            map(self.ExpandSpecial, spec.get("libraries", []))
        )
        if self.flavor == "mac":
            libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
        elif self.flavor == "win":
            libraries = self.msvs_settings.AdjustLibraries(libraries)

        self.WriteVariableList(ninja_file, "libs", library_dirs + libraries)

        linked_binary = output

        if command in ("solink", "solink_module"):
            extra_bindings.append(("soname", os.path.split(output)[1]))
            extra_bindings.append(("lib", gyp.common.EncodePOSIXShellArgument(output)))
            if self.flavor != "win":
                link_file_list = output
                if self.is_mac_bundle:
                    # 'Dependency Framework.framework/Versions/A/Dependency Framework'
                    # -> 'Dependency Framework.framework.rsp'
                    link_file_list = self.xcode_settings.GetWrapperName()
                if arch:
                    link_file_list += "." + arch
                link_file_list += ".rsp"
                # If an rspfile contains spaces, ninja surrounds the filename with
                # quotes around it and then passes it to open(), creating a file with
                # quotes in its name (and when looking for the rsp file, the name
                # makes it through bash which strips the quotes) :-/
                link_file_list = link_file_list.replace(" ", "_")
                extra_bindings.append(
                    (
                        "link_file_list",
                        gyp.common.EncodePOSIXShellArgument(link_file_list),
                    )
                )
            if self.flavor == "win":
                extra_bindings.append(("binary", output))
                if (
                    "/NOENTRY" not in ldflags
                    and not self.msvs_settings.GetNoImportLibrary(config_name)
                ):
                    self.target.import_lib = output + ".lib"
                    extra_bindings.append(
                        ("implibflag", "/IMPLIB:%s" % self.target.import_lib)
                    )
                pdbname = self.msvs_settings.GetPDBName(
                    config_name, self.ExpandSpecial, output + ".pdb"
                )
                output = [output, self.target.import_lib]
                if pdbname:
                    output.append(pdbname)
            elif not self.is_mac_bundle:
                output = [output, output + ".TOC"]
            else:
                command = command + "_notoc"
        elif self.flavor == "win":
            extra_bindings.append(("binary", output))
            pdbname = self.msvs_settings.GetPDBName(
                config_name, self.ExpandSpecial, output + ".pdb"
            )
            if pdbname:
                output = [output, pdbname]

        if len(solibs):
            extra_bindings.append(
                ("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs)))
            )
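
        # On linux, for a shared library "foo" the edge written below looks
        # roughly like
        #   build lib/libfoo.so lib/libfoo.so.TOC: solink <object files...>
        # with soname/lib/ldflags/libs bound on it; executables instead use the
        # plain "link" rule with a single output.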

        ninja_file.build(
            output,
            command + command_suffix,
            link_deps,
            implicit=sorted(implicit_deps),
            order_only=list(order_deps),
            variables=extra_bindings,
        )
        return linked_binary

    def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
        extra_link_deps = any(
            self.target_outputs.get(dep).Linkable()
            for dep in spec.get("dependencies", [])
            if dep in self.target_outputs
        )
        if spec["type"] == "none" or (not link_deps and not extra_link_deps):
            # TODO(evan): don't call this function for 'none' target types, as
            # it doesn't do anything, and we fake out a 'binary' with a stamp file.
            self.target.binary = compile_deps
            self.target.type = "none"
        elif spec["type"] == "static_library":
            self.target.binary = self.ComputeOutput(spec)
            if (
                self.flavor not in ("ios", "mac", "netbsd", "openbsd", "win")
                and not self.is_standalone_static_library
            ):
                self.ninja.build(
                    self.target.binary, "alink_thin", link_deps, order_only=compile_deps
                )
            else:
                variables = []
                if self.xcode_settings:
                    libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
                    if libtool_flags:
                        variables.append(("libtool_flags", libtool_flags))
                if self.msvs_settings:
                    libflags = self.msvs_settings.GetLibFlags(
                        config_name, self.GypPathToNinja
                    )
                    variables.append(("libflags", libflags))

                if self.flavor != "mac" or len(self.archs) == 1:
                    self.AppendPostbuildVariable(
                        variables, spec, self.target.binary, self.target.binary
                    )
                    self.ninja.build(
                        self.target.binary,
                        "alink",
                        link_deps,
                        order_only=compile_deps,
                        variables=variables,
                    )
                else:
                    inputs = []
                    for arch in self.archs:
                        output = self.ComputeOutput(spec, arch)
                        self.arch_subninjas[arch].build(
                            output,
                            "alink",
                            link_deps[arch],
                            order_only=compile_deps,
                            variables=variables,
                        )
                        inputs.append(output)
                    # TODO: It's not clear if
                    # libtool_flags should be passed to the alink
                    # call that combines single-arch .a files into a fat .a file.
                    self.AppendPostbuildVariable(
                        variables, spec, self.target.binary, self.target.binary
                    )
                    self.ninja.build(
                        self.target.binary,
                        "alink",
                        inputs,
                        # FIXME: test proving order_only=compile_deps isn't
                        # needed.
                        variables=variables,
                    )
        else:
            self.target.binary = self.WriteLink(
                spec, config_name, config, link_deps, compile_deps
            )
        return self.target.binary

    def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
        assert self.is_mac_bundle
        package_framework = spec["type"] in ("shared_library", "loadable_module")
        output = self.ComputeMacBundleOutput()
        if is_empty:
            output += ".stamp"
        variables = []
        self.AppendPostbuildVariable(
            variables,
            spec,
            output,
            self.target.binary,
            is_command_start=not package_framework,
        )
        if package_framework and not is_empty:
            if spec["type"] == "shared_library" and self.xcode_settings.isIOS:
                self.ninja.build(
                    output,
                    "package_ios_framework",
                    mac_bundle_depends,
                    variables=variables,
                )
            else:
                variables.append(("version", self.xcode_settings.GetFrameworkVersion()))
                self.ninja.build(
                    output, "package_framework", mac_bundle_depends, variables=variables
                )
        else:
            self.ninja.build(output, "stamp", mac_bundle_depends, variables=variables)
        self.target.bundle = output
        return output
|
|
def GetToolchainEnv(self, additional_settings=None):
|
"""Returns the variables toolchain would set for build steps."""
|
env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
|
if self.flavor == "win":
|
env = self.GetMsvsToolchainEnv(additional_settings=additional_settings)
|
return env
|
|
def GetMsvsToolchainEnv(self, additional_settings=None):
|
"""Returns the variables Visual Studio would set for build steps."""
|
return self.msvs_settings.GetVSMacroEnv(
|
"$!PRODUCT_DIR", config=self.config_name
|
)
|
|
def GetSortedXcodeEnv(self, additional_settings=None):
|
"""Returns the variables Xcode would set for build steps."""
|
assert self.abs_build_dir
|
abs_build_dir = self.abs_build_dir
|
return gyp.xcode_emulation.GetSortedXcodeEnv(
|
self.xcode_settings,
|
abs_build_dir,
|
os.path.join(abs_build_dir, self.build_to_base),
|
self.config_name,
|
additional_settings,
|
)
|
|
def GetSortedXcodePostbuildEnv(self):
|
"""Returns the variables Xcode would set for postbuild steps."""
|
postbuild_settings = {}
|
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
|
# TODO(thakis): It would be nice to have some general mechanism instead.
|
strip_save_file = self.xcode_settings.GetPerTargetSetting(
|
"CHROMIUM_STRIP_SAVE_FILE"
|
)
|
if strip_save_file:
|
postbuild_settings["CHROMIUM_STRIP_SAVE_FILE"] = strip_save_file
|
return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
|
|
def AppendPostbuildVariable(
|
self, variables, spec, output, binary, is_command_start=False
|
):
|
"""Adds a 'postbuild' variable if there is a postbuild for |output|."""
|
postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
|
if postbuild:
|
variables.append(("postbuilds", postbuild))
|
|
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
|
"""Returns a shell command that runs all the postbuilds, and removes
|
|output| if any of them fails. If |is_command_start| is False, then the
|
returned string will start with ' && '."""
|
if not self.xcode_settings or spec["type"] == "none" or not output:
|
return ""
|
output = QuoteShellArgument(output, self.flavor)
|
postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
|
if output_binary is not None:
|
postbuilds = self.xcode_settings.AddImplicitPostbuilds(
|
self.config_name,
|
os.path.normpath(os.path.join(self.base_to_build, output)),
|
QuoteShellArgument(
|
os.path.normpath(os.path.join(self.base_to_build, output_binary)),
|
self.flavor,
|
),
|
postbuilds,
|
quiet=True,
|
)
|
|
if not postbuilds:
|
return ""
|
# Postbuilds expect to be run in the gyp file's directory, so insert an
|
# implicit postbuild to cd to there.
|
postbuilds.insert(
|
0, gyp.common.EncodePOSIXShellList(["cd", self.build_to_base])
|
)
|
env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
|
# G will be non-null if any postbuild fails. Run all postbuilds in a
|
# subshell.
|
commands = (
|
env
|
+ " ("
|
+ " && ".join([ninja_syntax.escape(command) for command in postbuilds])
|
)
|
command_string = (
|
commands
|
+ "); G=$$?; "
|
# Remove the final output if any postbuild failed.
|
"((exit $$G) || rm -rf %s) " % output
|
+ "&& exit $$G)"
|
)
|
if is_command_start:
|
return "(" + command_string + " && "
|
else:
|
return "$ && (" + command_string
|
|
def ComputeExportEnvString(self, env):
|
"""Given an environment, returns a string looking like
|
'export FOO=foo; export BAR="${FOO} bar";'
|
that exports |env| to the shell."""
|
export_str = []
|
for k, v in env:
|
export_str.append(
|
"export %s=%s;"
|
% (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))
|
)
|
return " ".join(export_str)
|
|
def ComputeMacBundleOutput(self):
|
"""Return the 'output' (full output path) to a bundle output directory."""
|
assert self.is_mac_bundle
|
path = generator_default_variables["PRODUCT_DIR"]
|
return self.ExpandSpecial(
|
os.path.join(path, self.xcode_settings.GetWrapperName())
|
)
|
|
def ComputeOutputFileName(self, spec, type=None):
|
"""Compute the filename of the final output for the current target."""
|
if not type:
|
type = spec["type"]
|
|
default_variables = copy.copy(generator_default_variables)
|
CalculateVariables(default_variables, {"flavor": self.flavor})
|
|
# Compute filename prefix: the product prefix, or a default for
|
# the product type.
|
DEFAULT_PREFIX = {
|
"loadable_module": default_variables["SHARED_LIB_PREFIX"],
|
"shared_library": default_variables["SHARED_LIB_PREFIX"],
|
"static_library": default_variables["STATIC_LIB_PREFIX"],
|
"executable": default_variables["EXECUTABLE_PREFIX"],
|
}
|
prefix = spec.get("product_prefix", DEFAULT_PREFIX.get(type, ""))
|
|
# Compute filename extension: the product extension, or a default
|
# for the product type.
|
DEFAULT_EXTENSION = {
|
"loadable_module": default_variables["SHARED_LIB_SUFFIX"],
|
"shared_library": default_variables["SHARED_LIB_SUFFIX"],
|
"static_library": default_variables["STATIC_LIB_SUFFIX"],
|
"executable": default_variables["EXECUTABLE_SUFFIX"],
|
}
|
extension = spec.get("product_extension")
|
extension = "." + extension if extension else DEFAULT_EXTENSION.get(type, "")
|
|
if "product_name" in spec:
|
# If we were given an explicit name, use that.
|
target = spec["product_name"]
|
else:
|
# Otherwise, derive a name from the target name.
|
target = spec["target_name"]
|
if prefix == "lib":
|
# Snip out an extra 'lib' from libs if appropriate.
|
target = StripPrefix(target, "lib")
|
|
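# For example, on Linux a 'static_library' target named "base" becomes
|
# "libbase.a", an 'executable' named "chrome" stays "chrome", and a 'none'
|
# target gets a "<target_name>.stamp" stamp file instead.
|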
if type in (
|
"static_library",
|
"loadable_module",
|
"shared_library",
|
"executable",
|
):
|
return f"{prefix}{target}{extension}"
|
elif type == "none":
|
return "%s.stamp" % target
|
else:
|
raise Exception("Unhandled output type %s" % type)
|
|
def ComputeOutput(self, spec, arch=None):
|
"""Compute the path for the final output of the spec."""
|
type = spec["type"]
|
|
if self.flavor == "win":
|
override = self.msvs_settings.GetOutputName(
|
self.config_name, self.ExpandSpecial
|
)
|
if override:
|
return override
|
|
if (
|
arch is None
|
and self.flavor == "mac"
|
and type
|
in ("static_library", "executable", "shared_library", "loadable_module")
|
):
|
filename = self.xcode_settings.GetExecutablePath()
|
else:
|
filename = self.ComputeOutputFileName(spec, type)
|
|
if arch is None and "product_dir" in spec:
|
path = os.path.join(spec["product_dir"], filename)
|
return self.ExpandSpecial(path)
|
|
# Some products go into the output root, libraries go into shared library
|
# dir, and everything else goes into the normal place.
|
type_in_output_root = ["executable", "loadable_module"]
|
if self.flavor == "mac" and self.toolset == "target":
|
type_in_output_root += ["shared_library", "static_library"]
|
elif self.flavor == "win" and self.toolset == "target":
|
type_in_output_root += ["shared_library"]
|
|
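# E.g. on Linux a target-toolset shared_library "foo" is placed at
|
# lib/libfoo.so (lib/<toolset>/libfoo.so for other toolsets), while
|
# executables and loadable modules land directly in the output root.
|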
if arch is not None:
|
# Make sure partial executables don't end up in a bundle or the regular
|
# output directory.
|
archdir = "arch"
|
if self.toolset != "target":
|
archdir = os.path.join("arch", "%s" % self.toolset)
|
return os.path.join(archdir, AddArch(filename, arch))
|
elif type in type_in_output_root or self.is_standalone_static_library:
|
return filename
|
elif type == "shared_library":
|
libdir = "lib"
|
if self.toolset != "target":
|
libdir = os.path.join("lib", "%s" % self.toolset)
|
return os.path.join(libdir, filename)
|
else:
|
return self.GypPathToUniqueOutput(filename, qualified=False)
|
|
def WriteVariableList(self, ninja_file, var, values):
|
assert not isinstance(values, str)
|
if values is None:
|
values = []
|
ninja_file.variable(var, " ".join(values))
|
|
def WriteNewNinjaRule(
|
self, name, args, description, win_shell_flags, env, pool, depfile=None
|
):
|
"""Write out a new ninja "rule" statement for a given command.
|
|
Returns the name of the new rule, and a copy of |args| with variables
|
expanded."""
|
|
if self.flavor == "win":
|
args = [
|
self.msvs_settings.ConvertVSMacros(
|
arg, self.base_to_build, config=self.config_name
|
)
|
for arg in args
|
]
|
description = self.msvs_settings.ConvertVSMacros(
|
description, config=self.config_name
|
)
|
elif self.flavor == "mac":
|
# |env| is an empty list on non-mac.
|
args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
|
description = gyp.xcode_emulation.ExpandEnvVars(description, env)
|
|
# TODO: we shouldn't need to qualify names; we do it because
|
# currently the ninja rule namespace is global, but it really
|
# should be scoped to the subninja.
|
rule_name = self.name
|
if self.toolset == "target":
|
rule_name += "." + self.toolset
|
rule_name += "." + name
|
rule_name = re.sub("[^a-zA-Z0-9_]", "_", rule_name)
|
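# E.g. a target "my-lib" with the default "target" toolset and a rule "idl"
|
# yields the ninja rule name "my_lib_target_idl".
|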
|
# Remove variable references, but not if they refer to the magic rule
|
# variables. This is not quite right, as it also protects these for
|
# actions, not just for rules where they are valid. Good enough.
|
protect = ["${root}", "${dirname}", "${source}", "${ext}", "${name}"]
|
protect = "(?!" + "|".join(map(re.escape, protect)) + ")"
|
description = re.sub(protect + r"\$", "_", description)
|
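# E.g. in a description "Generating $out from ${source}", the bare "$" is
|
# rewritten to "_" (giving "Generating _out from ${source}") while the
|
# protected rule variable ${source} is left intact.
|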
|
# gyp dictates that commands are run from the base directory.
|
# cd into the directory before running, and adjust paths in
|
# the arguments to point to the proper locations.
|
rspfile = None
|
rspfile_content = None
|
args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
|
if self.flavor == "win":
|
rspfile = rule_name + ".$unique_name.rsp"
|
# The cygwin case handles this inside the bash sub-shell.
|
run_in = "" if win_shell_flags.cygwin else " " + self.build_to_base
|
if win_shell_flags.cygwin:
|
rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
|
args, self.build_to_base
|
)
|
else:
|
rspfile_content = gyp.msvs_emulation.EncodeRspFileList(
|
args, win_shell_flags.quote)
|
command = (
|
"%s gyp-win-tool action-wrapper $arch " % sys.executable
|
+ rspfile
|
+ run_in
|
)
|
else:
|
env = self.ComputeExportEnvString(env)
|
command = gyp.common.EncodePOSIXShellList(args)
|
command = "cd %s; " % self.build_to_base + env + command
|
|
# GYP rules/actions express being no-ops by not touching their outputs.
|
# Avoid executing downstream dependencies in this case by specifying
|
# restat=1 to ninja.
|
self.ninja.rule(
|
rule_name,
|
command,
|
description,
|
depfile=depfile,
|
restat=True,
|
pool=pool,
|
rspfile=rspfile,
|
rspfile_content=rspfile_content,
|
)
|
self.ninja.newline()
|
|
return rule_name, args
|
|
|
def CalculateVariables(default_variables, params):
|
"""Calculate additional variables for use in the build (called by gyp)."""
|
global generator_additional_non_configuration_keys
|
global generator_additional_path_sections
|
flavor = gyp.common.GetFlavor(params)
|
if flavor == "mac":
|
default_variables.setdefault("OS", "mac")
|
default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib")
|
default_variables.setdefault(
|
"SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"]
|
)
|
default_variables.setdefault(
|
"LIB_DIR", generator_default_variables["PRODUCT_DIR"]
|
)
|
|
# Copy additional generator configuration data from Xcode, which is shared
|
# by the Mac Ninja generator.
|
import gyp.generator.xcode as xcode_generator
|
|
generator_additional_non_configuration_keys = getattr(
|
xcode_generator, "generator_additional_non_configuration_keys", []
|
)
|
generator_additional_path_sections = getattr(
|
xcode_generator, "generator_additional_path_sections", []
|
)
|
global generator_extra_sources_for_rules
|
generator_extra_sources_for_rules = getattr(
|
xcode_generator, "generator_extra_sources_for_rules", []
|
)
|
elif flavor == "win":
|
exts = gyp.MSVSUtil.TARGET_TYPE_EXT
|
default_variables.setdefault("OS", "win")
|
default_variables["EXECUTABLE_SUFFIX"] = "." + exts["executable"]
|
default_variables["STATIC_LIB_PREFIX"] = ""
|
default_variables["STATIC_LIB_SUFFIX"] = "." + exts["static_library"]
|
default_variables["SHARED_LIB_PREFIX"] = ""
|
default_variables["SHARED_LIB_SUFFIX"] = "." + exts["shared_library"]
|
|
# Copy additional generator configuration data from VS, which is shared
|
# by the Windows Ninja generator.
|
import gyp.generator.msvs as msvs_generator
|
|
generator_additional_non_configuration_keys = getattr(
|
msvs_generator, "generator_additional_non_configuration_keys", []
|
)
|
generator_additional_path_sections = getattr(
|
msvs_generator, "generator_additional_path_sections", []
|
)
|
|
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
else:
|
operating_system = flavor
|
if flavor == "android":
|
operating_system = "linux" # Keep this legacy behavior for now.
|
default_variables.setdefault("OS", operating_system)
|
default_variables.setdefault("SHARED_LIB_SUFFIX", ".so")
|
default_variables.setdefault(
|
"SHARED_LIB_DIR", os.path.join("$!PRODUCT_DIR", "lib")
|
)
|
default_variables.setdefault("LIB_DIR", os.path.join("$!PRODUCT_DIR", "obj"))
|
|
|
def ComputeOutputDir(params):
|
"""Returns the path from the toplevel_dir to the build output directory."""
|
# generator_dir: relative path from pwd to where make puts build files.
|
# Makes migrating from make to ninja easier; ninja doesn't put anything here.
|
generator_dir = os.path.relpath(params["options"].generator_output or ".")
|
|
# output_dir: relative path from generator_dir to the build directory.
|
output_dir = params.get("generator_flags", {}).get("output_dir", "out")
|
|
# Relative path from source root to our output files. e.g. "out"
|
return os.path.normpath(os.path.join(generator_dir, output_dir))
|
|
|
def CalculateGeneratorInputInfo(params):
|
"""Called by __init__ to initialize generator values based on params."""
|
# E.g. "out/gypfiles"
|
toplevel = params["options"].toplevel_dir
|
qualified_out_dir = os.path.normpath(
|
os.path.join(toplevel, ComputeOutputDir(params), "gypfiles")
|
)
|
|
global generator_filelist_paths
|
generator_filelist_paths = {
|
"toplevel": toplevel,
|
"qualified_out_dir": qualified_out_dir,
|
}
|
|
|
def OpenOutput(path, mode="w"):
|
"""Open |path| for writing, creating directories if necessary."""
|
gyp.common.EnsureDirExists(path)
|
return open(path, mode)
|
|
|
def CommandWithWrapper(cmd, wrappers, prog):
|
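"""Returns |prog| prefixed with the wrapper registered for |cmd|, if any
|
(e.g. a ccache or distcc entry from make_global_settings)."""
|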
wrapper = wrappers.get(cmd, "")
|
if wrapper:
|
return wrapper + " " + prog
|
return prog
|
|
|
def GetDefaultConcurrentLinks():
|
"""Returns a best-guess for a number of concurrent links."""
|
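# The heuristic below caps the pool with a per-link memory budget: roughly
|
# total RAM / 5 GiB on Windows, / 8 GiB on Linux and / 4 GiB on macOS; e.g.
|
# a 16 GiB Linux machine gets 16 // 8 = 2 concurrent links.
|
# GYP_LINK_CONCURRENCY overrides the guess entirely.
|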
pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0))
|
if pool_size:
|
return pool_size
|
|
if sys.platform in ("win32", "cygwin"):
|
import ctypes
|
|
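# Mirrors the Win32 MEMORYSTATUSEX structure; dwLength must be set to the
|
# structure's size before GlobalMemoryStatusEx is called.
|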
class MEMORYSTATUSEX(ctypes.Structure):
|
_fields_ = [
|
("dwLength", ctypes.c_ulong),
|
("dwMemoryLoad", ctypes.c_ulong),
|
("ullTotalPhys", ctypes.c_ulonglong),
|
("ullAvailPhys", ctypes.c_ulonglong),
|
("ullTotalPageFile", ctypes.c_ulonglong),
|
("ullAvailPageFile", ctypes.c_ulonglong),
|
("ullTotalVirtual", ctypes.c_ulonglong),
|
("ullAvailVirtual", ctypes.c_ulonglong),
|
("sullAvailExtendedVirtual", ctypes.c_ulonglong),
|
]
|
|
stat = MEMORYSTATUSEX()
|
stat.dwLength = ctypes.sizeof(stat)
|
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
|
|
# VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
|
# on a 64 GiB machine.
|
mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GiB
|
hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32)))
|
return min(mem_limit, hard_cap)
|
elif sys.platform.startswith("linux"):
|
if os.path.exists("/proc/meminfo"):
|
with open("/proc/meminfo") as meminfo:
|
memtotal_re = re.compile(r"^MemTotal:\s*(\d*)\s*kB")
|
for line in meminfo:
|
match = memtotal_re.match(line)
|
if not match:
|
continue
|
# Allow 8 GB per link on Linux because Gold is quite memory hungry.
|
# MemTotal is reported in kB, so the divisor 8 * 2**20 corresponds to 8 GB.
|
return max(1, int(match.group(1)) // (8 * (2 ** 20)))
|
return 1
|
elif sys.platform == "darwin":
|
try:
|
avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]))
|
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
|
# 4GB per ld process allows for some more bloat.
|
return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
|
except subprocess.CalledProcessError:
|
return 1
|
else:
|
# TODO(scottmg): Implement this for other platforms.
|
return 1
|
|
|
def _GetWinLinkRuleNameSuffix(embed_manifest):
|
"""Returns the suffix used to select an appropriate linking rule depending on
|
whether the manifest embedding is enabled."""
|
return "_embed" if embed_manifest else ""
|
|
|
def _AddWinLinkRules(master_ninja, embed_manifest):
|
"""Adds link rules for Windows platform to |master_ninja|."""
|
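# Each invocation registers "link", "solink" and "solink_module" rules with
|
# the given suffix, so the "" and "_embed" variants coexist side by side.
|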
|
def FullLinkCommand(ldcmd, out, binary_type):
|
resource_name = {"exe": "1", "dll": "2"}[binary_type]
|
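# These are the standard Win32 manifest resource IDs:
|
# 1 (CREATEPROCESS_MANIFEST_RESOURCE_ID) for executables,
|
# 2 (ISOLATIONAWARE_MANIFEST_RESOURCE_ID) for DLLs.
|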
return (
|
"%(python)s gyp-win-tool link-with-manifests $arch %(embed)s "
|
'%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" '
|
"$manifests"
|
% {
|
"python": sys.executable,
|
"out": out,
|
"ldcmd": ldcmd,
|
"resname": resource_name,
|
"embed": embed_manifest,
|
}
|
)
|
|
rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
|
use_separate_mspdbsrv = int(os.environ.get("GYP_USE_SEPARATE_MSPDBSRV", "0")) != 0
|
dlldesc = "LINK%s(DLL) $binary" % rule_name_suffix.upper()
|
dllcmd = (
|
"%s gyp-win-tool link-wrapper $arch %s "
|
"$ld /nologo $implibflag /DLL /OUT:$binary "
|
"@$binary.rsp" % (sys.executable, use_separate_mspdbsrv)
|
)
|
dllcmd = FullLinkCommand(dllcmd, "$binary", "dll")
|
master_ninja.rule(
|
"solink" + rule_name_suffix,
|
description=dlldesc,
|
command=dllcmd,
|
rspfile="$binary.rsp",
|
rspfile_content="$libs $in_newline $ldflags",
|
restat=True,
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"solink_module" + rule_name_suffix,
|
description=dlldesc,
|
command=dllcmd,
|
rspfile="$binary.rsp",
|
rspfile_content="$libs $in_newline $ldflags",
|
restat=True,
|
pool="link_pool",
|
)
|
# Note that ldflags goes at the end so that it has the option of
|
# overriding default settings earlier in the command line.
|
exe_cmd = (
|
"%s gyp-win-tool link-wrapper $arch %s "
|
"$ld /nologo /OUT:$binary @$binary.rsp"
|
% (sys.executable, use_separate_mspdbsrv)
|
)
|
exe_cmd = FullLinkCommand(exe_cmd, "$binary", "exe")
|
master_ninja.rule(
|
"link" + rule_name_suffix,
|
description="LINK%s $binary" % rule_name_suffix.upper(),
|
command=exe_cmd,
|
rspfile="$binary.rsp",
|
rspfile_content="$in_newline $libs $ldflags",
|
pool="link_pool",
|
)
|
|
|
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
|
options = params["options"]
|
flavor = gyp.common.GetFlavor(params)
|
generator_flags = params.get("generator_flags", {})
|
|
# build_dir: relative path from source root to our output files.
|
# e.g. "out/Debug"
|
build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name))
|
|
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
|
|
master_ninja_file = OpenOutput(os.path.join(toplevel_build, "build.ninja"))
|
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
|
|
# Put build-time support tools in out/{config_name}.
|
gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
|
|
# Grab make settings for CC/CXX.
|
# The rules are:
|
# - The priority, from lowest to highest, is: the default gcc/g++, the
|
#   'make_global_settings' in gyp, and then the environment variables.
|
# - If there is no 'make_global_settings' for CC.host/CXX.host or
|
# 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
|
# to cc/cxx.
|
if flavor == "win":
|
ar = "lib.exe"
|
# cc and cxx must be set to the correct architecture by overriding with one
|
# of cl_x86 or cl_x64 below.
|
cc = "UNSET"
|
cxx = "UNSET"
|
ld = "link.exe"
|
ld_host = "$ld"
|
else:
|
ar = "ar"
|
cc = "cc"
|
cxx = "c++"
|
ld = "$cc"
|
ldxx = "$cxx"
|
ld_host = "$cc_host"
|
ldxx_host = "$cxx_host"
|
|
ar_host = ar
|
cc_host = None
|
cxx_host = None
|
cc_host_global_setting = None
|
cxx_host_global_setting = None
|
clang_cl = None
|
nm = "nm"
|
nm_host = "nm"
|
readelf = "readelf"
|
readelf_host = "readelf"
|
|
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
make_global_settings = data[build_file].get("make_global_settings", [])
|
build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
|
wrappers = {}
|
for key, value in make_global_settings:
|
if key == "AR":
|
ar = os.path.join(build_to_root, value)
|
if key == "AR.host":
|
ar_host = os.path.join(build_to_root, value)
|
if key == "CC":
|
cc = os.path.join(build_to_root, value)
|
if cc.endswith("clang-cl"):
|
clang_cl = cc
|
if key == "CXX":
|
cxx = os.path.join(build_to_root, value)
|
if key == "CC.host":
|
cc_host = os.path.join(build_to_root, value)
|
cc_host_global_setting = value
|
if key == "CXX.host":
|
cxx_host = os.path.join(build_to_root, value)
|
cxx_host_global_setting = value
|
if key == "LD":
|
ld = os.path.join(build_to_root, value)
|
if key == "LD.host":
|
ld_host = os.path.join(build_to_root, value)
|
if key == "LDXX":
|
ldxx = os.path.join(build_to_root, value)
|
if key == "LDXX.host":
|
ldxx_host = os.path.join(build_to_root, value)
|
if key == "NM":
|
nm = os.path.join(build_to_root, value)
|
if key == "NM.host":
|
nm_host = os.path.join(build_to_root, value)
|
if key == "READELF":
|
readelf = os.path.join(build_to_root, value)
|
if key == "READELF.host":
|
readelf_host = os.path.join(build_to_root, value)
|
if key.endswith("_wrapper"):
|
wrappers[key[: -len("_wrapper")]] = os.path.join(build_to_root, value)
|
|
# Support wrappers from environment variables too.
|
for key, value in os.environ.items():
|
if key.lower().endswith("_wrapper"):
|
key_prefix = key[: -len("_wrapper")]
|
key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
|
wrappers[key_prefix] = os.path.join(build_to_root, value)
|
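# E.g. a CC_wrapper setting (from make_global_settings above or from the
|
# environment here) gets prepended to the compiler command via
|
# CommandWithWrapper below.
|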
|
mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
|
if mac_toolchain_dir:
|
wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
|
|
if flavor == "win":
|
configs = [
|
target_dicts[qualified_target]["configurations"][config_name]
|
for qualified_target in target_list
|
]
|
shared_system_includes = None
|
if not generator_flags.get("ninja_use_custom_environment_files", 0):
|
shared_system_includes = gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
|
configs, generator_flags
|
)
|
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
|
toplevel_build, generator_flags, shared_system_includes, OpenOutput
|
)
|
for arch, path in sorted(cl_paths.items()):
|
if clang_cl:
|
# If we have selected clang-cl, use that instead.
|
path = clang_cl
|
command = CommandWithWrapper(
|
"CC", wrappers, QuoteShellArgument(path, "win")
|
)
|
if clang_cl:
|
# Use clang-cl to cross-compile for x86 or x86_64.
|
command += " -m32" if arch == "x86" else " -m64"
|
master_ninja.variable("cl_" + arch, command)
|
|
cc = GetEnvironFallback(["CC_target", "CC"], cc)
|
master_ninja.variable("cc", CommandWithWrapper("CC", wrappers, cc))
|
cxx = GetEnvironFallback(["CXX_target", "CXX"], cxx)
|
master_ninja.variable("cxx", CommandWithWrapper("CXX", wrappers, cxx))
|
|
if flavor == "win":
|
master_ninja.variable("ld", ld)
|
master_ninja.variable("idl", "midl.exe")
|
master_ninja.variable("ar", ar)
|
master_ninja.variable("rc", "rc.exe")
|
master_ninja.variable("ml_x86", "ml.exe")
|
master_ninja.variable("ml_x64", "ml64.exe")
|
master_ninja.variable("mt", "mt.exe")
|
else:
|
master_ninja.variable("ld", CommandWithWrapper("LINK", wrappers, ld))
|
master_ninja.variable("ldxx", CommandWithWrapper("LINK", wrappers, ldxx))
|
master_ninja.variable("ar", GetEnvironFallback(["AR_target", "AR"], ar))
|
if flavor != "mac":
|
# Mac does not use readelf/nm for .TOC generation, so avoid polluting
|
# the master ninja with extra unused variables.
|
master_ninja.variable("nm", GetEnvironFallback(["NM_target", "NM"], nm))
|
master_ninja.variable(
|
"readelf", GetEnvironFallback(["READELF_target", "READELF"], readelf)
|
)
|
|
if generator_supports_multiple_toolsets:
|
if not cc_host:
|
cc_host = cc
|
if not cxx_host:
|
cxx_host = cxx
|
|
master_ninja.variable("ar_host", GetEnvironFallback(["AR_host"], ar_host))
|
master_ninja.variable("nm_host", GetEnvironFallback(["NM_host"], nm_host))
|
master_ninja.variable(
|
"readelf_host", GetEnvironFallback(["READELF_host"], readelf_host)
|
)
|
cc_host = GetEnvironFallback(["CC_host"], cc_host)
|
cxx_host = GetEnvironFallback(["CXX_host"], cxx_host)
|
|
# The environment variable could be used in 'make_global_settings', like
|
# ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
|
if "$(CC)" in cc_host and cc_host_global_setting:
|
cc_host = cc_host_global_setting.replace("$(CC)", cc)
|
if "$(CXX)" in cxx_host and cxx_host_global_setting:
|
cxx_host = cxx_host_global_setting.replace("$(CXX)", cxx)
|
master_ninja.variable(
|
"cc_host", CommandWithWrapper("CC.host", wrappers, cc_host)
|
)
|
master_ninja.variable(
|
"cxx_host", CommandWithWrapper("CXX.host", wrappers, cxx_host)
|
)
|
if flavor == "win":
|
master_ninja.variable("ld_host", ld_host)
|
else:
|
master_ninja.variable(
|
"ld_host", CommandWithWrapper("LINK", wrappers, ld_host)
|
)
|
master_ninja.variable(
|
"ldxx_host", CommandWithWrapper("LINK", wrappers, ldxx_host)
|
)
|
|
master_ninja.newline()
|
|
master_ninja.pool("link_pool", depth=GetDefaultConcurrentLinks())
|
master_ninja.newline()
|
|
deps = "msvc" if flavor == "win" else "gcc"
|
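# ninja's header-dependency modes: "gcc" reads the depfile written with
|
# -MMD -MF, while "msvc" parses cl.exe's /showIncludes output.
|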
|
if flavor != "win":
|
master_ninja.rule(
|
"cc",
|
description="CC $out",
|
command=(
|
"$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c "
|
"$cflags_pch_c -c $in -o $out"
|
),
|
depfile="$out.d",
|
deps=deps,
|
)
|
master_ninja.rule(
|
"cc_s",
|
description="CC $out",
|
command=(
|
"$cc $defines $includes $cflags $cflags_c "
|
"$cflags_pch_c -c $in -o $out"
|
),
|
)
|
master_ninja.rule(
|
"cxx",
|
description="CXX $out",
|
command=(
|
"$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc "
|
"$cflags_pch_cc -c $in -o $out"
|
),
|
depfile="$out.d",
|
deps=deps,
|
)
|
else:
|
# TODO(scottmg) Separate pdb names is a test to see if it works around
|
# http://crbug.com/142362. It seems there's a race between the creation of
|
# the .pdb by the precompiled header step for .cc and the compilation of
|
# .c files. This should be handled by mspdbsrv, but rarely errors out with
|
# c1xx : fatal error C1033: cannot open program database
|
# By making the rules target separate pdb files this might be avoided.
|
cc_command = (
|
"ninja -t msvc -e $arch " + "-- "
|
"$cc /nologo /showIncludes /FC "
|
"@$out.rsp /c $in /Fo$out /Fd$pdbname_c "
|
)
|
cxx_command = (
|
"ninja -t msvc -e $arch " + "-- "
|
"$cxx /nologo /showIncludes /FC "
|
"@$out.rsp /c $in /Fo$out /Fd$pdbname_cc "
|
)
|
master_ninja.rule(
|
"cc",
|
description="CC $out",
|
command=cc_command,
|
rspfile="$out.rsp",
|
rspfile_content="$defines $includes $cflags $cflags_c",
|
deps=deps,
|
)
|
master_ninja.rule(
|
"cxx",
|
description="CXX $out",
|
command=cxx_command,
|
rspfile="$out.rsp",
|
rspfile_content="$defines $includes $cflags $cflags_cc",
|
deps=deps,
|
)
|
master_ninja.rule(
|
"idl",
|
description="IDL $in",
|
command=(
|
"%s gyp-win-tool midl-wrapper $arch $outdir "
|
"$tlb $h $dlldata $iid $proxy $in "
|
"$midl_includes $idlflags" % sys.executable
|
),
|
)
|
master_ninja.rule(
|
"rc",
|
description="RC $in",
|
# Note: $in must be last, otherwise rc.exe complains.
|
command=(
|
"%s gyp-win-tool rc-wrapper "
|
"$arch $rc $defines $resource_includes $rcflags /fo$out $in"
|
% sys.executable
|
),
|
)
|
master_ninja.rule(
|
"asm",
|
description="ASM $out",
|
command=(
|
"%s gyp-win-tool asm-wrapper "
|
"$arch $asm $defines $includes $asmflags /c /Fo $out $in"
|
% sys.executable
|
),
|
)
|
|
if flavor not in ("ios", "mac", "win"):
|
master_ninja.rule(
|
"alink",
|
description="AR $out",
|
command="rm -f $out && $ar rcs $arflags $out $in",
|
)
|
master_ninja.rule(
|
"alink_thin",
|
description="AR $out",
|
command="rm -f $out && $ar rcsT $arflags $out $in",
|
)
|
|
# This allows targets that only need to depend on $lib's API to declare an
|
# order-only dependency on $lib.TOC and avoid relinking such downstream
|
# dependencies when $lib changes only in non-public ways.
|
# The resulting string leaves an uninterpolated %(suffix)s placeholder which
|
# is used in the final substitution below.
|
mtime_preserving_solink_base = (
|
"if [ ! -e $lib -o ! -e $lib.TOC ]; then "
|
"%(solink)s && %(extract_toc)s > $lib.TOC; else "
|
"%(solink)s && %(extract_toc)s > $lib.tmp && "
|
"if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; "
|
"fi; fi"
|
% {
|
"solink": "$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s",
|
"extract_toc": (
|
"{ $readelf -d $lib | grep SONAME ; "
|
"$nm -gD -f p $lib | cut -f1-2 -d' '; }"
|
),
|
}
|
)
|
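# The extracted TOC is the library's SONAME plus its exported dynamic
|
# symbols (via readelf -d and nm -gD), so it only changes when the public
|
# ABI changes.
|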
|
master_ninja.rule(
|
"solink",
|
description="SOLINK $lib",
|
restat=True,
|
command=mtime_preserving_solink_base
|
% {"suffix": "@$link_file_list"},
|
rspfile="$link_file_list",
|
rspfile_content=(
|
"-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs"
|
),
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"solink_module",
|
description="SOLINK(module) $lib",
|
restat=True,
|
command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"},
|
rspfile="$link_file_list",
|
rspfile_content="-Wl,--start-group $in $solibs $libs -Wl,--end-group",
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"link",
|
description="LINK $out",
|
command=(
|
"$ld $ldflags -o $out "
|
"-Wl,--start-group $in $solibs $libs -Wl,--end-group"
|
),
|
pool="link_pool",
|
)
|
elif flavor == "win":
|
master_ninja.rule(
|
"alink",
|
description="LIB $out",
|
command=(
|
"%s gyp-win-tool link-wrapper $arch False "
|
"$ar /nologo /ignore:4221 /OUT:$out @$out.rsp" % sys.executable
|
),
|
rspfile="$out.rsp",
|
rspfile_content="$in_newline $libflags",
|
)
|
_AddWinLinkRules(master_ninja, embed_manifest=True)
|
_AddWinLinkRules(master_ninja, embed_manifest=False)
|
else:
|
master_ninja.rule(
|
"objc",
|
description="OBJC $out",
|
command=(
|
"$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc "
|
"$cflags_pch_objc -c $in -o $out"
|
),
|
depfile="$out.d",
|
deps=deps,
|
)
|
master_ninja.rule(
|
"objcxx",
|
description="OBJCXX $out",
|
command=(
|
"$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc "
|
"$cflags_pch_objcc -c $in -o $out"
|
),
|
depfile="$out.d",
|
deps=deps,
|
)
|
master_ninja.rule(
|
"alink",
|
description="LIBTOOL-STATIC $out, POSTBUILDS",
|
command="rm -f $out && "
|
"./gyp-mac-tool filter-libtool libtool $libtool_flags "
|
"-static -o $out $in"
|
"$postbuilds",
|
)
|
master_ninja.rule(
|
"lipo",
|
description="LIPO $out, POSTBUILDS",
|
command="rm -f $out && lipo -create $in -output $out$postbuilds",
|
)
|
master_ninja.rule(
|
"solipo",
|
description="SOLIPO $out, POSTBUILDS",
|
command=(
|
"rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&"
|
"%(extract_toc)s > $lib.TOC"
|
% {
|
"extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; "
|
"nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }"
|
}
|
),
|
)
|
|
# Record the public interface of $lib in $lib.TOC. See the corresponding
|
# comment in the posix section above for details.
|
solink_base = "$ld %(type)s $ldflags -o $lib %(suffix)s"
|
mtime_preserving_solink_base = (
|
"if [ ! -e $lib -o ! -e $lib.TOC ] || "
|
# Always force dependent targets to relink if this library
|
# reexports something. Handling this correctly would require
|
# recursive TOC dumping but this is rare in practice, so punt.
|
"otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then "
|
"%(solink)s && %(extract_toc)s > $lib.TOC; "
|
"else "
|
"%(solink)s && %(extract_toc)s > $lib.tmp && "
|
"if ! cmp -s $lib.tmp $lib.TOC; then "
|
"mv $lib.tmp $lib.TOC ; "
|
"fi; "
|
"fi"
|
% {
|
"solink": solink_base,
|
"extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; "
|
"nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }",
|
}
|
)
|
|
solink_suffix = "@$link_file_list$postbuilds"
|
master_ninja.rule(
|
"solink",
|
description="SOLINK $lib, POSTBUILDS",
|
restat=True,
|
command=mtime_preserving_solink_base
|
% {"suffix": solink_suffix, "type": "-shared"},
|
rspfile="$link_file_list",
|
rspfile_content="$in $solibs $libs",
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"solink_notoc",
|
description="SOLINK $lib, POSTBUILDS",
|
restat=True,
|
command=solink_base % {"suffix": solink_suffix, "type": "-shared"},
|
rspfile="$link_file_list",
|
rspfile_content="$in $solibs $libs",
|
pool="link_pool",
|
)
|
|
master_ninja.rule(
|
"solink_module",
|
description="SOLINK(module) $lib, POSTBUILDS",
|
restat=True,
|
command=mtime_preserving_solink_base
|
% {"suffix": solink_suffix, "type": "-bundle"},
|
rspfile="$link_file_list",
|
rspfile_content="$in $solibs $libs",
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"solink_module_notoc",
|
description="SOLINK(module) $lib, POSTBUILDS",
|
restat=True,
|
command=solink_base % {"suffix": solink_suffix, "type": "-bundle"},
|
rspfile="$link_file_list",
|
rspfile_content="$in $solibs $libs",
|
pool="link_pool",
|
)
|
|
master_ninja.rule(
|
"link",
|
description="LINK $out, POSTBUILDS",
|
command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"),
|
pool="link_pool",
|
)
|
master_ninja.rule(
|
"preprocess_infoplist",
|
description="PREPROCESS INFOPLIST $out",
|
command=(
|
"$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && "
|
"plutil -convert xml1 $out $out"
|
),
|
)
|
master_ninja.rule(
|
"copy_infoplist",
|
description="COPY INFOPLIST $in",
|
command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys",
|
)
|
master_ninja.rule(
|
"merge_infoplist",
|
description="MERGE INFOPLISTS $in",
|
command="$env ./gyp-mac-tool merge-info-plist $out $in",
|
)
|
master_ninja.rule(
|
"compile_xcassets",
|
description="COMPILE XCASSETS $in",
|
command="$env ./gyp-mac-tool compile-xcassets $keys $in",
|
)
|
master_ninja.rule(
|
"compile_ios_framework_headers",
|
description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in",
|
command="$env ./gyp-mac-tool compile-ios-framework-header-map $out "
|
"$framework $in && $env ./gyp-mac-tool "
|
"copy-ios-framework-headers $framework $copy_headers",
|
)
|
master_ninja.rule(
|
"mac_tool",
|
description="MACTOOL $mactool_cmd $in",
|
command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary",
|
)
|
master_ninja.rule(
|
"package_framework",
|
description="PACKAGE FRAMEWORK $out, POSTBUILDS",
|
command="./gyp-mac-tool package-framework $out $version$postbuilds "
|
"&& touch $out",
|
)
|
master_ninja.rule(
|
"package_ios_framework",
|
description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS",
|
command="./gyp-mac-tool package-ios-framework $out $postbuilds "
|
"&& touch $out",
|
)
|
if flavor == "win":
|
master_ninja.rule(
|
"stamp",
|
description="STAMP $out",
|
command="%s gyp-win-tool stamp $out" % sys.executable,
|
)
|
else:
|
master_ninja.rule(
|
"stamp", description="STAMP $out", command="${postbuilds}touch $out"
|
)
|
if flavor == "win":
|
master_ninja.rule(
|
"copy",
|
description="COPY $in $out",
|
command="%s gyp-win-tool recursive-mirror $in $out" % sys.executable,
|
)
|
elif flavor == "zos":
|
master_ninja.rule(
|
"copy",
|
description="COPY $in $out",
|
command="rm -rf $out && cp -fRP $in $out",
|
)
|
else:
|
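# Prefer a cheap hard link; fall back to a full recursive copy when linking
|
# fails (e.g. across filesystems or when $in is a directory).
|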
master_ninja.rule(
|
"copy",
|
description="COPY $in $out",
|
command="ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)",
|
)
|
master_ninja.newline()
|
|
all_targets = set()
|
for build_file in params["build_files"]:
|
for target in gyp.common.AllTargets(
|
target_list, target_dicts, os.path.normpath(build_file)
|
):
|
all_targets.add(target)
|
all_outputs = set()
|
|
# target_outputs is a map from qualified target name to a Target object.
|
target_outputs = {}
|
# target_short_names is a map from target short name to a list of Target
|
# objects.
|
target_short_names = {}
|
|
# Short names of targets that were skipped because they didn't contain anything
|
# interesting.
|
# NOTE: there may be overlap between this and non_empty_target_names.
|
empty_target_names = set()
|
|
# Set of non-empty short target names.
|
# NOTE: there may be overlap between this and empty_target_names.
|
non_empty_target_names = set()
|
|
for qualified_target in target_list:
|
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
|
build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target)
|
|
this_make_global_settings = data[build_file].get("make_global_settings", [])
|
assert make_global_settings == this_make_global_settings, (
|
"make_global_settings needs to be the same for all targets. "
|
f"{this_make_global_settings} vs. {make_global_settings}"
|
)
|
|
spec = target_dicts[qualified_target]
|
if flavor == "mac":
|
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
|
|
# If build_file is a symlink, we must not follow it because there's a chance
|
# it could point to a path above toplevel_dir, and we cannot correctly deal
|
# with that case at the moment.
|
build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False)
|
|
qualified_target_for_hash = gyp.common.QualifiedTarget(
|
build_file, name, toolset
|
)
|
qualified_target_for_hash = qualified_target_for_hash.encode("utf-8")
|
hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
|
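# The hash is used to namespace per-target rule names, since the ninja rule
|
# namespace is global across all subninja files.
|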
|
base_path = os.path.dirname(build_file)
|
obj = "obj"
|
if toolset != "target":
|
obj += "." + toolset
|
output_file = os.path.join(obj, base_path, name + ".ninja")
|
|
ninja_output = StringIO()
|
writer = NinjaWriter(
|
hash_for_rules,
|
target_outputs,
|
base_path,
|
build_dir,
|
ninja_output,
|
toplevel_build,
|
output_file,
|
flavor,
|
toplevel_dir=options.toplevel_dir,
|
)
|
|
target = writer.WriteSpec(spec, config_name, generator_flags)
|
|
if ninja_output.tell() > 0:
|
# Only create files for ninja files that actually have contents.
|
with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
|
ninja_file.write(ninja_output.getvalue())
|
ninja_output.close()
|
master_ninja.subninja(output_file)
|
|
if target:
|
if name != target.FinalOutput() and spec["toolset"] == "target":
|
target_short_names.setdefault(name, []).append(target)
|
target_outputs[qualified_target] = target
|
if qualified_target in all_targets:
|
all_outputs.add(target.FinalOutput())
|
non_empty_target_names.add(name)
|
else:
|
empty_target_names.add(name)
|
|
if target_short_names:
|
# Write a short name to build this target. This benefits both the
|
# "build chrome" case as well as the gyp tests, which expect to be
|
# able to run actions and build libraries by their short name.
|
master_ninja.newline()
|
master_ninja.comment("Short names for targets.")
|
for short_name in sorted(target_short_names):
|
master_ninja.build(
|
short_name,
|
"phony",
|
[x.FinalOutput() for x in target_short_names[short_name]],
|
)
|
|
# Write phony targets for any empty targets that weren't written yet. As
|
# short names are not necessarily unique, only do this for short names that
|
# haven't already been output for another target.
|
empty_target_names = empty_target_names - non_empty_target_names
|
if empty_target_names:
|
master_ninja.newline()
|
master_ninja.comment("Empty targets (output for completeness).")
|
for name in sorted(empty_target_names):
|
master_ninja.build(name, "phony")
|
|
if all_outputs:
|
master_ninja.newline()
|
master_ninja.build("all", "phony", sorted(all_outputs))
|
master_ninja.default(generator_flags.get("default_target", "all"))
|
|
master_ninja_file.close()
|
|
|
def PerformBuild(data, configurations, params):
|
options = params["options"]
|
for config in configurations:
|
builddir = os.path.join(options.toplevel_dir, "out", config)
|
arguments = ["ninja", "-C", builddir]
|
print(f"Building [{config}]: {arguments}")
|
subprocess.check_call(arguments)
|
|
|
def CallGenerateOutputForConfig(arglist):
|
# Ignore the interrupt signal so that the parent process catches it and
|
# kills all multiprocessing children.
|
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
|
(target_list, target_dicts, data, params, config_name) = arglist
|
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
|
|
|
def GenerateOutput(target_list, target_dicts, data, params):
|
# Update target_dicts for iOS device builds.
|
target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
|
target_dicts
|
)
|
|
user_config = params.get("generator_flags", {}).get("config", None)
|
if gyp.common.GetFlavor(params) == "win":
|
target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
|
target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
|
target_list, target_dicts, generator_default_variables
|
)
|
|
if user_config:
|
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
|
else:
|
config_names = target_dicts[target_list[0]]["configurations"]
|
if params["parallel"]:
|
try:
|
pool = multiprocessing.Pool(len(config_names))
|
arglists = []
|
for config_name in config_names:
|
arglists.append(
|
(target_list, target_dicts, data, params, config_name)
|
)
|
pool.map(CallGenerateOutputForConfig, arglists)
|
except KeyboardInterrupt as e:
|
pool.terminate()
|
raise e
|
else:
|
for config_name in config_names:
|
GenerateOutputForConfig(
|
target_list, target_dicts, data, params, config_name
|
)
|